import { ClassType, CompilerContext, isArray, isObject, toFastProperties } from '@deepkit/core';
import { ClassSchema, getClassSchema, getGlobalStore, getSortedUnionTypes, JitStack, jsonTypeGuards, PropertySchema, UnpopulatedCheck, unpopulatedSymbol } from '@deepkit/type';
import { seekElementSize } from './continuation';
import { isObjectId, isUUID, ObjectId, ObjectIdSymbol, UUID, UUIDSymbol } from './model';
import { BSON_BINARY_SUBTYPE_BIGINT, BSON_BINARY_SUBTYPE_DEFAULT, BSON_BINARY_SUBTYPE_UUID, BSONType, digitByteSize, TWO_PWR_32_DBL_N } from './utils';
/**
 * Allocates a byte buffer of `size` bytes, preferring Node's fast
 * uninitialized Buffer.allocUnsafe when available, and falling back to a
 * zero-initialized Uint8Array in browser-like environments.
 */
export function createBuffer(size: number): Uint8Array {
    const hasFastAlloc = 'undefined' !== typeof Buffer && 'function' === typeof Buffer.allocUnsafe;
    if (hasFastAlloc) return Buffer.allocUnsafe(size);
    return new Uint8Array(size);
}
// Global monkey-patch: JSON.stringify() throws a TypeError on BigInt values
// by default, so serialize bigints as their decimal string representation.
// NOTE(review): this mutates the global BigInt prototype on module load and
// therefore affects every JSON.stringify call in the process, not only this
// module — confirm this is intended.
(BigInt.prototype as any).toJSON = function () {
    return this.toString();
};
// BSON MAX VALUES — int32 range for the BSON INT element type
const BSON_INT32_MAX = 0x7fffffff;
const BSON_INT32_MIN = -0x80000000;

// JS MAX PRECISE VALUES
export const JS_INT_MAX = 0x20000000000000; // Any integer up to 2^53 can be precisely represented by a double.
export const JS_INT_MIN = -0x20000000000000; // Any integer down to -2^53 can be precisely represented by a double.

// int64 bounds used to decide INT vs LONG vs BINARY encoding for bigints.
// Falls back to plain numbers on engines without BigInt (the fallback is
// approximate, since these values exceed double precision).
// NOTE(review): the true int64 minimum is -9223372036854775808; using
// -…807 here excludes one representable value — confirm intent.
const LONG_MAX = 'undefined' !== typeof BigInt ? BigInt('9223372036854775807') : 9223372036854775807;
const LONG_MIN = 'undefined' !== typeof BigInt ? BigInt('-9223372036854775807') : -9223372036854775807;
/**
 * Parses two lower-case hex characters of `hex` into one byte value.
 * `index` addresses the byte pair (pair i = characters 2i and 2i+1);
 * `offset` shifts the character position, used to skip dashes in UUIDs.
 */
export function hexToByte(hex: string, index: number = 0, offset: number = 0): number {
    const pos = index * 2 + offset;
    const nibble = (charCode: number): number => {
        const v = charCode - 48;        // '0'..'9' -> 0..9
        return v > 9 ? v - 39 : v;      // 'a'..'f' -> 10..15
    };
    return nibble(hex.charCodeAt(pos)) * 16 + nibble(hex.charCodeAt(pos + 1));
}
/**
 * Reads byte `index` (0..15) from a dash-formatted UUID string,
 * e.g. 'bef8de96-41fe-442f-b70c-c3a150f8c96c', transparently skipping
 * the dash separators.
 */
export function uuidStringToByte(hex: string, index: number = 0): number {
    // dashes sit after bytes 3, 5, 7 and 9; each dash before the requested
    // byte shifts the character offset by one
    const dashesBefore =
        (index > 3 ? 1 : 0) +
        (index > 5 ? 1 : 0) +
        (index > 7 ? 1 : 0) +
        (index > 9 ? 1 : 0);
    return hexToByte(hex, index, dashesBefore);
}
/**
 * Number of bytes `str` occupies when encoded the same way
 * Writer.writeString does: UTF-8 per UTF-16 code unit. Note that each
 * surrogate half is counted as 3 bytes (6 per astral code point), which
 * matches the writer's per-code-unit encoding, not standard 4-byte UTF-8.
 */
export function stringByteLength(str: string): number {
    if (!str) return 0;
    let total = 0;
    // iterate UTF-16 code units (not code points) to mirror writeString
    for (let i = str.length - 1; i >= 0; i--) {
        const unit = str.charCodeAt(i);
        total += unit < 128 ? 1 : unit < 2048 ? 2 : 3;
    }
    return total;
}
/**
 * Byte size of the custom binary bigint encoding produced by
 * Writer.writeBigIntBinary: 4-byte length prefix + 1-byte binary sub type
 * + 1 signum byte (zero values carry a single 0 byte instead) + the
 * magnitude as bytes of the zero-padded hex representation.
 */
function getBigIntSizeBinary(value: bigint): number {
    let digits = value.toString(16);
    const negative = digits.startsWith('-');
    if (negative) digits = digits.slice(1);
    const isZero = digits === '0';
    if (digits.length % 2 !== 0) digits = '0' + digits;
    let size = 4 /*length prefix*/ + 1 /*sub type*/ + digits.length / 2;
    if (!isZero) size += 1; // signum byte, omitted for zero
    return size;
}
/**
 * Computes the BSON payload byte size of an arbitrary value (used for
 * schema-less / `any` contexts). Must return exactly what Writer.write()
 * emits for the same value; the 1-byte element type and the element name
 * are NOT included — callers account for those.
 *
 * Branch order matters: Date/ArrayBuffer/UUID/ObjectId/Binary/RegExp are all
 * objects, so their checks must run before the generic isObject() fallback.
 */
export function getValueSize(value: any): number {
    if ('boolean' === typeof value) {
        return 1;
    } else if ('string' === typeof value) {
        //size + content + null
        return 4 + stringByteLength(value) + 1;
    } else if ('bigint' === typeof value) {
        //for bigint in `any` context, we serialize always as binary
        return getBigIntSizeBinary(value);
    } else if ('number' === typeof value) {
        if (Math.floor(value) === value) {
            //it's an int
            if (value >= BSON_INT32_MIN && value <= BSON_INT32_MAX) {
                //32bit
                return 4;
            } else if (value >= JS_INT_MIN && value <= JS_INT_MAX) {
                //double, 64bit
                return 8;
            } else {
                //long (also 8 bytes; writer emits it as a double)
                return 8;
            }
        } else {
            //double
            return 8;
        }
    } else if (value instanceof Date) {
        // UTC datetime: int64
        return 8;
    } else if (value instanceof ArrayBuffer || ArrayBuffer.isView(value)) {
        // binary element: length prefix + sub type + raw bytes
        let size = 4; //size
        size += 1; //sub type
        size += value.byteLength;
        return size;
    } else if (isArray(value)) {
        // arrays are BSON documents keyed by the decimal index
        let size = 4; //object size
        for (let i = 0; i < value.length; i++) {
            size += 1; //element type
            size += digitByteSize(i); //element name
            size += getValueSize(value[i]);
        }
        size += 1; //null
        return size;
    } else if (isUUID(value)) {
        // binary: 4 length + 1 sub type + 16 payload
        return 4 + 1 + 16;
    } else if (isObjectId(value)) {
        return 12;
    } else if (value && value['_bsontype'] === 'Binary') {
        // mongodb driver Binary wrapper: sized by its inner buffer
        let size = 4; //size
        size += 1; //sub type
        size += value.buffer.byteLength;
        return size;
    } else if (value instanceof RegExp) {
        // cstring source + null, then the option letters, then null
        return stringByteLength(value.source) + 1
            +
            (value.global ? 1 : 0) +
            (value.ignoreCase ? 1 : 0) +
            (value.multiline ? 1 : 0) +
            1;
    } else if (isObject(value)) {
        let size = 4; //object size
        for (let i in value) {
            if (!value.hasOwnProperty(i)) continue;
            size += 1; //element type
            size += stringByteLength(i) + 1; //element name + null
            size += getValueSize(value[i]);
        }
        size += 1; //null
        return size;
    } //isObject() should be last

    // unsupported values (undefined, null, functions, symbols): no payload bytes
    return 0;
}
/**
 * Generates JS source that adds the BSON byte size of `accessor` (one
 * property of `schema`) to the enclosing JIT function's `size` variable,
 * dispatching on the property type and recursing into sub types
 * (array/map/class/union).
 *
 * Must stay in lockstep with getPropertySerializerCode(): every byte counted
 * here is written there.
 */
function getPropertySizer(schema: ClassSchema, compiler: CompilerContext, property: PropertySchema, accessor: string, jitStack: JitStack): string {
    // a decorated class serializes as its single decorated property
    if (property.type === 'class' && property.getResolvedClassSchema().decorator) {
        property = property.getResolvedClassSchema().getDecoratedPropertySchema();
        accessor = `(${accessor} && ${accessor}.${property.name})`;
    } else if (property.type === 'promise') {
        // Promise<T> is sized as its resolved T
        property = property.templateArgs[0] || new PropertySchema(property.name);
    }

    compiler.context.set('getValueSize', getValueSize);
    // fallback for types without a specialized branch below
    let code = `size += getValueSize(${accessor});`;

    if (property.type === 'array') {
        compiler.context.set('digitByteSize', digitByteSize);
        const isArrayVar = compiler.reserveVariable('isArray', isArray);
        const unpopulatedSymbolVar = compiler.reserveVariable('unpopulatedSymbol', unpopulatedSymbol);
        const i = compiler.reserveName('i');
        code = `
if (${accessor} && ${accessor} !== ${unpopulatedSymbolVar} && ${isArrayVar}(${accessor})) {
size += 4; //array size
for (let ${i} = 0; ${i} < ${accessor}.length; ${i}++) {
size += 1; //element type
size += digitByteSize(${i}); //element name
${getPropertySizer(schema, compiler, property.getSubType(), `${accessor}[${i}]`, jitStack)}
}
size += 1; //null
}
`;
    } else if (property.type === 'bigint') {
        compiler.context.set('getBigIntSizeBinary', getBigIntSizeBinary);
        code = `
if (typeof ${accessor} === 'bigint') {
size += getBigIntSizeBinary(${accessor});
}
`;
    } else if (property.type === 'number') {
        // mirrors Writer: int32 / double / long(as double); bigints become LONG (8 bytes)
        code = `
if (typeof ${accessor} === 'number') {
if (Math.floor(${accessor}) === ${accessor}) {
//it's an int
if (${accessor} >= ${BSON_INT32_MIN} && ${accessor} <= ${BSON_INT32_MAX}) {
//32bit
size += 4;
} else if (${accessor} >= ${JS_INT_MIN} && ${accessor} <= ${JS_INT_MAX}) {
//double, 64bit
size += 8;
} else {
//long
size += 8;
}
} else {
//double
size += 8;
}
} else if (typeof ${accessor} === 'bigint') {
size += 8;
}
`;
    } else if (property.type === 'string') {
        code = `
if (typeof ${accessor} === 'string') {
size += getValueSize(${accessor});
}
`;
    } else if (property.type === 'literal') {
        // NOTE(review): the generated guard below tests `!property.isOptional`
        // twice; the second operand was presumably meant to be
        // `!property.isNullable` (same pattern appears in
        // getPropertySerializerCode) — confirm.
        code = `
if (typeof ${accessor} === 'string' || typeof ${accessor} === 'number' || typeof ${accessor} === 'boolean') {
size += getValueSize(${accessor});
} else if (!${property.isOptional} && !${property.isOptional}) {
size += getValueSize(${JSON.stringify(property.literalValue)});
}
`;
    } else if (property.type === 'boolean') {
        code = `
if (typeof ${accessor} === 'boolean') {
size += 1;
}
`;
    } else if (property.type === 'map') {
        compiler.context.set('stringByteLength', stringByteLength);
        const i = compiler.reserveName('i');
        code = `
size += 4; //object size
for (let ${i} in ${accessor}) {
if (!${accessor}.hasOwnProperty(${i})) continue;
size += 1; //element type
size += stringByteLength(${i}) + 1; //element name + null;
${getPropertySizer(schema, compiler, property.getSubType(), `${accessor}[${i}]`, jitStack)}
}
size += 1; //null
`;
    } else if (property.type === 'class') {
        const forwardSchema = property.getResolvedClassSchema();
        // nested document: delegate to the (lazily created) sizer of the sub schema
        const sizerFn = jitStack.getOrCreate(forwardSchema, () => createBSONSizer(property.getResolvedClassSchema(), jitStack));
        const sizer = compiler.reserveVariable('_sizer' + property.name, sizerFn);
        const unpopulatedSymbolVar = compiler.reserveVariable('unpopulatedSymbol', unpopulatedSymbol);
        compiler.context.set('isObject', isObject);
        compiler.context.set('UUIDSymbol', UUIDSymbol);
        compiler.context.set('ObjectIdSymbol', ObjectIdSymbol);
        let primarKeyHandling = '';
        // unpopulated references are sized as their primary key only
        const isReferenceCheck = property.isReference || (property.parent && property.parent.isReference);
        if (isReferenceCheck) {
            primarKeyHandling = getPropertySizer(schema, compiler, forwardSchema.getPrimaryField(), accessor, jitStack);
        }
        let circularCheck = 'true';
        if (schema.hasCircularReference()) {
            circularCheck = `!_stack.includes(${accessor})`;
        }
        code = `
if (${accessor} !== ${unpopulatedSymbolVar}) {
if (isObject(${accessor}) && !${accessor}.hasOwnProperty(UUIDSymbol) && !${accessor}.hasOwnProperty(ObjectIdSymbol) && ${circularCheck}) {
size += ${sizer}.fn(${accessor}, _stack);
} else if (${isReferenceCheck}) {
${primarKeyHandling}
}
}
`;
    } else if (property.type === 'date') {
        code = `if (${accessor} instanceof Date) size += 8;`;
    } else if (property.type === 'objectId') {
        compiler.context.set('isObjectId', isObjectId);
        code = `if ('string' === typeof ${accessor}|| isObjectId(${accessor})) size += 12;`;
    } else if (property.type === 'uuid') {
        compiler.context.set('isUUID', isUUID);
        code = `if ('string' === typeof ${accessor} || isUUID(${accessor})) size += 4 + 1 + 16;`;
    } else if (property.type === 'arrayBuffer' || property.isTypedArray) {
        code = `
size += 4; //size
size += 1; //sub type
if (${accessor}['_bsontype'] === 'Binary') {
size += ${accessor}.buffer.byteLength
} else {
size += ${accessor}.byteLength;
}
`;
    } else if (property.type === 'union') {
        // one guarded branch per union member, ordered by guard specificity
        let discriminator: string[] = [`if (false) {\n}`];
        const discriminants: string[] = [];
        for (const unionType of getSortedUnionTypes(property, jsonTypeGuards)) {
            discriminants.push(unionType.property.type);
        }
        const elseBranch = `throw new Error('No valid discriminant was found for ${property.name}, so could not determine class type. Guard tried: [${discriminants.join(',')}]. Got: ' + ${accessor});`;
        for (const unionType of getSortedUnionTypes(property, jsonTypeGuards)) {
            const guardVar = compiler.reserveVariable('guard_' + unionType.property.type, unionType.guard);
            discriminator.push(`
//guard:${unionType.property.type}
else if (${guardVar}(${accessor})) {
${getPropertySizer(schema, compiler, unionType.property, `${accessor}`, jitStack)}
}
`);
        }
        code = `
${discriminator.join('\n')}
else {
${elseBranch}
}
`;
    }

    // since JSON does not support undefined, we emulate it via using null for serialization, and convert that back to undefined when deserialization happens
    // note: When the value is not defined (property.name in object === false), then this code will never run.
    let writeDefaultValue = `
// size += 0; //null
`;
    if (!property.hasDefaultValue && property.defaultValue !== undefined) {
        // a default factory exists: generate a fresh default, remember it for the
        // serializer via lastGeneratedDefaultValue, and size that value instead
        const propertyVar = compiler.reserveVariable('property', property);
        const cloned = property.clone();
        cloned.defaultValue = undefined;
        writeDefaultValue = `
${propertyVar}.lastGeneratedDefaultValue = ${propertyVar}.defaultValue();
${getPropertySizer(schema, compiler, cloned, `${propertyVar}.lastGeneratedDefaultValue`, jitStack)}
`;
    } else if (!property.isOptional && property.type === 'literal') {
        // non-optional literals always serialize their literal value
        writeDefaultValue = `size += getValueSize(${JSON.stringify(property.literalValue)});`;
    }

    return `
if (${accessor} === undefined || ${accessor} === unpopulatedSymbol) {
${writeDefaultValue}
} else if (${accessor} === null) {
if (${property.isNullable}) {
// size += 0; //null
} else {
${writeDefaultValue}
}
} else {
${code}
}
`;
}
/**
 * Creates a JIT compiled function that allows to get the BSON buffer size of a certain object.
 *
 * The generated function iterates all schema properties; for each property
 * present on the object it adds 1 byte element type + name + null terminator
 * + the payload size (via getPropertySizer). Absent properties fall back to
 * their default-value / null handling so the sizer stays in sync with the
 * serializer created by createBSONSerialize.
 */
export function createBSONSizer(schema: ClassSchema, jitStack: JitStack = new JitStack()): (data: object) => number {
    const compiler = new CompilerContext;
    let getSizeCode: string[] = [];
    const prepared = jitStack.prepare(schema);

    for (const property of schema.getProperties()) {
        //todo, support non-ascii names
        // NOTE(review): `property.name.length` below counts UTF-16 code units,
        // not UTF-8 bytes — only correct for ASCII property names (see todo).
        let setDefault = '';
        if (property.hasManualDefaultValue() || property.type === 'literal') {
            if (property.defaultValue !== undefined) {
                // generate the default once, remember it on the property so the
                // serializer writes the exact same value
                const propertyVar = compiler.reserveVariable('property', property);
                setDefault = `
size += 1; //type
size += ${property.name.length} + 1; //property name
${propertyVar}.lastGeneratedDefaultValue = ${propertyVar}.defaultValue();
${getPropertySizer(schema, compiler, property, `${propertyVar}.lastGeneratedDefaultValue`, jitStack)}
`;
            } else if (property.type === 'literal' && !property.isOptional) {
                setDefault = `
size += 1; //type
size += ${property.name.length} + 1; //property name
${getPropertySizer(schema, compiler, property, JSON.stringify(property.literalValue), jitStack)}`;
            }
        } else if (property.isNullable) {
            // nullable without default: absent properties serialize as BSON null
            setDefault = `
size += 1; //type null
size += ${property.name.length} + 1; //property name
`;
        }
        getSizeCode.push(`
//${property.name}
if (${JSON.stringify(property.name)} in obj) {
size += 1; //type
size += ${property.name.length} + 1; //property name
${getPropertySizer(schema, compiler, property, `obj.${property.name}`, jitStack)}
} else {
${setDefault}
}
`);
    }

    compiler.context.set('_global', getGlobalStore());
    compiler.context.set('unpopulatedSymbol', unpopulatedSymbol);
    compiler.context.set('UnpopulatedCheck', UnpopulatedCheck);
    compiler.context.set('seekElementSize', seekElementSize);

    // circular references are tracked via a shared _stack of visited objects
    let circularCheckBeginning = '';
    let circularCheckEnd = '';
    if (schema.hasCircularReference()) {
        circularCheckBeginning = `
if (!_stack) _stack = [];
_stack.push(obj);
`;
        circularCheckEnd = `_stack.pop();`;
    }

    const functionCode = `
${circularCheckBeginning}
let size = 4; //object size
const unpopulatedCheck = _global.unpopulatedCheck;
_global.unpopulatedCheck = UnpopulatedCheck.ReturnSymbol;
${getSizeCode.join('\n')}
size += 1; //null
_global.unpopulatedCheck = unpopulatedCheck;
${circularCheckEnd}
return size;
`;

    try {
        const fn = compiler.build(functionCode, 'obj', '_stack');
        prepared(fn);
        return fn;
    } catch (error) {
        // log the generated source — invaluable when the codegen itself is broken
        console.log('Error compiling BSON sizer', functionCode);
        throw error;
    }
}
/**
 * Low-level BSON writer over a pre-allocated Uint8Array.
 *
 * Multi-byte integers and doubles are written little-endian (per the BSON
 * spec). The buffer must be exactly pre-sized (createBSONSizer/getValueSize);
 * no bounds checks are performed here.
 */
export class Writer {
    // view over the same memory as `buffer`, used for multi-byte writes
    public dataView: DataView;

    constructor(public buffer: Uint8Array, public offset: number = 0) {
        this.dataView = new DataView(buffer.buffer, buffer.byteOffset, buffer.byteLength);
    }

    writeUint32(v: number) {
        this.dataView.setUint32(this.offset, v, true);
        this.offset += 4;
    }

    writeInt32(v: number) {
        this.dataView.setInt32(this.offset, v, true);
        this.offset += 4;
    }

    writeDouble(v: number) {
        this.dataView.setFloat64(this.offset, v, true);
        this.offset += 8;
    }

    // backpatches a uint32 (e.g. a document size) at an earlier position
    // without moving the current offset
    writeDelayedSize(v: number, position: number) {
        this.dataView.setUint32(position, v, true);
    }

    writeByte(v: number) {
        this.buffer[this.offset++] = v;
    }

    // copies `buffer` (from `offset` within it) byte by byte into this writer
    writeBuffer(buffer: Uint8Array, offset: number = 0) {
        // buffer.copy(this.buffer, this.buffer.byteOffset + this.offset);
        for (let i = offset; i < buffer.byteLength; i++) {
            this.buffer[this.offset++] = buffer[i];
        }
        // this.offset += buffer.byteLength;
    }

    writeNull() {
        this.writeByte(0);
    }

    // writes one byte per UTF-16 code unit; correct only for ASCII input
    writeAsciiString(str: string) {
        for (let i = 0; i < str.length; i++) {
            this.buffer[this.offset++] = str.charCodeAt(i);
        }
    }

    /**
     * Writes `str` as UTF-8, encoding each UTF-16 code unit independently
     * (surrogate halves become 3 bytes each) — must match stringByteLength.
     * No length prefix or null terminator is written here.
     */
    writeString(str: string) {
        if (!str) return;
        if (typeof str !== 'string') return;
        for (let i = 0; i < str.length; i++) {
            const c = str.charCodeAt(i);
            if (c < 128) {
                this.buffer[this.offset++] = c;
            } else if (c > 127 && c < 2048) {
                this.buffer[this.offset++] = (c >> 6) | 192;
                this.buffer[this.offset++] = ((c & 63) | 128);
            } else {
                this.buffer[this.offset++] = (c >> 12) | 224;
                this.buffer[this.offset++] = ((c >> 6) & 63) | 128;
                this.buffer[this.offset++] = (c & 63) | 128;
            }
        }
    }

    // picks the smallest BSON element type capable of representing `value`
    getBigIntBSONType(value: bigint): number {
        if (BSON_INT32_MIN <= value && value <= BSON_INT32_MAX) {
            return BSONType.INT;
        } else if (LONG_MIN <= value && value <= LONG_MAX) {
            return BSONType.LONG;
        } else {
            return BSONType.BINARY;
        }
    }

    // writes a bigint as int64: low uint32 then high uint32, two's complement
    writeBigIntLong(value: bigint) {
        if (value < 0) {
            this.writeInt32(~Number(-value % BigInt(TWO_PWR_32_DBL_N)) + 1 | 0); //low
            this.writeInt32(~(Number(-value / BigInt(TWO_PWR_32_DBL_N))) | 0); //high
        } else {
            this.writeInt32(Number(value % BigInt(TWO_PWR_32_DBL_N)) | 0); //low
            this.writeInt32(Number(value / BigInt(TWO_PWR_32_DBL_N)) | 0); //high
        }
    }

    /**
     * Custom binary encoding for arbitrary-precision bigints:
     * uint32 length + BIGINT sub type + signum byte (0/1/255) + magnitude
     * bytes from the zero-padded hex representation.
     * Must match getBigIntSizeBinary().
     */
    writeBigIntBinary(value: bigint) {
        //custom binary
        let hex = value.toString(16);
        let signum = hex === '0' ? 0 : 1;
        if (hex[0] === '-') {
            //negative number
            signum = -1;
            hex = hex.slice(1);
        }
        if (hex.length % 2) hex = '0' + hex;
        if (signum === 0) {
            // zero: single 0 byte, no signum byte
            this.writeUint32(1);
            this.writeByte(BSON_BINARY_SUBTYPE_BIGINT);
            this.buffer[this.offset++] = 0;
            return;
        }
        let size = Math.ceil(hex.length / 2);
        this.writeUint32(size + 1);
        this.writeByte(BSON_BINARY_SUBTYPE_BIGINT);
        this.buffer[this.offset++] = signum === 1 ? 1 : 255; //255 means -1
        for (let i = 0; i < size; i++) {
            this.buffer[this.offset++] = hexToByte(hex, i);
        }
    }

    /**
     * Writes a JS number as int64 (low/high uint32 pair, two's complement).
     * NOTE(review): the clamp literals exceed double precision
     * (9223372036854775807 rounds to 2^63), so the clamp is approximate;
     * values near the int64 bounds lose precision anyway as doubles.
     */
    writeLong(value: number) {
        if (value > 9223372036854775807) value = 9223372036854775807;
        if (value < -9223372036854775807) value = -9223372036854775807;
        if (value < 0) {
            this.writeInt32(~(-value % TWO_PWR_32_DBL_N) + 1 | 0); //low
            this.writeInt32(~(-value / TWO_PWR_32_DBL_N) | 0); //high
        } else {
            this.writeInt32((value % TWO_PWR_32_DBL_N) | 0); //low
            this.writeInt32((value / TWO_PWR_32_DBL_N) | 0); //high
        }
    }

    // binary element with UUID sub type: uint32(16) + sub type + 16 raw bytes
    writeUUID(value: string | UUID) {
        value = value instanceof UUID ? value.id : value;
        this.writeUint32(16);
        this.writeByte(BSON_BINARY_SUBTYPE_UUID);
        this.buffer[this.offset + 0] = uuidStringToByte(value, 0);
        this.buffer[this.offset + 1] = uuidStringToByte(value, 1);
        this.buffer[this.offset + 2] = uuidStringToByte(value, 2);
        this.buffer[this.offset + 3] = uuidStringToByte(value, 3);
        //-
        this.buffer[this.offset + 4] = uuidStringToByte(value, 4);
        this.buffer[this.offset + 5] = uuidStringToByte(value, 5);
        //-
        this.buffer[this.offset + 6] = uuidStringToByte(value, 6);
        this.buffer[this.offset + 7] = uuidStringToByte(value, 7);
        //-
        this.buffer[this.offset + 8] = uuidStringToByte(value, 8);
        this.buffer[this.offset + 9] = uuidStringToByte(value, 9);
        //-
        this.buffer[this.offset + 10] = uuidStringToByte(value, 10);
        this.buffer[this.offset + 11] = uuidStringToByte(value, 11);
        this.buffer[this.offset + 12] = uuidStringToByte(value, 12);
        this.buffer[this.offset + 13] = uuidStringToByte(value, 13);
        this.buffer[this.offset + 14] = uuidStringToByte(value, 14);
        this.buffer[this.offset + 15] = uuidStringToByte(value, 15);
        this.offset += 16;
    }

    // 12 raw bytes decoded from the 24-char hex string (no length prefix)
    writeObjectId(value: string | ObjectId) {
        value = 'string' === typeof value ? value : value.id;
        this.buffer[this.offset + 0] = hexToByte(value, 0);
        this.buffer[this.offset + 1] = hexToByte(value, 1);
        this.buffer[this.offset + 2] = hexToByte(value, 2);
        this.buffer[this.offset + 3] = hexToByte(value, 3);
        this.buffer[this.offset + 4] = hexToByte(value, 4);
        this.buffer[this.offset + 5] = hexToByte(value, 5);
        this.buffer[this.offset + 6] = hexToByte(value, 6);
        this.buffer[this.offset + 7] = hexToByte(value, 7);
        this.buffer[this.offset + 8] = hexToByte(value, 8);
        this.buffer[this.offset + 9] = hexToByte(value, 9);
        this.buffer[this.offset + 10] = hexToByte(value, 10);
        this.buffer[this.offset + 11] = hexToByte(value, 11);
        this.offset += 12;
    }

    /**
     * Dynamic writer for schema-less values. Branch order mirrors
     * getValueSize() exactly — both must agree on every byte.
     * `nameWriter`, when given, is called to emit the element name right
     * after the element type byte; without it only the payload is written.
     */
    write(value: any, nameWriter?: () => void): void {
        if ('boolean' === typeof value) {
            if (nameWriter) {
                this.writeByte(BSONType.BOOLEAN);
                nameWriter();
            }
            this.writeByte(value ? 1 : 0);
        } else if (value instanceof RegExp) {
            if (nameWriter) {
                this.writeByte(BSONType.REGEXP);
                nameWriter();
            }
            this.writeString(value.source);
            this.writeNull();
            if (value.ignoreCase) this.writeString('i');
            if (value.global) this.writeString('s'); //BSON does not use the RegExp flag format
            if (value.multiline) this.writeString('m');
            this.writeNull();
        } else if ('string' === typeof value) {
            //size + content + null
            if (nameWriter) {
                this.writeByte(BSONType.STRING);
                nameWriter();
            }
            const start = this.offset;
            this.offset += 4; //size placeholder
            this.writeString(value);
            this.writeByte(0); //null
            this.writeDelayedSize(this.offset - start - 4, start);
        } else if ('number' === typeof value) {
            if (Math.floor(value) === value) {
                //it's an int
                if (value >= BSON_INT32_MIN && value <= BSON_INT32_MAX) {
                    //32bit
                    if (nameWriter) {
                        this.writeByte(BSONType.INT);
                        nameWriter();
                    }
                    this.writeInt32(value);
                } else if (value >= JS_INT_MIN && value <= JS_INT_MAX) {
                    //double, 64bit
                    if (nameWriter) {
                        this.writeByte(BSONType.NUMBER);
                        nameWriter();
                    }
                    this.writeDouble(value);
                } else {
                    //long, but we serialize as Double, because deserialize will be BigInt
                    if (nameWriter) {
                        this.writeByte(BSONType.NUMBER);
                        nameWriter();
                    }
                    this.writeDouble(value);
                }
            } else {
                //double
                if (nameWriter) {
                    this.writeByte(BSONType.NUMBER);
                    nameWriter();
                }
                this.writeDouble(value);
            }
        } else if (value instanceof Date) {
            if (nameWriter) {
                this.writeByte(BSONType.DATE);
                nameWriter();
            }
            this.writeLong(value.valueOf());
        } else if (isUUID(value)) {
            if (nameWriter) {
                this.writeByte(BSONType.BINARY);
                nameWriter();
            }
            this.writeUUID(value);
        } else if ('bigint' === typeof value) {
            //this is only called for bigint in any structures.
            //to make sure the deserializing yields a bigint as well, we have to always use binary representation
            if (nameWriter) {
                this.writeByte(BSONType.BINARY);
                nameWriter();
            }
            this.writeBigIntBinary(value);
        } else if (isObjectId(value)) {
            if (nameWriter) {
                this.writeByte(BSONType.OID);
                nameWriter();
            }
            this.writeObjectId(value);
        } else if (value instanceof ArrayBuffer || ArrayBuffer.isView(value)) {
            if (nameWriter) {
                this.writeByte(BSONType.BINARY);
                nameWriter();
            }
            let view = value instanceof ArrayBuffer ? new Uint8Array(value) : new Uint8Array(value.buffer, value.byteOffset, value.byteLength);
            if ((value as any)['_bsontype'] === 'Binary') {
                // mongodb driver Binary wrapper: write its inner buffer
                view = (value as any).buffer;
            }
            this.writeUint32(value.byteLength);
            this.writeByte(BSON_BINARY_SUBTYPE_DEFAULT);
            for (let i = 0; i < value.byteLength; i++) {
                this.buffer[this.offset++] = view[i];
            }
        } else if (isArray(value)) {
            if (nameWriter) {
                this.writeByte(BSONType.ARRAY);
                nameWriter();
            }
            const start = this.offset;
            this.offset += 4; //size
            for (let i = 0; i < value.length; i++) {
                this.write(value[i], () => {
                    // array elements are named by their decimal index
                    this.writeAsciiString('' + i);
                    this.writeByte(0);
                });
            }
            this.writeNull();
            this.writeDelayedSize(this.offset - start, start);
        } else if (value === undefined) {
            if (nameWriter) {
                this.writeByte(BSONType.UNDEFINED);
                nameWriter();
            }
        } else if (value === null) {
            if (nameWriter) {
                this.writeByte(BSONType.NULL);
                nameWriter();
            }
        } else if (isObject(value)) {
            if (nameWriter) {
                this.writeByte(BSONType.OBJECT);
                nameWriter();
            }
            const start = this.offset;
            this.offset += 4; //size
            for (let i in value) {
                if (!value.hasOwnProperty(i)) continue;
                this.write(value[i], () => {
                    this.writeString(i);
                    this.writeByte(0);
                });
            }
            this.writeNull();
            this.writeDelayedSize(this.offset - start, start);
        } else {
            //the sizer includes the type and name, so we have to write that
            if (nameWriter) {
                this.writeByte(BSONType.UNDEFINED);
                nameWriter();
            }
        }
    }
}
/**
 * Emits JS source that writes the property's name as a null-terminated
 * byte sequence, inlining each UTF-16 code unit as a constant. Only
 * correct for ASCII names (one byte per code unit).
 */
function getNameWriterCode(property: PropertySchema): string {
    // split('') iterates code units (not code points), matching charCodeAt(i)
    const byteWrites = property.name
        .split('')
        .map(ch => `writer.buffer[writer.offset++] = ${ch.charCodeAt(0)};`);
    return `
//write name: '${property.name}'
${byteWrites.join('\n')}
writer.writeByte(0); //null
`;
}
/**
 * Generates JS source that serializes one property: element type byte +
 * element name + payload, written via the `writer` variable of the
 * enclosing JIT function.
 *
 * Must mirror getPropertySizer() byte for byte. `nameAccessor`, when given,
 * is a JS expression evaluated at runtime for the element name (array index
 * or map key); otherwise the property name is inlined as constant bytes.
 */
function getPropertySerializerCode(
    schema: ClassSchema,
    compiler: CompilerContext,
    property: PropertySchema,
    accessor: string,
    jitStack: JitStack,
    nameAccessor?: string,
): string {
    // parent back-references are never serialized
    if (property.isParentReference) return '';

    let nameWriter = `
writer.writeAsciiString(${nameAccessor});
writer.writeByte(0);
`;
    if (!nameAccessor) {
        nameWriter = getNameWriterCode(property);
    }

    //important to happen after the nameWriter
    if (property.type === 'promise') {
        // Promise<T> serializes as its resolved T
        property = property.templateArgs[0] || new PropertySchema(property.name);
    }

    // emitted when the runtime value does not match the declared type
    let undefinedWriter = `
writer.writeByte(${BSONType.UNDEFINED});
${nameWriter}`;

    // fallback: dynamic write via Writer.write()
    let code = `writer.write(${accessor}, () => {
${nameWriter}
});`;

    //important to put it after nameWriter and nullable check, since we want to keep the name
    if (property.type === 'class' && property.getResolvedClassSchema().decorator) {
        property = property.getResolvedClassSchema().getDecoratedPropertySchema();
        accessor = `(${accessor} && ${accessor}.${property.name})`;
    }

    if (property.type === 'class') {
        const propertySerializer = `_serializer_${property.name}`;
        const forwardSchema = property.getResolvedClassSchema();
        // nested document: delegate to the (lazily created) serializer of the sub schema
        const serializerFn = jitStack.getOrCreate(property.getResolvedClassSchema(), () => createBSONSerialize(property.getResolvedClassSchema(), jitStack));
        compiler.context.set(propertySerializer, serializerFn);
        const unpopulatedSymbolVar = compiler.reserveVariable('unpopulatedSymbol', unpopulatedSymbol);
        compiler.context.set('isObject', isObject);
        compiler.context.set('UUIDSymbol', UUIDSymbol);
        compiler.context.set('ObjectIdSymbol', ObjectIdSymbol);
        let primarKeyHandling = '';
        // unpopulated references are written as their primary key only
        const isReference = property.isReference || (property.parent && property.parent.isReference);
        if (isReference) {
            primarKeyHandling = getPropertySerializerCode(schema, compiler, forwardSchema.getPrimaryField(), accessor, jitStack, nameAccessor || JSON.stringify(property.name));
        }
        let circularCheck = 'true';
        if (schema.hasCircularReference()) {
            circularCheck = `!_stack.includes(${accessor})`;
        }
        code = `
if (${accessor} !== ${unpopulatedSymbolVar}) {
if (isObject(${accessor}) && !${accessor}.hasOwnProperty(UUIDSymbol) && !${accessor}.hasOwnProperty(ObjectIdSymbol) && ${circularCheck}) {
writer.writeByte(${BSONType.OBJECT});
${nameWriter}
${propertySerializer}.fn(${accessor}, writer, _stack);
} else if (${isReference}) {
${primarKeyHandling}
} else {
${undefinedWriter}
}
} else {
${undefinedWriter}
}
`;
    } else if (property.type === 'string') {
        code = `
if (typeof ${accessor} === 'string') {
writer.writeByte(${BSONType.STRING});
${nameWriter}
const start = writer.offset;
writer.offset += 4; //size placeholder
writer.writeString(${accessor});
writer.writeByte(0); //null
writer.writeDelayedSize(writer.offset - start - 4, start);
} else {
${undefinedWriter}
}
`;
    } else if (property.type === 'literal') {
        // NOTE(review): the generated guard below tests `!property.isOptional`
        // twice; the second operand was presumably meant to be
        // `!property.isNullable` (same pattern in getPropertySizer) — confirm.
        code = `
if (typeof ${accessor} === 'string' || typeof ${accessor} === 'number' || typeof ${accessor} === 'boolean') {
${code}
} else if (!${property.isOptional} && !${property.isOptional}) {
writer.write(${JSON.stringify(property.literalValue)}, () => {
${nameWriter}
});
} else {
${undefinedWriter}
}
`;
    } else if (property.type === 'boolean') {
        code = `
if (typeof ${accessor} === 'boolean') {
writer.writeByte(${BSONType.BOOLEAN});
${nameWriter}
writer.writeByte(${accessor} ? 1 : 0);
} else {
${undefinedWriter}
}
`;
    } else if (property.type === 'date') {
        // NOTE(review): the inner `instanceof Date` re-check below can only
        // throw if `accessor` is a getter returning different values between
        // the two evaluations — otherwise it is dead code.
        code = `
if (${accessor} instanceof Date) {
writer.writeByte(${BSONType.DATE});
${nameWriter}
if (!(${accessor} instanceof Date)) {
throw new Error(${JSON.stringify(accessor)} + " not a Date object");
}
writer.writeLong(${accessor}.valueOf());
} else {
${undefinedWriter}
}
`;
    } else if (property.type === 'objectId') {
        compiler.context.set('isObjectId', isObjectId);
        compiler.context.set('hexToByte', hexToByte);
        code = `
if ('string' === typeof ${accessor} || isObjectId(${accessor})) {
writer.writeByte(${BSONType.OID});
${nameWriter}
writer.writeObjectId(${accessor});
} else {
${undefinedWriter}
}
`;
    } else if (property.type === 'uuid') {
        compiler.context.set('isUUID', isUUID);
        compiler.context.set('UUID', UUID);
        code = `
if ('string' === typeof ${accessor} || isUUID(${accessor})) {
writer.writeByte(${BSONType.BINARY});
${nameWriter}
writer.writeUUID(${accessor});
} else {
${undefinedWriter}
}
`;
    } else if (property.type === 'bigint') {
        // always binary so deserialization yields a bigint again
        code = `
if ('bigint' === typeof ${accessor}) {
writer.writeByte(${BSONType.BINARY});
${nameWriter}
writer.writeBigIntBinary(${accessor});
} else {
${undefinedWriter}
}
`;
    } else if (property.type === 'number') {
        // mirrors getPropertySizer's number branch: INT / NUMBER / LONG
        code = `
if ('bigint' === typeof ${accessor}) {
//long
writer.writeByte(${BSONType.LONG});
${nameWriter}
writer.writeBigIntLong(${accessor});
} else if ('number' === typeof ${accessor}) {
if (Math.floor(${accessor}) === ${accessor}) {
//it's an int
if (${accessor} >= ${BSON_INT32_MIN} && ${accessor} <= ${BSON_INT32_MAX}) {
//32bit
writer.writeByte(${BSONType.INT});
${nameWriter}
writer.writeInt32(${accessor});
} else if (${accessor} >= ${JS_INT_MIN} && ${accessor} <= ${JS_INT_MAX}) {
//double, 64bit
writer.writeByte(${BSONType.NUMBER});
${nameWriter}
writer.writeDouble(${accessor});
} else {
//long, but we serialize as Double, because deserialize will be BigInt
writer.writeByte(${BSONType.NUMBER});
${nameWriter}
writer.writeDouble(${accessor});
}
} else {
//double, 64bit
writer.writeByte(${BSONType.NUMBER});
${nameWriter}
writer.writeDouble(${accessor});
}
} else {
${undefinedWriter}
}
`;
    } else if (property.type === 'array') {
        const i = compiler.reserveName('i');
        const isArrayVar = compiler.reserveVariable('isArray', isArray);
        const unpopulatedSymbolVar = compiler.reserveVariable('unpopulatedSymbol', unpopulatedSymbol);
        code = `
if (${accessor} && ${accessor} !== ${unpopulatedSymbolVar} && ${isArrayVar}(${accessor})) {
writer.writeByte(${BSONType.ARRAY});
${nameWriter}
const start = writer.offset;
writer.offset += 4; //size
for (let ${i} = 0; ${i} < ${accessor}.length; ${i}++) {
//${property.getSubType().name} (${property.getSubType().type})
${getPropertySerializerCode(schema, compiler, property.getSubType(), `${accessor}[${i}]`, jitStack, `''+${i}`)}
}
writer.writeNull();
writer.writeDelayedSize(writer.offset - start, start);
} else {
${undefinedWriter}
}
`;
    } else if (property.type === 'map') {
        const i = compiler.reserveName('i');
        code = `
writer.writeByte(${BSONType.OBJECT});
${nameWriter}
const start = writer.offset;
writer.offset += 4; //size
for (let ${i} in ${accessor}) {
if (!${accessor}.hasOwnProperty(${i})) continue;
//${property.getSubType().name} (${property.getSubType().type})
${getPropertySerializerCode(schema, compiler, property.getSubType(), `${accessor}[${i}]`, jitStack, `${i}`)}
}
writer.writeNull();
writer.writeDelayedSize(writer.offset - start, start);
`;
    } else if (property.type === 'union') {
        // one guarded branch per union member, ordered by guard specificity
        let discriminator: string[] = [`if (false) {\n}`];
        const discriminants: string[] = [];
        for (const unionType of getSortedUnionTypes(property, jsonTypeGuards)) {
            discriminants.push(unionType.property.type);
        }
        const elseBranch = `throw new Error('No valid discriminant was found for ${property.name}, so could not determine class type. Guard tried: [${discriminants.join(',')}]. Got: ' + ${accessor});`;
        for (const unionType of getSortedUnionTypes(property, jsonTypeGuards)) {
            const guardVar = compiler.reserveVariable('guard_' + unionType.property.type, unionType.guard);
            discriminator.push(`
//guard
else if (${guardVar}(${accessor})) {
//${unionType.property.name} (${unionType.property.type})
${getPropertySerializerCode(schema, compiler, unionType.property, `${accessor}`, jitStack, nameAccessor || JSON.stringify(property.name))}
}
`);
        }
        code = `
${discriminator.join('\n')}
else {
${elseBranch}
}
`;
    }

    // since JSON does not support undefined, we emulate it via using null for serialization, and convert that back to undefined when deserialization happens
    // note: When the value is not defined (property.name in object === false), then this code will never run.
    let writeDefaultValue = `
writer.writeByte(${BSONType.NULL});
${nameWriter}
`;
    if (!property.hasDefaultValue && property.defaultValue !== undefined) {
        // the sizer already generated lastGeneratedDefaultValue; serialize that
        // exact value so sizer and serializer agree
        const propertyVar = compiler.reserveVariable('property', property);
        const cloned = property.clone();
        cloned.defaultValue = undefined;
        writeDefaultValue = getPropertySerializerCode(schema, compiler, cloned, `${propertyVar}.lastGeneratedDefaultValue`, jitStack);
    } else if (!property.isOptional && property.type === 'literal') {
        // non-optional literals always serialize their literal value
        writeDefaultValue = `writer.write(${JSON.stringify(property.literalValue)}, () => {${nameWriter}});`;
    }

    // Since mongodb does not support undefined as column type (or better it shouldn't be used that way)
    // we transport fields that are `undefined` and isOptional as `null`, and decode this `null` back to `undefined`.
    return `
if (${accessor} === undefined || ${accessor} === unpopulatedSymbol) {
${writeDefaultValue}
} else if (${accessor} === null) {
if (${property.isNullable}) {
writer.writeByte(${BSONType.NULL});
${nameWriter}
} else {
${writeDefaultValue}
}
} else {
//serialization code
${code}
}
`;
}
/**
 * JIT-compiles a serializer that writes an instance of `schema` as a BSON
 * document. The generated function first asks the matching sizer for the exact
 * byte count, allocates a buffer of that size (unless a Writer is supplied),
 * writes the int32 document size, every property, and the trailing zero byte,
 * and finally throws if the bytes actually written diverge from the
 * precomputed size (a sizer/serializer mismatch would produce corrupt BSON).
 *
 * NOTE(review): the default-value path reads `lastGeneratedDefaultValue` off
 * the PropertySchema — per the inline comment this is produced by the sizer,
 * which the generated code always invokes first via `_sizer(obj, _stack)`.
 */
function createBSONSerialize(schema: ClassSchema, jitStack: JitStack = new JitStack()): (data: object, writer?: Writer) => Uint8Array {
const compiler = new CompilerContext();
const prepared = jitStack.prepare(schema);
// Runtime values the generated function closes over.
compiler.context.set('_global', getGlobalStore());
compiler.context.set('UnpopulatedCheck', UnpopulatedCheck);
compiler.context.set('unpopulatedSymbol', unpopulatedSymbol);
compiler.context.set('_sizer', getBSONSizer(schema));
compiler.context.set('Writer', Writer);
compiler.context.set('seekElementSize', seekElementSize);
compiler.context.set('createBuffer', createBuffer);
compiler.context.set('schema', schema);
let functionCode = '';
let getPropertyCode: string[] = [];
for (const property of schema.getProperties()) {
// Code to emit when the property key is absent from the input object.
let setDefault = '';
if (property.hasManualDefaultValue() || property.type === 'literal') {
if (property.defaultValue !== undefined) {
const propertyVar = compiler.reserveVariable('property', property);
//the sizer creates for us a lastGeneratedDefaultValue
setDefault = getPropertySerializerCode(schema, compiler, property, `${propertyVar}.lastGeneratedDefaultValue`, jitStack);
} else if (property.type === 'literal' && !property.isOptional) {
setDefault = getPropertySerializerCode(schema, compiler, property, JSON.stringify(property.literalValue), jitStack);
}
} else if (property.isNullable) {
setDefault = getPropertySerializerCode(schema, compiler, property, 'null', jitStack);
}
getPropertyCode.push(`
//${property.name}:${property.type}
if (${JSON.stringify(property.name)} in obj) {
${getPropertySerializerCode(schema, compiler, property, `obj.${property.name}`, jitStack)}
} else {
${setDefault}
}
`);
}
// For self-referencing schemas, visited objects are tracked on a stack that is
// threaded through nested serializer calls (and into the sizer) via `_stack`.
let circularCheckBeginning = '';
let circularCheckEnd = '';
if (schema.hasCircularReference()) {
circularCheckBeginning = `
if (!_stack) _stack = [];
_stack.push(obj);
`;
circularCheckEnd = `_stack.pop();`;
}
// Document layout: int32 size, property elements, trailing null byte.
// The unpopulated-check mode is switched to ReturnSymbol for the duration so
// unpopulated references serialize as their default instead of throwing.
functionCode = `
${circularCheckBeginning}
const size = _sizer(obj, _stack);
writer = writer || new Writer(createBuffer(size));
const started = writer.offset;
writer.writeUint32(size);
const unpopulatedCheck = _global.unpopulatedCheck;
_global.unpopulatedCheck = UnpopulatedCheck.ReturnSymbol;
${getPropertyCode.join('\n')}
writer.writeNull();
_global.unpopulatedCheck = unpopulatedCheck;
if (size !== writer.offset - started) {
console.error('Wrong size calculated. Calculated=' + size + ', but serializer wrote ' + (writer.offset - started) + ' bytes. Object: ', JSON.stringify(obj), Object.getOwnPropertyNames(obj), schema.toString());
throw new Error('Wrong size calculated. Calculated=' + size + ', but serializer wrote ' + (writer.offset - started) + ' bytes');
}
${circularCheckEnd}
return writer.buffer;
`;
const fn = compiler.build(functionCode, 'obj', 'writer', '_stack');
// Register the compiled function in the JIT stack so recursive schema
// references resolve to this serializer.
prepared(fn);
return fn;
}
export function serialize(data: any): Uint8Array {
const size = getValueSize(data);
const writer = new Writer(createBuffer(size));
writer.write(data);
return writer.buffer;
}
/** A JIT-compiled function that serializes `data` to a BSON byte buffer; an existing Writer may be passed to append into it instead of allocating. */
export type BSONSerializer = (data: any, writer?: Writer) => Uint8Array;
/** A JIT-compiled function that computes the exact number of bytes `data` will occupy when serialized to BSON. */
export type BSONSizer = (data: any) => number;
/**
* Serializes an schema instance to BSON.
*
* Note: The instances needs to be in the mongo format already since it does not resolve decorated properties.
* So call it with the result of classToMongo(Schema, item).
*/
export function getBSONSerializer(schema: ClassSchema | ClassType): BSONSerializer {
schema = getClassSchema(schema);
const jit = schema.jit;
if (jit.bsonSerializer) return jit.bsonSerializer;
jit.bsonSerializer = createBSONSerialize(schema);
toFastProperties(jit);
return jit.bsonSerializer;
}
export function getBSONSizer(schema: ClassSchema | ClassType): BSONSizer {
schema = getClassSchema(schema);
const jit = schema.jit;
if (jit.bsonSizer) return jit.bsonSizer;
jit.bsonSizer = createBSONSizer(schema);
toFastProperties(jit);
return jit.bsonSizer;
} | the_stack |
import { ParserContext } from './ParserContext';
import { Token, TokenKind } from './Token';
import { Tokenizer } from './Tokenizer';
import {
DocBlockTag,
DocCodeSpan,
DocErrorText,
DocEscapedText,
DocHtmlAttribute,
DocHtmlEndTag,
DocHtmlStartTag,
DocInlineTag,
DocNode,
DocPlainText,
DocSoftBreak,
EscapeStyle,
DocComment,
DocBlock,
DocNodeKind,
DocSection,
DocParamBlock,
DocFencedCode,
DocLinkTag,
IDocLinkTagParameters,
DocMemberReference,
DocDeclarationReference,
DocMemberSymbol,
DocMemberIdentifier,
DocMemberSelector,
DocInheritDocTag,
IDocInheritDocTagParameters,
IDocInlineTagParsedParameters,
DocInlineTagBase,
IDocLinkTagParsedParameters,
IDocMemberReferenceParsedParameters
} from '../nodes';
import { TokenSequence } from './TokenSequence';
import { TokenReader } from './TokenReader';
import { StringChecks } from './StringChecks';
import { ModifierTagSet } from '../details/ModifierTagSet';
import { TSDocConfiguration } from '../configuration/TSDocConfiguration';
import { TSDocTagDefinition, TSDocTagSyntaxKind } from '../configuration/TSDocTagDefinition';
import { StandardTags } from '../details/StandardTags';
import { PlainTextEmitter } from '../emitters/PlainTextEmitter';
import { TSDocMessageId } from './TSDocMessageId';
interface IFailure {
    // (We use "failureMessage" instead of "errorMessage" here so that DocErrorText doesn't
    // accidentally implement this interface.)

    /** Identifies the kind of failure for programmatic handling. */
    failureMessageId: TSDocMessageId;

    /** Human-readable description of what went wrong. */
    failureMessage: string;

    /** The input region that the failure refers to. */
    failureLocation: TokenSequence;
}

/** Either a successfully parsed result, or an IFailure explaining why parsing failed. */
type ResultOrFailure<T> = T | IFailure;

/**
 * Type guard that distinguishes an IFailure from a successful result.
 *
 * Fix: the original only guarded against `undefined` before probing for the
 * marker property, so a `null` result would make `hasOwnProperty.call(null, …)`
 * throw a TypeError. We now reject both nullish values, and call
 * hasOwnProperty via Object.prototype (the conventional, prototype-safe form).
 */
function isFailure<T>(resultOrFailure: ResultOrFailure<T>): resultOrFailure is IFailure {
    return (
        resultOrFailure !== undefined &&
        resultOrFailure !== null &&
        Object.prototype.hasOwnProperty.call(resultOrFailure, 'failureMessage')
    );
}
/**
* The main parser for TSDoc comments.
*/
export class NodeParser {
// Shared parse state: token stream, the DocComment being built, and the log.
private readonly _parserContext: ParserContext;
// Convenience reference to parserContext.configuration.
private readonly _configuration: TSDocConfiguration;
// The section newly parsed nodes get appended to; starts as the summary
// section and is switched whenever a block tag (e.g. @remarks, @param)
// opens a new content block.
private _currentSection: DocSection;
public constructor(parserContext: ParserContext) {
this._parserContext = parserContext;
this._configuration = parserContext.configuration;
this._currentSection = parserContext.docComment.summarySection;
}
/**
 * The main parsing loop: repeatedly dispatches on the next token kind,
 * converting the token stream into DocNodes appended to the current section,
 * until end of input. Unrecognized tokens accumulate as plain text, which is
 * flushed as a DocPlainText node whenever a structured construct begins.
 * Finishes by flushing remaining plain text and running validation checks.
 */
public parse(): void {
const tokenReader: TokenReader = new TokenReader(this._parserContext);
let done: boolean = false;
while (!done) {
// Extract the next token
switch (tokenReader.peekTokenKind()) {
case TokenKind.EndOfInput:
done = true;
break;
case TokenKind.Newline:
// A newline ends the current plain-text run and becomes a DocSoftBreak.
this._pushAccumulatedPlainText(tokenReader);
tokenReader.readToken();
this._pushNode(
new DocSoftBreak({
parsed: true,
configuration: this._configuration,
softBreakExcerpt: tokenReader.extractAccumulatedSequence()
})
);
break;
case TokenKind.Backslash:
this._pushAccumulatedPlainText(tokenReader);
this._pushNode(this._parseBackslashEscape(tokenReader));
break;
case TokenKind.AtSign:
// "@tag" at this level starts a block tag (e.g. @remarks, @param).
this._pushAccumulatedPlainText(tokenReader);
this._parseAndPushBlock(tokenReader);
break;
case TokenKind.LeftCurlyBracket: {
// "{" starts an inline tag such as {@link} or {@inheritDoc}.
this._pushAccumulatedPlainText(tokenReader);
const marker: number = tokenReader.createMarker();
const docNode: DocNode = this._parseInlineTag(tokenReader);
const docComment: DocComment = this._parserContext.docComment;
if (docNode instanceof DocInheritDocTag) {
// The @inheritDoc tag is irregular because it looks like an inline tag, but
// it actually represents the entire comment body
const tagEndMarker: number = tokenReader.createMarker() - 1;
if (docComment.inheritDocTag === undefined) {
this._parserContext.docComment.inheritDocTag = docNode;
} else {
// Only one @inheritDoc is allowed; report the duplicate as error text.
this._pushNode(
this._backtrackAndCreateErrorRange(
tokenReader,
marker,
tagEndMarker,
TSDocMessageId.ExtraInheritDocTag,
'A doc comment cannot have more than one @inheritDoc tag'
)
);
}
} else {
this._pushNode(docNode);
}
break;
}
case TokenKind.RightCurlyBracket:
// A bare "}" is ambiguous with the inline-tag terminator; require escaping.
this._pushAccumulatedPlainText(tokenReader);
this._pushNode(
this._createError(
tokenReader,
TSDocMessageId.EscapeRightBrace,
'The "}" character should be escaped using a backslash to avoid confusion with a TSDoc inline tag'
)
);
break;
case TokenKind.LessThan:
this._pushAccumulatedPlainText(tokenReader);
// Look ahead two tokens to see if this is "<a>" or "</a>".
if (tokenReader.peekTokenAfterKind() === TokenKind.Slash) {
this._pushNode(this._parseHtmlEndTag(tokenReader));
} else {
this._pushNode(this._parseHtmlStartTag(tokenReader));
}
break;
case TokenKind.GreaterThan:
this._pushAccumulatedPlainText(tokenReader);
this._pushNode(
this._createError(
tokenReader,
TSDocMessageId.EscapeGreaterThan,
'The ">" character should be escaped using a backslash to avoid confusion with an HTML tag'
)
);
break;
case TokenKind.Backtick:
this._pushAccumulatedPlainText(tokenReader);
// Three backticks start a fenced code block; a single one starts a code span.
if (
tokenReader.peekTokenAfterKind() === TokenKind.Backtick &&
tokenReader.peekTokenAfterAfterKind() === TokenKind.Backtick
) {
this._pushNode(this._parseFencedCode(tokenReader));
} else {
this._pushNode(this._parseCodeSpan(tokenReader));
}
break;
default:
// If nobody recognized this token, then accumulate plain text
tokenReader.readToken();
break;
}
}
// Flush any plain text remaining after the last recognized construct.
this._pushAccumulatedPlainText(tokenReader);
this._performValidationChecks();
}
/**
 * Cross-cutting checks that can only run after the whole comment is parsed:
 * a @deprecated block must contain a message, and @inheritDoc must not be
 * combined with a @remarks block or summary content.
 */
private _performValidationChecks(): void {
    const docComment: DocComment = this._parserContext.docComment;
    const log = this._parserContext.log;

    const deprecatedBlock = docComment.deprecatedBlock;
    if (deprecatedBlock && !PlainTextEmitter.hasAnyTextContent(deprecatedBlock)) {
        log.addMessageForTokenSequence(
            TSDocMessageId.MissingDeprecationMessage,
            `The ${deprecatedBlock.blockTag.tagName} block must include a deprecation message,` +
                ` e.g. describing the recommended alternative`,
            deprecatedBlock.blockTag.getTokenSequence(),
            deprecatedBlock
        );
    }

    if (!docComment.inheritDocTag) {
        return;
    }

    // @inheritDoc supplies the comment body, so locally written content conflicts.
    const remarksBlock = docComment.remarksBlock;
    if (remarksBlock) {
        log.addMessageForTokenSequence(
            TSDocMessageId.InheritDocIncompatibleTag,
            `A "${remarksBlock.blockTag.tagName}" block must not be used, because that` +
                ` content is provided by the @inheritDoc tag`,
            remarksBlock.blockTag.getTokenSequence(),
            remarksBlock.blockTag
        );
    }
    if (PlainTextEmitter.hasAnyTextContent(docComment.summarySection)) {
        log.addMessageForTextRange(
            TSDocMessageId.InheritDocIncompatibleSummary,
            'The summary section must not have any content, because that' +
                ' content is provided by the @inheritDoc tag',
            this._parserContext.commentRange
        );
    }
}
/**
 * Reports diagnostics for a tag that was just parsed: undefined tags,
 * inline/block usage mismatches, and tags that are defined but not supported
 * by the current configuration.
 */
private _validateTagDefinition(
    tagDefinition: TSDocTagDefinition | undefined,
    tagName: string,
    expectingInlineTag: boolean,
    tokenSequenceForErrorContext: TokenSequence,
    nodeForErrorContext: DocNode
): void {
    const configuration = this._parserContext.configuration;
    const log = this._parserContext.log;

    if (tagDefinition === undefined) {
        // The tag is not defined at all.
        if (!configuration.validation.ignoreUndefinedTags) {
            log.addMessageForTokenSequence(
                TSDocMessageId.UndefinedTag,
                `The TSDoc tag "${tagName}" is not defined in this configuration`,
                tokenSequenceForErrorContext,
                nodeForErrorContext
            );
        }
        return;
    }

    const isInlineTag: boolean = tagDefinition.syntaxKind === TSDocTagSyntaxKind.InlineTag;
    if (isInlineTag !== expectingInlineTag) {
        // The tag is defined, but used with the wrong syntax (braces vs no braces).
        if (expectingInlineTag) {
            log.addMessageForTokenSequence(
                TSDocMessageId.TagShouldNotHaveBraces,
                `The TSDoc tag "${tagName}" is not an inline tag; it must not be enclosed in "{ }" braces`,
                tokenSequenceForErrorContext,
                nodeForErrorContext
            );
        } else {
            log.addMessageForTokenSequence(
                TSDocMessageId.InlineTagMissingBraces,
                `The TSDoc tag "${tagName}" is an inline tag; it must be enclosed in "{ }" braces`,
                tokenSequenceForErrorContext,
                nodeForErrorContext
            );
        }
        return;
    }

    // The tag is defined and used correctly, but may not be supported by this tool.
    if (configuration.validation.reportUnsupportedTags && !configuration.isTagSupported(tagDefinition)) {
        log.addMessageForTokenSequence(
            TSDocMessageId.UnsupportedTag,
            `The TSDoc tag "${tagName}" is not supported by this tool`,
            tokenSequenceForErrorContext,
            nodeForErrorContext
        );
    }
}
/**
 * Flushes any plain-text tokens accumulated by the reader into a DocPlainText
 * node; a no-op when nothing has accumulated.
 */
private _pushAccumulatedPlainText(tokenReader: TokenReader): void {
    if (tokenReader.isAccumulatedSequenceEmpty()) {
        return;
    }
    const textExcerpt: TokenSequence = tokenReader.extractAccumulatedSequence();
    this._pushNode(
        new DocPlainText({
            parsed: true,
            configuration: this._configuration,
            textExcerpt
        })
    );
}
/**
 * Handles an "@" token at the top level: parses a block tag and, depending on
 * its definition, either opens a new block section (@remarks, @param, ...),
 * records a modifier tag, or pushes the tag node as ordinary content.
 * Side effect: switches this._currentSection when a block is opened, so that
 * subsequent nodes are appended to the new block.
 */
private _parseAndPushBlock(tokenReader: TokenReader): void {
const docComment: DocComment = this._parserContext.docComment;
const configuration: TSDocConfiguration = this._parserContext.configuration;
const modifierTagSet: ModifierTagSet = docComment.modifierTagSet;
const parsedBlockTag: DocNode = this._parseBlockTag(tokenReader);
// If tag parsing failed, parsedBlockTag is an error node; just push it through.
if (parsedBlockTag.kind !== DocNodeKind.BlockTag) {
this._pushNode(parsedBlockTag);
return;
}
const docBlockTag: DocBlockTag = parsedBlockTag as DocBlockTag;
// Do we have a definition for this tag?
const tagDefinition: TSDocTagDefinition | undefined = configuration.tryGetTagDefinitionWithUpperCase(
docBlockTag.tagNameWithUpperCase
);
this._validateTagDefinition(
tagDefinition,
docBlockTag.tagName,
/* expectingInlineTag */ false,
docBlockTag.getTokenSequence(),
docBlockTag
);
if (tagDefinition) {
switch (tagDefinition.syntaxKind) {
case TSDocTagSyntaxKind.BlockTag:
// @param and @typeParam get specialized parsing that extracts the
// parameter name; later content flows into the param block's section.
if (docBlockTag.tagNameWithUpperCase === StandardTags.param.tagNameWithUpperCase) {
const docParamBlock: DocParamBlock = this._parseParamBlock(
tokenReader,
docBlockTag,
StandardTags.param.tagName
);
this._parserContext.docComment.params.add(docParamBlock);
this._currentSection = docParamBlock.content;
return;
} else if (docBlockTag.tagNameWithUpperCase === StandardTags.typeParam.tagNameWithUpperCase) {
const docParamBlock: DocParamBlock = this._parseParamBlock(
tokenReader,
docBlockTag,
StandardTags.typeParam.tagName
);
this._parserContext.docComment.typeParams.add(docParamBlock);
this._currentSection = docParamBlock.content;
return;
} else {
// Any other block tag opens a generic DocBlock.
const newBlock: DocBlock = new DocBlock({
configuration: this._configuration,
blockTag: docBlockTag
});
this._addBlockToDocComment(newBlock);
this._currentSection = newBlock.content;
}
return;
case TSDocTagSyntaxKind.ModifierTag:
// The block tag was recognized as a modifier, so add it to the modifier tag set
// and do NOT call currentSection.appendNode(parsedNode)
modifierTagSet.addTag(docBlockTag);
return;
}
}
// Tag was undefined, or defined as an inline tag (misused): push as content.
this._pushNode(docBlockTag);
}
/**
 * Stores a parsed DocBlock on the DocComment: well-known tags go to dedicated
 * properties (@remarks, @privateRemarks, @deprecated, @returns, @see);
 * anything else is appended as a custom block.
 */
private _addBlockToDocComment(block: DocBlock): void {
    const docComment: DocComment = this._parserContext.docComment;
    const tagNameUpper: string = block.blockTag.tagNameWithUpperCase;

    if (tagNameUpper === StandardTags.remarks.tagNameWithUpperCase) {
        docComment.remarksBlock = block;
    } else if (tagNameUpper === StandardTags.privateRemarks.tagNameWithUpperCase) {
        docComment.privateRemarks = block;
    } else if (tagNameUpper === StandardTags.deprecated.tagNameWithUpperCase) {
        docComment.deprecatedBlock = block;
    } else if (tagNameUpper === StandardTags.returns.tagNameWithUpperCase) {
        docComment.returnsBlock = block;
    } else if (tagNameUpper === StandardTags.see.tagNameWithUpperCase) {
        docComment._appendSeeBlock(block);
    } else {
        docComment.appendCustomBlock(block);
    }
}
/**
 * Used by `_parseParamBlock()`, this parses a JSDoc expression remainder like `string}` or `="]"]` from
 * an input like `@param {string} [x="]"] - the X value`. It detects nested balanced pairs of delimiters
 * and escaped string literals.
 *
 * @param openKind - delimiter that increases nesting (the caller has already consumed one)
 * @param closeKind - delimiter that decreases nesting; parsing stops when depth returns to zero
 * @param startMarker - marker to backtrack to if the input ends before delimiters balance
 * @returns the accumulated token sequence, or undefined if end of input was reached
 */
private _tryParseJSDocTypeOrValueRest(
tokenReader: TokenReader,
openKind: TokenKind,
closeKind: TokenKind,
startMarker: number
): TokenSequence | undefined {
// Which quote character (if any) we are currently inside of.
let quoteKind: TokenKind | undefined;
// Nesting depth starts at 1 because the caller consumed the first open delimiter.
let openCount: number = 1;
while (openCount > 0) {
let tokenKind: TokenKind = tokenReader.peekTokenKind();
switch (tokenKind) {
case openKind:
// ignore open bracket/brace inside of a quoted string
if (quoteKind === undefined) openCount++;
break;
case closeKind:
// ignore close bracket/brace inside of a quoted string
if (quoteKind === undefined) openCount--;
break;
case TokenKind.Backslash:
// ignore backslash outside of quoted string
if (quoteKind !== undefined) {
// skip the backslash and the next character.
tokenReader.readToken();
tokenKind = tokenReader.peekTokenKind();
}
break;
case TokenKind.DoubleQuote:
case TokenKind.SingleQuote:
case TokenKind.Backtick:
if (quoteKind === tokenKind) {
// exit quoted string if quote character matches.
quoteKind = undefined;
} else if (quoteKind === undefined) {
// start quoted string if not in a quoted string.
quoteKind = tokenKind;
}
break;
}
// give up at end of input and backtrack to start.
if (tokenKind === TokenKind.EndOfInput) {
tokenReader.backtrackToMarker(startMarker);
return undefined;
}
tokenReader.readToken();
}
return tokenReader.tryExtractAccumulatedSequence();
}
/**
 * Used by `_parseParamBlock()`, this parses a JSDoc expression like `{string}` from
 * an input like `@param {string} x - the X value`.
 *
 * If a JSDoc type is found, it is not valid TSDoc: a warning is logged and the
 * consumed excerpt (extended to cover any trailing spacing) is returned so the
 * caller can record it. Otherwise undefined is returned and nothing is consumed.
 */
private _tryParseUnsupportedJSDocType(
tokenReader: TokenReader,
docBlockTag: DocBlockTag,
tagName: string
): TokenSequence | undefined {
tokenReader.assertAccumulatedSequenceIsEmpty();
// do not parse `{@...` as a JSDoc type
if (
tokenReader.peekTokenKind() !== TokenKind.LeftCurlyBracket ||
tokenReader.peekTokenAfterKind() === TokenKind.AtSign
) {
return undefined;
}
const startMarker: number = tokenReader.createMarker();
tokenReader.readToken(); // read the "{"
let jsdocTypeExcerpt: TokenSequence | undefined = this._tryParseJSDocTypeOrValueRest(
tokenReader,
TokenKind.LeftCurlyBracket,
TokenKind.RightCurlyBracket,
startMarker
);
if (jsdocTypeExcerpt) {
this._parserContext.log.addMessageForTokenSequence(
TSDocMessageId.ParamTagWithInvalidType,
'The ' + tagName + " block should not include a JSDoc-style '{type}'",
jsdocTypeExcerpt,
docBlockTag
);
// Fold any spacing after the "}" into the returned excerpt.
const spacingAfterJsdocTypeExcerpt: TokenSequence | undefined = this._tryReadSpacingAndNewlines(
tokenReader
);
if (spacingAfterJsdocTypeExcerpt) {
jsdocTypeExcerpt = jsdocTypeExcerpt.getNewSequence(
jsdocTypeExcerpt.startIndex,
spacingAfterJsdocTypeExcerpt.endIndex
);
}
}
return jsdocTypeExcerpt;
}
/**
 * Used by `_parseParamBlock()`, this parses a JSDoc expression remainder like `=[]]` from
 * an input like `@param {string} [x=[]] - the X value`.
 */
private _tryParseJSDocOptionalNameRest(tokenReader: TokenReader): TokenSequence | undefined {
    tokenReader.assertAccumulatedSequenceIsEmpty();
    // Nothing to parse at the end of the input.
    if (tokenReader.peekTokenKind() === TokenKind.EndOfInput) {
        return undefined;
    }
    const startMarker: number = tokenReader.createMarker();
    return this._tryParseJSDocTypeOrValueRest(
        tokenReader,
        TokenKind.LeftSquareBracket,
        TokenKind.RightSquareBracket,
        startMarker
    );
}
/**
 * Parses the parameter-name portion of a "@param" or "@typeParam" block,
 * tolerating (but warning about) JSDoc-isms: `{type}` annotations before or
 * after the name, `[name=default]` optional brackets, and a `{type}` after the
 * hyphen. Expects "name - description"; a missing hyphen is reported.
 *
 * On an invalid parameter name the reader is backtracked and an empty-named
 * DocParamBlock is returned so parsing can continue.
 */
private _parseParamBlock(
tokenReader: TokenReader,
docBlockTag: DocBlockTag,
tagName: string
): DocParamBlock {
const startMarker: number = tokenReader.createMarker();
const spacingBeforeParameterNameExcerpt: TokenSequence | undefined = this._tryReadSpacingAndNewlines(
tokenReader
);
// Skip past a JSDoc type (i.e., '@param {type} paramName') if found, and report a warning.
const unsupportedJsdocTypeBeforeParameterNameExcerpt:
| TokenSequence
| undefined = this._tryParseUnsupportedJSDocType(tokenReader, docBlockTag, tagName);
// Parse opening of invalid JSDoc optional parameter name (e.g., '[')
let unsupportedJsdocOptionalNameOpenBracketExcerpt: TokenSequence | undefined;
if (tokenReader.peekTokenKind() === TokenKind.LeftSquareBracket) {
tokenReader.readToken(); // read the "["
unsupportedJsdocOptionalNameOpenBracketExcerpt = tokenReader.extractAccumulatedSequence();
}
// Accumulate the parameter name from word/period/dollar tokens.
let parameterName: string = '';
let done: boolean = false;
while (!done) {
switch (tokenReader.peekTokenKind()) {
case TokenKind.AsciiWord:
case TokenKind.Period:
case TokenKind.DollarSign:
parameterName += tokenReader.readToken();
break;
default:
done = true;
break;
}
}
const explanation: string | undefined = StringChecks.explainIfInvalidUnquotedIdentifier(parameterName);
if (explanation !== undefined) {
// Invalid (or missing) name: rewind everything and return an empty-named block.
tokenReader.backtrackToMarker(startMarker);
const errorParamBlock: DocParamBlock = new DocParamBlock({
configuration: this._configuration,
blockTag: docBlockTag,
parameterName: ''
});
const errorMessage: string =
parameterName.length > 0
? 'The ' + tagName + ' block should be followed by a valid parameter name: ' + explanation
: 'The ' + tagName + ' block should be followed by a parameter name';
this._parserContext.log.addMessageForTokenSequence(
TSDocMessageId.ParamTagWithInvalidName,
errorMessage,
docBlockTag.getTokenSequence(),
docBlockTag
);
return errorParamBlock;
}
const parameterNameExcerpt: TokenSequence = tokenReader.extractAccumulatedSequence();
// Parse closing of invalid JSDoc optional parameter name (e.g., ']', '=default]').
let unsupportedJsdocOptionalNameRestExcerpt: TokenSequence | undefined;
if (unsupportedJsdocOptionalNameOpenBracketExcerpt) {
unsupportedJsdocOptionalNameRestExcerpt = this._tryParseJSDocOptionalNameRest(tokenReader);
let errorSequence: TokenSequence | undefined = unsupportedJsdocOptionalNameOpenBracketExcerpt;
if (unsupportedJsdocOptionalNameRestExcerpt) {
// Report an error range spanning from the "[" through the "]".
errorSequence = docBlockTag
.getTokenSequence()
.getNewSequence(
unsupportedJsdocOptionalNameOpenBracketExcerpt.startIndex,
unsupportedJsdocOptionalNameRestExcerpt.endIndex
);
}
this._parserContext.log.addMessageForTokenSequence(
TSDocMessageId.ParamTagWithInvalidOptionalName,
'The ' +
tagName +
" should not include a JSDoc-style optional name; it must not be enclosed in '[ ]' brackets.",
errorSequence,
docBlockTag
);
}
const spacingAfterParameterNameExcerpt: TokenSequence | undefined = this._tryReadSpacingAndNewlines(
tokenReader
);
// Skip past a trailing JSDoc type (i.e., '@param paramName {type}') if found, and report a warning.
const unsupportedJsdocTypeAfterParameterNameExcerpt:
| TokenSequence
| undefined = this._tryParseUnsupportedJSDocType(tokenReader, docBlockTag, tagName);
// TODO: Warn if there is no space before or after the hyphen
let hyphenExcerpt: TokenSequence | undefined;
let spacingAfterHyphenExcerpt: TokenSequence | undefined;
let unsupportedJsdocTypeAfterHyphenExcerpt: TokenSequence | undefined;
if (tokenReader.peekTokenKind() === TokenKind.Hyphen) {
tokenReader.readToken();
hyphenExcerpt = tokenReader.extractAccumulatedSequence();
// TODO: Only read one space
spacingAfterHyphenExcerpt = this._tryReadSpacingAndNewlines(tokenReader);
// Skip past a JSDoc type (i.e., '@param paramName - {type}') if found, and report a warning.
unsupportedJsdocTypeAfterHyphenExcerpt = this._tryParseUnsupportedJSDocType(
tokenReader,
docBlockTag,
tagName
);
} else {
this._parserContext.log.addMessageForTokenSequence(
TSDocMessageId.ParamTagMissingHyphen,
'The ' + tagName + ' block should be followed by a parameter name and then a hyphen',
docBlockTag.getTokenSequence(),
docBlockTag
);
}
return new DocParamBlock({
parsed: true,
configuration: this._configuration,
blockTag: docBlockTag,
spacingBeforeParameterNameExcerpt,
unsupportedJsdocTypeBeforeParameterNameExcerpt,
unsupportedJsdocOptionalNameOpenBracketExcerpt,
parameterNameExcerpt,
parameterName,
unsupportedJsdocOptionalNameRestExcerpt,
spacingAfterParameterNameExcerpt,
unsupportedJsdocTypeAfterParameterNameExcerpt,
hyphenExcerpt,
spacingAfterHyphenExcerpt,
unsupportedJsdocTypeAfterHyphenExcerpt
});
}
private _pushNode(docNode: DocNode): void {
if (this._configuration.docNodeManager.isAllowedChild(DocNodeKind.Paragraph, docNode.kind)) {
this._currentSection.appendNodeInParagraph(docNode);
} else {
this._currentSection.appendNode(docNode);
}
}
/**
 * Parses a backslash escape such as "\}" into a DocEscapedText node.
 * Following CommonMark, a backslash may only escape a punctuation character;
 * otherwise the reader is backtracked and a DocErrorText is returned.
 */
private _parseBackslashEscape(tokenReader: TokenReader): DocNode {
    tokenReader.assertAccumulatedSequenceIsEmpty();
    const startMarker: number = tokenReader.createMarker();

    tokenReader.readToken(); // consume the backslash

    // A trailing backslash with nothing after it cannot escape anything.
    if (tokenReader.peekTokenKind() === TokenKind.EndOfInput) {
        return this._backtrackAndCreateError(
            tokenReader,
            startMarker,
            TSDocMessageId.UnnecessaryBackslash,
            'A backslash must precede another character that is being escaped'
        );
    }

    const escapedToken: Token = tokenReader.readToken(); // consume the escaped character

    // In CommonMark, a backslash is only allowed before a punctuation
    // character. In all other contexts, the backslash is interpreted as a
    // literal character.
    if (!Tokenizer.isPunctuation(escapedToken.kind)) {
        return this._backtrackAndCreateError(
            tokenReader,
            startMarker,
            TSDocMessageId.UnnecessaryBackslash,
            'A backslash can only be used to escape a punctuation character'
        );
    }

    return new DocEscapedText({
        parsed: true,
        configuration: this._configuration,
        escapeStyle: EscapeStyle.CommonMarkBackslash,
        encodedTextExcerpt: tokenReader.extractAccumulatedSequence(),
        decodedText: escapedToken.toString()
    });
}
/**
 * Parses a block tag such as "@remarks" starting at an "@" token. On any
 * syntax problem the reader is backtracked and a DocErrorText node is
 * returned instead of a DocBlockTag.
 */
private _parseBlockTag(tokenReader: TokenReader): DocNode {
tokenReader.assertAccumulatedSequenceIsEmpty();
const marker: number = tokenReader.createMarker();
if (tokenReader.peekTokenKind() !== TokenKind.AtSign) {
return this._backtrackAndCreateError(
tokenReader,
marker,
TSDocMessageId.MissingTag,
'Expecting a TSDoc tag starting with "@"'
);
}
// "@one" is a valid TSDoc tag at the start of a line, but "@one@two" is
// a syntax error. For two tags it should be "@one @two", or for literal text it
// should be "\@one\@two".
switch (tokenReader.peekPreviousTokenKind()) {
case TokenKind.EndOfInput:
case TokenKind.Spacing:
case TokenKind.Newline:
break;
default:
return this._backtrackAndCreateError(
tokenReader,
marker,
TSDocMessageId.AtSignInWord,
'The "@" character looks like part of a TSDoc tag; use a backslash to escape it'
);
}
// Include the "@" as part of the tagName
let tagName: string = tokenReader.readToken().toString();
if (tokenReader.peekTokenKind() !== TokenKind.AsciiWord) {
return this._backtrackAndCreateError(
tokenReader,
marker,
TSDocMessageId.AtSignWithoutTagName,
'Expecting a TSDoc tag name after "@"; if it is not a tag, use a backslash to escape this character'
);
}
// Accumulate the word tokens that form the tag name.
const tagNameMarker: number = tokenReader.createMarker();
while (tokenReader.peekTokenKind() === TokenKind.AsciiWord) {
tagName += tokenReader.readToken().toString();
}
// The tag must be followed by whitespace or end of input; anything else
// (e.g. "@tag!") is reported as an invalid character.
switch (tokenReader.peekTokenKind()) {
case TokenKind.Spacing:
case TokenKind.Newline:
case TokenKind.EndOfInput:
break;
default:
const badCharacter: string = tokenReader.peekToken().range.toString()[0];
return this._backtrackAndCreateError(
tokenReader,
marker,
TSDocMessageId.CharactersAfterBlockTag,
`The token "${tagName}" looks like a TSDoc tag but contains an invalid character` +
` ${JSON.stringify(badCharacter)}; if it is not a tag, use a backslash to escape the "@"`
);
}
if (StringChecks.explainIfInvalidTSDocTagName(tagName)) {
const failure: IFailure = this._createFailureForTokensSince(
tokenReader,
TSDocMessageId.MalformedTagName,
'A TSDoc tag name must start with a letter and contain only letters and numbers',
tagNameMarker
);
return this._backtrackAndCreateErrorForFailure(tokenReader, marker, '', failure);
}
return new DocBlockTag({
parsed: true,
configuration: this._configuration,
tagName,
tagNameExcerpt: tokenReader.extractAccumulatedSequence()
});
}
/**
 * Parses an inline tag such as "{@link ...}" starting at a "{" token.
 * Delegates to specialized parsing for {@inheritDoc} and {@link}; any other
 * tag name produces a generic DocInlineTag. On a syntax problem the reader is
 * backtracked and a DocErrorText covering both the "{" and "@" is returned,
 * so the main loop does not reinterpret the "@" as a block tag.
 */
private _parseInlineTag(tokenReader: TokenReader): DocNode {
tokenReader.assertAccumulatedSequenceIsEmpty();
const marker: number = tokenReader.createMarker();
if (tokenReader.peekTokenKind() !== TokenKind.LeftCurlyBracket) {
return this._backtrackAndCreateError(
tokenReader,
marker,
TSDocMessageId.MissingTag,
'Expecting a TSDoc tag starting with "{"'
);
}
tokenReader.readToken();
const openingDelimiterExcerpt: TokenSequence = tokenReader.extractAccumulatedSequence();
// For inline tags, if we handle errors by backtracking to the "{" token, then the main loop
// will then interpret the "@" as a block tag, which is almost certainly incorrect. So the
// DocErrorText needs to include both the "{" and "@" tokens.
// We will use _backtrackAndCreateErrorRangeForFailure() for that.
const atSignMarker: number = tokenReader.createMarker();
if (tokenReader.peekTokenKind() !== TokenKind.AtSign) {
return this._backtrackAndCreateError(
tokenReader,
marker,
TSDocMessageId.MalformedInlineTag,
'Expecting a TSDoc tag starting with "{@"'
);
}
// Include the "@" as part of the tagName
let tagName: string = tokenReader.readToken().toString();
while (tokenReader.peekTokenKind() === TokenKind.AsciiWord) {
tagName += tokenReader.readToken().toString();
}
if (tagName === '@') {
// This is an unusual case
const failure: IFailure = this._createFailureForTokensSince(
tokenReader,
TSDocMessageId.MalformedInlineTag,
'Expecting a TSDoc inline tag name after the "{@" characters',
atSignMarker
);
return this._backtrackAndCreateErrorRangeForFailure(tokenReader, marker, atSignMarker, '', failure);
}
if (StringChecks.explainIfInvalidTSDocTagName(tagName)) {
const failure: IFailure = this._createFailureForTokensSince(
tokenReader,
TSDocMessageId.MalformedTagName,
'A TSDoc tag name must start with a letter and contain only letters and numbers',
atSignMarker
);
return this._backtrackAndCreateErrorRangeForFailure(tokenReader, marker, atSignMarker, '', failure);
}
const tagNameExcerpt: TokenSequence = tokenReader.extractAccumulatedSequence();
const spacingAfterTagNameExcerpt: TokenSequence | undefined = this._tryReadSpacingAndNewlines(
tokenReader
);
if (spacingAfterTagNameExcerpt === undefined) {
// If there were no spaces at all, that's an error unless it's the degenerate "{@tag}" case
if (tokenReader.peekTokenKind() !== TokenKind.RightCurlyBracket) {
const badCharacter: string = tokenReader.peekToken().range.toString()[0];
const failure: IFailure = this._createFailureForToken(
tokenReader,
TSDocMessageId.CharactersAfterInlineTag,
`The character ${JSON.stringify(
badCharacter
)} cannot appear after the TSDoc tag name; expecting a space`
);
return this._backtrackAndCreateErrorRangeForFailure(tokenReader, marker, atSignMarker, '', failure);
}
}
// Scan the tag content until the closing "}", honoring backslash escapes
// and rejecting unescaped "{".
let done: boolean = false;
while (!done) {
switch (tokenReader.peekTokenKind()) {
case TokenKind.EndOfInput:
return this._backtrackAndCreateErrorRange(
tokenReader,
marker,
atSignMarker,
TSDocMessageId.InlineTagMissingRightBrace,
'The TSDoc inline tag name is missing its closing "}"'
);
case TokenKind.Backslash:
// http://usejsdoc.org/about-block-inline-tags.html
// "If your tag's text includes a closing curly brace (}), you must escape it with
// a leading backslash (\)."
tokenReader.readToken(); // discard the backslash
// In CommonMark, a backslash is only allowed before a punctuation
// character. In all other contexts, the backslash is interpreted as a
// literal character.
if (!Tokenizer.isPunctuation(tokenReader.peekTokenKind())) {
const failure: IFailure = this._createFailureForToken(
tokenReader,
TSDocMessageId.UnnecessaryBackslash,
'A backslash can only be used to escape a punctuation character'
);
return this._backtrackAndCreateErrorRangeForFailure(
tokenReader,
marker,
atSignMarker,
'Error reading inline TSDoc tag: ',
failure
);
}
tokenReader.readToken();
break;
case TokenKind.LeftCurlyBracket: {
const failure: IFailure = this._createFailureForToken(
tokenReader,
TSDocMessageId.InlineTagUnescapedBrace,
'The "{" character must be escaped with a backslash when used inside a TSDoc inline tag'
);
return this._backtrackAndCreateErrorRangeForFailure(tokenReader, marker, atSignMarker, '', failure);
}
case TokenKind.RightCurlyBracket:
done = true;
break;
default:
tokenReader.readToken();
break;
}
}
const tagContentExcerpt: TokenSequence | undefined = tokenReader.tryExtractAccumulatedSequence();
// Read the right curly bracket
tokenReader.readToken();
const closingDelimiterExcerpt: TokenSequence = tokenReader.extractAccumulatedSequence();
const docInlineTagParsedParameters: IDocInlineTagParsedParameters = {
parsed: true,
configuration: this._configuration,
openingDelimiterExcerpt,
tagNameExcerpt,
tagName,
spacingAfterTagNameExcerpt,
tagContentExcerpt,
closingDelimiterExcerpt
};
const tagNameWithUpperCase: string = tagName.toUpperCase();
// Create a new TokenReader that will reparse the tokens corresponding to the tagContent.
const embeddedTokenReader: TokenReader = new TokenReader(
this._parserContext,
tagContentExcerpt ? tagContentExcerpt : TokenSequence.createEmpty(this._parserContext)
);
let docNode: DocNode;
switch (tagNameWithUpperCase) {
case StandardTags.inheritDoc.tagNameWithUpperCase:
docNode = this._parseInheritDocTag(docInlineTagParsedParameters, embeddedTokenReader);
break;
case StandardTags.link.tagNameWithUpperCase:
docNode = this._parseLinkTag(docInlineTagParsedParameters, embeddedTokenReader);
break;
default:
docNode = new DocInlineTag(docInlineTagParsedParameters);
}
// Validate the tag
const tagDefinition:
| TSDocTagDefinition
| undefined = this._parserContext.configuration.tryGetTagDefinitionWithUpperCase(tagNameWithUpperCase);
this._validateTagDefinition(
tagDefinition,
tagName,
/* expectingInlineTag */ true,
tagNameExcerpt,
docNode
);
return docNode;
}
/**
 * Parses the tag content of an `{@inheritDoc}` inline tag, which may optionally
 * contain a declaration reference.  If a syntax error is logged, a generic
 * DocInlineTag is returned instead of a DocInheritDocTag.
 */
private _parseInheritDocTag(
docInlineTagParsedParameters: IDocInlineTagParsedParameters,
embeddedTokenReader: TokenReader
): DocInlineTagBase {
  // On any parse error we fall back to a generic DocInlineTag rather than a DocInheritDocTag
  const fallbackTag: DocInlineTag = new DocInlineTag(docInlineTagParsedParameters);
  const inheritDocParameters: IDocInheritDocTagParameters = {
    ...docInlineTagParsedParameters
  };
  // A bare {@inheritDoc} with no content is valid and carries no declaration reference
  if (embeddedTokenReader.peekTokenKind() === TokenKind.EndOfInput) {
    return new DocInheritDocTag(inheritDocParameters);
  }
  inheritDocParameters.declarationReference = this._parseDeclarationReference(
    embeddedTokenReader,
    docInlineTagParsedParameters.tagNameExcerpt,
    fallbackTag
  );
  if (!inheritDocParameters.declarationReference) {
    return fallbackTag;
  }
  // After the declaration reference, nothing else may appear before the closing "}"
  if (embeddedTokenReader.peekTokenKind() !== TokenKind.EndOfInput) {
    embeddedTokenReader.readToken();
    this._parserContext.log.addMessageForTokenSequence(
      TSDocMessageId.InheritDocTagSyntax,
      'Unexpected character after declaration reference',
      embeddedTokenReader.extractAccumulatedSequence(),
      fallbackTag
    );
    return fallbackTag;
  }
  return new DocInheritDocTag(inheritDocParameters);
}
/**
 * Parses the content of a `{@link}` inline tag.  The destination may be either a URL
 * (recognized by a leading "//" or a "scheme://" pattern) or a declaration reference;
 * it may be followed by "|" and optional link text.  Returns a DocLinkTag on success,
 * or the generic errorTag after logging a message on failure.
 */
private _parseLinkTag(
docInlineTagParsedParameters: IDocInlineTagParsedParameters,
embeddedTokenReader: TokenReader
): DocInlineTagBase {
// If an error occurs, then return a generic DocInlineTag instead of a DocLinkTag
const errorTag: DocInlineTag = new DocInlineTag(docInlineTagParsedParameters);
const parameters: IDocLinkTagParsedParameters = {
...docInlineTagParsedParameters
};
// An empty "{@link}" tag with no content at all is an error
if (!docInlineTagParsedParameters.tagContentExcerpt) {
this._parserContext.log.addMessageForTokenSequence(
TSDocMessageId.LinkTagEmpty,
'The @link tag content is missing',
parameters.tagNameExcerpt,
errorTag
);
return errorTag;
}
// Is the link destination a URL or a declaration reference?
//
// The JSDoc "@link" tag allows URLs, however supporting full URLs would be highly
// ambiguous, for example "microsoft.windows.camera:" is an actual valid URI scheme,
// and even the common "mailto:example.com" looks suspiciously like a declaration reference.
// In practice JSDoc URLs are nearly always HTTP or HTTPS, so it seems fairly reasonable to
// require the URL to have "://" and a scheme without any punctuation in it. If a more exotic
// URL is needed, the HTML "<a>" tag can always be used.
// We start with a fairly broad classifier heuristic, and then the parsers will refine this:
// 1. Does it start with "//"?
// 2. Does it contain "://"?
let looksLikeUrl: boolean =
embeddedTokenReader.peekTokenKind() === TokenKind.Slash &&
embeddedTokenReader.peekTokenAfterKind() === TokenKind.Slash;
const marker: number = embeddedTokenReader.createMarker();
let done: boolean = looksLikeUrl;
// Scan ahead over scheme-like characters looking for "://".  This loop is only a
// classifier; we backtrack to the marker afterwards regardless of the outcome.
while (!done) {
switch (embeddedTokenReader.peekTokenKind()) {
// An URI scheme can contain letters, numbers, minus, plus, and periods
case TokenKind.AsciiWord:
case TokenKind.Period:
case TokenKind.Hyphen:
case TokenKind.Plus:
embeddedTokenReader.readToken();
break;
case TokenKind.Colon:
embeddedTokenReader.readToken();
// Once we reach a colon, then it's a URL only if we see "://"
looksLikeUrl =
embeddedTokenReader.peekTokenKind() === TokenKind.Slash &&
embeddedTokenReader.peekTokenAfterKind() === TokenKind.Slash;
done = true;
break;
default:
done = true;
}
}
embeddedTokenReader.backtrackToMarker(marker);
// Is the hyperlink a URL or a declaration reference?
if (looksLikeUrl) {
// It starts with something like "http://", so parse it as a URL
if (
!this._parseLinkTagUrlDestination(
embeddedTokenReader,
parameters,
docInlineTagParsedParameters.tagNameExcerpt,
errorTag
)
) {
return errorTag;
}
} else {
// Otherwise, assume it's a declaration reference
if (
!this._parseLinkTagCodeDestination(
embeddedTokenReader,
parameters,
docInlineTagParsedParameters.tagNameExcerpt,
errorTag
)
) {
return errorTag;
}
}
if (embeddedTokenReader.peekTokenKind() === TokenKind.Spacing) {
// The above parser rules should have consumed any spacing before the pipe
throw new Error('Unconsumed spacing encountered after construct');
}
if (embeddedTokenReader.peekTokenKind() === TokenKind.Pipe) {
// Read the link text
embeddedTokenReader.readToken();
parameters.pipeExcerpt = embeddedTokenReader.extractAccumulatedSequence();
parameters.spacingAfterPipeExcerpt = this._tryReadSpacingAndNewlines(embeddedTokenReader);
// Read everything until the end
// NOTE: Because we're using an embedded TokenReader, the TokenKind.EndOfInput occurs
// when we reach the "}", not the end of the original input
done = false;
// Marks the position just after the last non-spacing character seen so far, so that
// trailing whitespace can be split off from the link text below
let spacingAfterLinkTextMarker: number | undefined = undefined;
while (!done) {
switch (embeddedTokenReader.peekTokenKind()) {
case TokenKind.EndOfInput:
done = true;
break;
case TokenKind.Pipe:
case TokenKind.LeftCurlyBracket:
const badCharacter: string = embeddedTokenReader.readToken().toString();
this._parserContext.log.addMessageForTokenSequence(
TSDocMessageId.LinkTagUnescapedText,
`The "${badCharacter}" character may not be used in the link text without escaping it`,
embeddedTokenReader.extractAccumulatedSequence(),
errorTag
);
return errorTag;
case TokenKind.Spacing:
case TokenKind.Newline:
embeddedTokenReader.readToken();
break;
default:
// We found a non-spacing character, so move the spacingAfterLinkTextMarker
spacingAfterLinkTextMarker = embeddedTokenReader.createMarker() + 1;
embeddedTokenReader.readToken();
}
}
const linkTextAndSpacing:
| TokenSequence
| undefined = embeddedTokenReader.tryExtractAccumulatedSequence();
if (linkTextAndSpacing) {
if (spacingAfterLinkTextMarker === undefined) {
// We never found any non-spacing characters, so everything is trailing spacing
parameters.spacingAfterLinkTextExcerpt = linkTextAndSpacing;
} else if (spacingAfterLinkTextMarker >= linkTextAndSpacing.endIndex) {
// We found no trailing spacing, so everything we found is the text
parameters.linkTextExcerpt = linkTextAndSpacing;
} else {
// Split the trailing spacing from the link text
parameters.linkTextExcerpt = linkTextAndSpacing.getNewSequence(
linkTextAndSpacing.startIndex,
spacingAfterLinkTextMarker
);
parameters.spacingAfterLinkTextExcerpt = linkTextAndSpacing.getNewSequence(
spacingAfterLinkTextMarker,
linkTextAndSpacing.endIndex
);
}
}
} else if (embeddedTokenReader.peekTokenKind() !== TokenKind.EndOfInput) {
embeddedTokenReader.readToken();
this._parserContext.log.addMessageForTokenSequence(
TSDocMessageId.LinkTagDestinationSyntax,
'Unexpected character after link destination',
embeddedTokenReader.extractAccumulatedSequence(),
errorTag
);
return errorTag;
}
return new DocLinkTag(parameters);
}
/**
 * Reads a URL destination for a `{@link}` tag, accumulating tokens up to the next
 * spacing, newline, "|", "}", or end of input.  On success the excerpts are stored
 * on `parameters` and true is returned; on an invalid URL a message is logged and
 * false is returned.  (This is deliberately not a full URI parser.)
 */
private _parseLinkTagUrlDestination(
embeddedTokenReader: TokenReader,
parameters: IDocLinkTagParsedParameters,
tokenSequenceForErrorContext: TokenSequence,
nodeForErrorContext: DocNode
): boolean {
  let accumulatedUrl: string = '';
  for (;;) {
    const kind: TokenKind = embeddedTokenReader.peekTokenKind();
    if (
      kind === TokenKind.Spacing ||
      kind === TokenKind.Newline ||
      kind === TokenKind.EndOfInput ||
      kind === TokenKind.Pipe ||
      kind === TokenKind.RightCurlyBracket
    ) {
      break;
    }
    accumulatedUrl += embeddedTokenReader.readToken();
  }
  if (accumulatedUrl.length === 0) {
    // This should be impossible since the caller ensures that peekTokenKind() === TokenKind.AsciiWord
    throw new Error('Missing URL in _parseLinkTagUrlDestination()');
  }
  const urlDestinationExcerpt: TokenSequence = embeddedTokenReader.extractAccumulatedSequence();
  const invalidUrlExplanation: string | undefined = StringChecks.explainIfInvalidLinkUrl(accumulatedUrl);
  if (invalidUrlExplanation) {
    this._parserContext.log.addMessageForTokenSequence(
      TSDocMessageId.LinkTagInvalidUrl,
      invalidUrlExplanation,
      urlDestinationExcerpt,
      nodeForErrorContext
    );
    return false;
  }
  parameters.urlDestinationExcerpt = urlDestinationExcerpt;
  parameters.spacingAfterDestinationExcerpt = this._tryReadSpacingAndNewlines(embeddedTokenReader);
  return true;
}
/**
 * Reads a declaration-reference destination for a `{@link}` tag and stores it on
 * `parameters.codeDestination`.  Returns true on success; on failure the underlying
 * _parseDeclarationReference() has already logged a message and false is returned.
 */
private _parseLinkTagCodeDestination(
embeddedTokenReader: TokenReader,
parameters: IDocLinkTagParameters,
tokenSequenceForErrorContext: TokenSequence,
nodeForErrorContext: DocNode
): boolean {
  const codeDestination: DocDeclarationReference | undefined = this._parseDeclarationReference(
    embeddedTokenReader,
    tokenSequenceForErrorContext,
    nodeForErrorContext
  );
  parameters.codeDestination = codeDestination;
  return codeDestination !== undefined;
}
/**
 * Parses a declaration reference such as `my-package/path#MyClass.myMember:label`.
 * The method first scans ahead (then backtracks) to determine whether a "#" delimiter
 * is present; if so, the package name and/or import path before the "#" are read and
 * validated.  Afterwards, zero or more chained member references are parsed.
 * Returns undefined (after logging a message) if the reference is malformed.
 */
private _parseDeclarationReference(
tokenReader: TokenReader,
tokenSequenceForErrorContext: TokenSequence,
nodeForErrorContext: DocNode
): DocDeclarationReference | undefined {
tokenReader.assertAccumulatedSequenceIsEmpty();
// The package name can contain characters that look like a member reference. This means we need to scan forwards
// to see if there is a "#". However, we need to be careful not to match a "#" that is part of a quoted expression.
const marker: number = tokenReader.createMarker();
let hasHash: boolean = false;
// A common mistake is to forget the "#" for package name or import path. The telltale sign
// of this mistake is that we see path-only characters such as "@" or "/" in the beginning
// where this would be a syntax error for a member reference.
let lookingForImportCharacters: boolean = true;
let sawImportCharacters: boolean = false;
let done: boolean = false;
while (!done) {
switch (tokenReader.peekTokenKind()) {
case TokenKind.DoubleQuote:
case TokenKind.EndOfInput:
case TokenKind.LeftCurlyBracket:
case TokenKind.LeftParenthesis:
case TokenKind.LeftSquareBracket:
case TokenKind.Newline:
case TokenKind.Pipe:
case TokenKind.RightCurlyBracket:
case TokenKind.RightParenthesis:
case TokenKind.RightSquareBracket:
case TokenKind.SingleQuote:
case TokenKind.Spacing:
done = true;
break;
case TokenKind.PoundSymbol:
hasHash = true;
done = true;
break;
case TokenKind.Slash:
case TokenKind.AtSign:
if (lookingForImportCharacters) {
sawImportCharacters = true;
}
tokenReader.readToken();
break;
case TokenKind.AsciiWord:
case TokenKind.Period:
case TokenKind.Hyphen:
// It's a character that looks like part of a package name or import path,
// so don't set lookingForImportCharacters = false
tokenReader.readToken();
break;
default:
// Once we reach something other than AsciiWord and Period, then the meaning of
// slashes and at-signs is no longer obvious.
lookingForImportCharacters = false;
tokenReader.readToken();
}
}
if (!hasHash && sawImportCharacters) {
// We saw characters that will be a syntax error if interpreted as a member reference,
// but would make sense as a package name or import path, but we did not find a "#"
this._parserContext.log.addMessageForTokenSequence(
TSDocMessageId.ReferenceMissingHash,
'The declaration reference appears to contain a package name or import path,' +
' but it is missing the "#" delimiter',
tokenReader.extractAccumulatedSequence(),
nodeForErrorContext
);
return undefined;
}
// The scan above was only a classifier; re-parse from the beginning
tokenReader.backtrackToMarker(marker);
let packageNameExcerpt: TokenSequence | undefined;
let importPathExcerpt: TokenSequence | undefined;
let importHashExcerpt: TokenSequence | undefined;
let spacingAfterImportHashExcerpt: TokenSequence | undefined;
if (hasHash) {
// If it starts with a "." then it's a relative path, not a package name
if (tokenReader.peekTokenKind() !== TokenKind.Period) {
// Read the package name:
const scopedPackageName: boolean = tokenReader.peekTokenKind() === TokenKind.AtSign;
let finishedScope: boolean = false;
done = false;
while (!done) {
switch (tokenReader.peekTokenKind()) {
case TokenKind.EndOfInput:
// If hasHash=true, then we are expecting to stop when we reach the hash
throw new Error('Expecting pound symbol');
case TokenKind.Slash:
// Stop at the first slash, unless this is a scoped package, in which case we stop at the second slash
if (scopedPackageName && !finishedScope) {
tokenReader.readToken();
finishedScope = true;
} else {
done = true;
}
break;
case TokenKind.PoundSymbol:
done = true;
break;
default:
tokenReader.readToken();
}
}
if (!tokenReader.isAccumulatedSequenceEmpty()) {
packageNameExcerpt = tokenReader.extractAccumulatedSequence();
// Check that the packageName is syntactically valid
const explanation: string | undefined = StringChecks.explainIfInvalidPackageName(
packageNameExcerpt.toString()
);
if (explanation) {
this._parserContext.log.addMessageForTokenSequence(
TSDocMessageId.ReferenceMalformedPackageName,
explanation,
packageNameExcerpt,
nodeForErrorContext
);
return undefined;
}
}
}
// Read the import path:
done = false;
while (!done) {
switch (tokenReader.peekTokenKind()) {
case TokenKind.EndOfInput:
// If hasHash=true, then we are expecting to stop when we reach the hash
throw new Error('Expecting pound symbol');
case TokenKind.PoundSymbol:
done = true;
break;
default:
tokenReader.readToken();
}
}
if (!tokenReader.isAccumulatedSequenceEmpty()) {
importPathExcerpt = tokenReader.extractAccumulatedSequence();
// Check that the importPath is syntactically valid
const explanation: string | undefined = StringChecks.explainIfInvalidImportPath(
importPathExcerpt.toString(),
!!packageNameExcerpt
);
if (explanation) {
this._parserContext.log.addMessageForTokenSequence(
TSDocMessageId.ReferenceMalformedImportPath,
explanation,
importPathExcerpt,
nodeForErrorContext
);
return undefined;
}
}
// Read the import hash
if (tokenReader.peekTokenKind() !== TokenKind.PoundSymbol) {
// The above logic should have left us at the PoundSymbol
throw new Error('Expecting pound symbol');
}
tokenReader.readToken();
importHashExcerpt = tokenReader.extractAccumulatedSequence();
spacingAfterImportHashExcerpt = this._tryReadSpacingAndNewlines(tokenReader);
if (packageNameExcerpt === undefined && importPathExcerpt === undefined) {
this._parserContext.log.addMessageForTokenSequence(
TSDocMessageId.ReferenceHashSyntax,
'The hash character must be preceded by a package name or import path',
importHashExcerpt,
nodeForErrorContext
);
return undefined;
}
}
// Read the member references:
const memberReferences: DocMemberReference[] = [];
done = false;
while (!done) {
switch (tokenReader.peekTokenKind()) {
case TokenKind.Period:
case TokenKind.LeftParenthesis:
case TokenKind.AsciiWord:
case TokenKind.Colon:
case TokenKind.LeftSquareBracket:
case TokenKind.DoubleQuote:
const expectingDot: boolean = memberReferences.length > 0;
const memberReference: DocMemberReference | undefined = this._parseMemberReference(
tokenReader,
expectingDot,
tokenSequenceForErrorContext,
nodeForErrorContext
);
if (!memberReference) {
return undefined;
}
memberReferences.push(memberReference);
break;
default:
done = true;
}
}
if (
packageNameExcerpt === undefined &&
importPathExcerpt === undefined &&
memberReferences.length === 0
) {
// We didn't find any parts of a declaration reference
this._parserContext.log.addMessageForTokenSequence(
TSDocMessageId.MissingReference,
'Expecting a declaration reference',
tokenSequenceForErrorContext,
nodeForErrorContext
);
return undefined;
}
return new DocDeclarationReference({
parsed: true,
configuration: this._configuration,
packageNameExcerpt,
importPathExcerpt,
importHashExcerpt,
spacingAfterImportHashExcerpt,
memberReferences
});
}
/**
 * Parses one component of a declaration reference: an optional leading "." separator
 * (required when expectingDot=true), then either a "[...]" member symbol or a member
 * identifier, optionally enclosed in parentheses with a ":selector" suffix.
 * Returns undefined (after logging a message) on a syntax error.
 */
private _parseMemberReference(
tokenReader: TokenReader,
expectingDot: boolean,
tokenSequenceForErrorContext: TokenSequence,
nodeForErrorContext: DocNode
): DocMemberReference | undefined {
const parameters: IDocMemberReferenceParsedParameters = {
parsed: true,
configuration: this._configuration
};
// Read the dot operator
if (expectingDot) {
if (tokenReader.peekTokenKind() !== TokenKind.Period) {
this._parserContext.log.addMessageForTokenSequence(
TSDocMessageId.ReferenceMissingDot,
'Expecting a period before the next component of a declaration reference',
tokenSequenceForErrorContext,
nodeForErrorContext
);
return undefined;
}
tokenReader.readToken();
parameters.dotExcerpt = tokenReader.extractAccumulatedSequence();
parameters.spacingAfterDotExcerpt = this._tryReadSpacingAndNewlines(tokenReader);
}
// Read the left parenthesis if there is one
if (tokenReader.peekTokenKind() === TokenKind.LeftParenthesis) {
tokenReader.readToken();
parameters.leftParenthesisExcerpt = tokenReader.extractAccumulatedSequence();
parameters.spacingAfterLeftParenthesisExcerpt = this._tryReadSpacingAndNewlines(tokenReader);
}
// Read the member identifier or symbol
if (tokenReader.peekTokenKind() === TokenKind.LeftSquareBracket) {
parameters.memberSymbol = this._parseMemberSymbol(tokenReader, nodeForErrorContext);
if (!parameters.memberSymbol) {
return undefined;
}
} else {
parameters.memberIdentifier = this._parseMemberIdentifier(
tokenReader,
tokenSequenceForErrorContext,
nodeForErrorContext
);
if (!parameters.memberIdentifier) {
return undefined;
}
}
parameters.spacingAfterMemberExcerpt = this._tryReadSpacingAndNewlines(tokenReader);
// Read the colon
if (tokenReader.peekTokenKind() === TokenKind.Colon) {
tokenReader.readToken();
parameters.colonExcerpt = tokenReader.extractAccumulatedSequence();
parameters.spacingAfterColonExcerpt = this._tryReadSpacingAndNewlines(tokenReader);
if (!parameters.leftParenthesisExcerpt) {
// In the current TSDoc draft standard, a member reference with a selector requires the parentheses.
// It would be reasonable to make the parentheses optional, and we are contemplating simplifying the
// notation in the future. But for now the parentheses are required.
this._parserContext.log.addMessageForTokenSequence(
TSDocMessageId.ReferenceSelectorMissingParens,
'Syntax error in declaration reference: the member selector must be enclosed in parentheses',
parameters.colonExcerpt,
nodeForErrorContext
);
return undefined;
}
// If there is a colon, then read the selector
parameters.selector = this._parseMemberSelector(
tokenReader,
parameters.colonExcerpt,
nodeForErrorContext
);
if (!parameters.selector) {
return undefined;
}
parameters.spacingAfterSelectorExcerpt = this._tryReadSpacingAndNewlines(tokenReader);
} else {
// No colon: parentheses (if any) must not appear without a selector
if (parameters.leftParenthesisExcerpt) {
this._parserContext.log.addMessageForTokenSequence(
TSDocMessageId.ReferenceMissingColon,
'Expecting a colon after the identifier because the expression is in parentheses',
parameters.leftParenthesisExcerpt,
nodeForErrorContext
);
return undefined;
}
}
// Read the right parenthesis
if (parameters.leftParenthesisExcerpt) {
if (tokenReader.peekTokenKind() !== TokenKind.RightParenthesis) {
this._parserContext.log.addMessageForTokenSequence(
TSDocMessageId.ReferenceMissingRightParen,
'Expecting a matching right parenthesis',
parameters.leftParenthesisExcerpt,
nodeForErrorContext
);
return undefined;
}
tokenReader.readToken();
parameters.rightParenthesisExcerpt = tokenReader.extractAccumulatedSequence();
parameters.spacingAfterRightParenthesisExcerpt = this._tryReadSpacingAndNewlines(tokenReader);
}
return new DocMemberReference(parameters);
}
/**
 * Parses a bracketed member symbol such as `[Symbol.iterator]`: a "[", a nested
 * declaration reference, and a "]".  Returns undefined (after logging a message)
 * on a syntax error.
 */
private _parseMemberSymbol(
tokenReader: TokenReader,
nodeForErrorContext: DocNode
): DocMemberSymbol | undefined {
  // Consume the opening "[" (the caller already peeked it)
  if (tokenReader.peekTokenKind() !== TokenKind.LeftSquareBracket) {
    // This should be impossible since the caller ensures that peekTokenKind() === TokenKind.LeftSquareBracket
    throw new Error('Expecting "["');
  }
  tokenReader.readToken();
  const openBracketExcerpt: TokenSequence = tokenReader.extractAccumulatedSequence();
  const spacingAfterOpenBracket: TokenSequence | undefined = this._tryReadSpacingAndNewlines(
    tokenReader
  );
  // The bracketed text is itself a declaration reference
  const symbolReference: DocDeclarationReference | undefined = this._parseDeclarationReference(
    tokenReader,
    openBracketExcerpt,
    nodeForErrorContext
  );
  if (symbolReference === undefined) {
    this._parserContext.log.addMessageForTokenSequence(
      TSDocMessageId.ReferenceSymbolSyntax,
      'Missing declaration reference in symbol reference',
      openBracketExcerpt,
      nodeForErrorContext
    );
    return undefined;
  }
  // (We don't need to worry about spacing here since _parseDeclarationReference() absorbs trailing spaces)
  if (tokenReader.peekTokenKind() !== TokenKind.RightSquareBracket) {
    this._parserContext.log.addMessageForTokenSequence(
      TSDocMessageId.ReferenceMissingRightBracket,
      'Missing closing square bracket for symbol reference',
      openBracketExcerpt,
      nodeForErrorContext
    );
    return undefined;
  }
  tokenReader.readToken();
  const closeBracketExcerpt: TokenSequence = tokenReader.extractAccumulatedSequence();
  return new DocMemberSymbol({
    parsed: true,
    configuration: this._configuration,
    leftBracketExcerpt: openBracketExcerpt,
    spacingAfterLeftBracketExcerpt: spacingAfterOpenBracket,
    symbolReference,
    rightBracketExcerpt: closeBracketExcerpt
  });
}
/**
 * Parses the identifier part of a member reference: either a double-quoted identifier
 * (any tokens up to the closing quote) or an unquoted identifier made of AsciiWord and
 * "$" tokens, validated by StringChecks.  Returns undefined (after logging a message)
 * on a syntax error.
 */
private _parseMemberIdentifier(
tokenReader: TokenReader,
tokenSequenceForErrorContext: TokenSequence,
nodeForErrorContext: DocNode
): DocMemberIdentifier | undefined {
let leftQuoteExcerpt: TokenSequence | undefined = undefined;
let rightQuoteExcerpt: TokenSequence | undefined = undefined;
// Is this a quoted identifier?
if (tokenReader.peekTokenKind() === TokenKind.DoubleQuote) {
// Read the opening '"'
tokenReader.readToken();
leftQuoteExcerpt = tokenReader.extractAccumulatedSequence();
// Read the text inside the quotes
while (tokenReader.peekTokenKind() !== TokenKind.DoubleQuote) {
if (tokenReader.peekTokenKind() === TokenKind.EndOfInput) {
this._parserContext.log.addMessageForTokenSequence(
TSDocMessageId.ReferenceMissingQuote,
'Unexpected end of input inside quoted member identifier',
leftQuoteExcerpt,
nodeForErrorContext
);
return undefined;
}
tokenReader.readToken();
}
if (tokenReader.isAccumulatedSequenceEmpty()) {
this._parserContext.log.addMessageForTokenSequence(
TSDocMessageId.ReferenceEmptyIdentifier,
'The quoted identifier cannot be empty',
leftQuoteExcerpt,
nodeForErrorContext
);
return undefined;
}
const identifierExcerpt: TokenSequence = tokenReader.extractAccumulatedSequence();
// Read the closing '"'
tokenReader.readToken(); // read the quote
rightQuoteExcerpt = tokenReader.extractAccumulatedSequence();
return new DocMemberIdentifier({
parsed: true,
configuration: this._configuration,
leftQuoteExcerpt,
identifierExcerpt,
rightQuoteExcerpt
});
} else {
// Otherwise assume it's a valid TypeScript identifier
let done: boolean = false;
while (!done) {
switch (tokenReader.peekTokenKind()) {
case TokenKind.AsciiWord:
case TokenKind.DollarSign:
tokenReader.readToken();
break;
default:
done = true;
break;
}
}
if (tokenReader.isAccumulatedSequenceEmpty()) {
this._parserContext.log.addMessageForTokenSequence(
TSDocMessageId.ReferenceMissingIdentifier,
'Syntax error in declaration reference: expecting a member identifier',
tokenSequenceForErrorContext,
nodeForErrorContext
);
return undefined;
}
const identifierExcerpt: TokenSequence = tokenReader.extractAccumulatedSequence();
const identifier: string = identifierExcerpt.toString();
// Validate the unquoted identifier (e.g. it must not be a reserved word)
const explanation: string | undefined = StringChecks.explainIfInvalidUnquotedMemberIdentifier(
identifier
);
if (explanation) {
this._parserContext.log.addMessageForTokenSequence(
TSDocMessageId.ReferenceUnquotedIdentifier,
explanation,
identifierExcerpt,
nodeForErrorContext
);
return undefined;
}
return new DocMemberIdentifier({
parsed: true,
configuration: this._configuration,
leftQuoteExcerpt,
identifierExcerpt,
rightQuoteExcerpt
});
}
}
/**
 * Parses the selector label that follows the colon in a member reference
 * (e.g. the "static" in `(myMember:static)`).  Returns undefined (after logging a
 * message) if DocMemberSelector reports the label as malformed.
 */
private _parseMemberSelector(
tokenReader: TokenReader,
tokenSequenceForErrorContext: TokenSequence,
nodeForErrorContext: DocNode
): DocMemberSelector | undefined {
  // Report a missing label; note that there is intentionally no early return here —
  // the next token is still consumed and validated by DocMemberSelector below
  if (tokenReader.peekTokenKind() !== TokenKind.AsciiWord) {
    this._parserContext.log.addMessageForTokenSequence(
      TSDocMessageId.ReferenceMissingLabel,
      'Expecting a selector label after the colon',
      tokenSequenceForErrorContext,
      nodeForErrorContext
    );
  }
  const selectorText: string = tokenReader.readToken().toString();
  const selectorExcerpt: TokenSequence = tokenReader.extractAccumulatedSequence();
  const memberSelector: DocMemberSelector = new DocMemberSelector({
    parsed: true,
    configuration: this._configuration,
    selectorExcerpt,
    selector: selectorText
  });
  // DocMemberSelector validates the label; surface its message if it is malformed
  if (memberSelector.errorMessage) {
    this._parserContext.log.addMessageForTokenSequence(
      TSDocMessageId.ReferenceSelectorSyntax,
      memberSelector.errorMessage,
      selectorExcerpt,
      nodeForErrorContext
    );
    return undefined;
  }
  return memberSelector;
}
/**
 * Parses an HTML start tag such as `<a href="#">` or a self-closing tag such as `<br/>`:
 * the "<" delimiter, the element name, zero or more attributes, and the closing ">" or "/>".
 * On a recoverable failure, backtracks to the starting marker and returns the error result
 * from _backtrackAndCreateErrorForFailure().
 */
private _parseHtmlStartTag(tokenReader: TokenReader): DocNode {
tokenReader.assertAccumulatedSequenceIsEmpty();
const marker: number = tokenReader.createMarker();
// Read the "<" delimiter
const lessThanToken: Token = tokenReader.readToken();
if (lessThanToken.kind !== TokenKind.LessThan) {
// This would be a parser bug -- the caller of _parseHtmlStartTag() should have verified this while
// looking ahead
throw new Error('Expecting an HTML tag starting with "<"');
}
// NOTE: CommonMark does not permit whitespace after the "<"
const openingDelimiterExcerpt: TokenSequence = tokenReader.extractAccumulatedSequence();
// Read the element name
const nameExcerpt: ResultOrFailure<TokenSequence> = this._parseHtmlName(tokenReader);
if (isFailure(nameExcerpt)) {
return this._backtrackAndCreateErrorForFailure(
tokenReader,
marker,
'Invalid HTML element: ',
nameExcerpt
);
}
const spacingAfterNameExcerpt: TokenSequence | undefined = this._tryReadSpacingAndNewlines(tokenReader);
const htmlAttributes: DocHtmlAttribute[] = [];
// Read the attributes until we see a ">" or "/>"
while (tokenReader.peekTokenKind() === TokenKind.AsciiWord) {
// Read the attribute
const attributeNode: ResultOrFailure<DocHtmlAttribute> = this._parseHtmlAttribute(tokenReader);
if (isFailure(attributeNode)) {
return this._backtrackAndCreateErrorForFailure(
tokenReader,
marker,
'The HTML element has an invalid attribute: ',
attributeNode
);
}
htmlAttributes.push(attributeNode);
}
// Read the closing "/>" or ">" as the Excerpt.suffix
tokenReader.assertAccumulatedSequenceIsEmpty();
const endDelimiterMarker: number = tokenReader.createMarker();
let selfClosingTag: boolean = false;
if (tokenReader.peekTokenKind() === TokenKind.Slash) {
tokenReader.readToken();
selfClosingTag = true;
}
if (tokenReader.peekTokenKind() !== TokenKind.GreaterThan) {
const failure: IFailure = this._createFailureForTokensSince(
tokenReader,
TSDocMessageId.HtmlTagMissingGreaterThan,
'Expecting an attribute or ">" or "/>"',
endDelimiterMarker
);
return this._backtrackAndCreateErrorForFailure(
tokenReader,
marker,
'The HTML tag has invalid syntax: ',
failure
);
}
tokenReader.readToken();
const closingDelimiterExcerpt: TokenSequence = tokenReader.extractAccumulatedSequence();
// NOTE: We don't read excerptParameters.separator here, since if there is any it
// will be represented as DocPlainText.
return new DocHtmlStartTag({
parsed: true,
configuration: this._configuration,
openingDelimiterExcerpt,
nameExcerpt,
spacingAfterNameExcerpt,
htmlAttributes,
selfClosingTag,
closingDelimiterExcerpt
});
}
/**
 * Parses one HTML attribute of the form `name="value"` or `name='value'`,
 * including any surrounding spacing.  Returns an IFailure (via the helpers)
 * if the name, "=", or quoted string is malformed.
 */
private _parseHtmlAttribute(tokenReader: TokenReader): ResultOrFailure<DocHtmlAttribute> {
  tokenReader.assertAccumulatedSequenceIsEmpty();
  // Attribute name
  const attributeNameResult: ResultOrFailure<TokenSequence> = this._parseHtmlName(tokenReader);
  if (isFailure(attributeNameResult)) {
    return attributeNameResult;
  }
  const spacingAfterName: TokenSequence | undefined = this._tryReadSpacingAndNewlines(tokenReader);
  // The "=" separator is mandatory
  if (tokenReader.peekTokenKind() !== TokenKind.Equals) {
    return this._createFailureForToken(
      tokenReader,
      TSDocMessageId.HtmlTagMissingEquals,
      'Expecting "=" after HTML attribute name'
    );
  }
  tokenReader.readToken();
  const equalsSignExcerpt: TokenSequence = tokenReader.extractAccumulatedSequence();
  const spacingAfterEquals: TokenSequence | undefined = this._tryReadSpacingAndNewlines(tokenReader);
  // Quoted attribute value
  const quotedValueResult: ResultOrFailure<string> = this._parseHtmlString(tokenReader);
  if (isFailure(quotedValueResult)) {
    return quotedValueResult;
  }
  const attributeValueExcerpt: TokenSequence = tokenReader.extractAccumulatedSequence();
  const spacingAfterValue: TokenSequence | undefined = this._tryReadSpacingAndNewlines(tokenReader);
  return new DocHtmlAttribute({
    parsed: true,
    configuration: this._configuration,
    nameExcerpt: attributeNameResult,
    spacingAfterNameExcerpt: spacingAfterName,
    equalsExcerpt: equalsSignExcerpt,
    spacingAfterEqualsExcerpt: spacingAfterEquals,
    valueExcerpt: attributeValueExcerpt,
    spacingAfterValueExcerpt: spacingAfterValue
  });
}
/**
 * Parses a single- or double-quoted HTML string and returns its contents without
 * the surrounding quotes.  Fails if the opening quote is missing, the closing quote
 * is not found before a newline or end of input, or a word character immediately
 * follows the closing quote.
 */
private _parseHtmlString(tokenReader: TokenReader): ResultOrFailure<string> {
  const marker: number = tokenReader.createMarker();
  const openingQuoteKind: TokenKind = tokenReader.peekTokenKind();
  if (openingQuoteKind !== TokenKind.DoubleQuote && openingQuoteKind !== TokenKind.SingleQuote) {
    return this._createFailureForToken(
      tokenReader,
      TSDocMessageId.HtmlTagMissingString,
      'Expecting an HTML string starting with a single-quote or double-quote character'
    );
  }
  tokenReader.readToken(); // consume the opening quote
  let accumulatedText: string = '';
  let foundClosingQuote: boolean = false;
  while (!foundClosingQuote) {
    const currentKind: TokenKind = tokenReader.peekTokenKind();
    if (currentKind === openingQuoteKind) {
      tokenReader.readToken(); // consume the closing quote
      foundClosingQuote = true;
    } else if (currentKind === TokenKind.EndOfInput || currentKind === TokenKind.Newline) {
      return this._createFailureForToken(
        tokenReader,
        TSDocMessageId.HtmlStringMissingQuote,
        'The HTML string is missing its closing quote',
        marker
      );
    } else {
      accumulatedText += tokenReader.readToken().toString();
    }
  }
  // The next attribute cannot start immediately after this one
  if (tokenReader.peekTokenKind() === TokenKind.AsciiWord) {
    return this._createFailureForToken(
      tokenReader,
      TSDocMessageId.TextAfterHtmlString,
      'The next character after a closing quote must be spacing or punctuation'
    );
  }
  return accumulatedText;
}
/**
 * Parses an HTML end tag such as `</b>`: the "</" delimiter, the element name,
 * optional spacing, and the closing ">".  On failure, backtracks to the starting
 * marker and returns an error result via the helper methods.
 */
private _parseHtmlEndTag(tokenReader: TokenReader): DocNode {
  tokenReader.assertAccumulatedSequenceIsEmpty();
  const marker: number = tokenReader.createMarker();
  // The end tag must begin with the two-character sequence "</"
  if (tokenReader.peekToken().kind !== TokenKind.LessThan) {
    return this._backtrackAndCreateError(
      tokenReader,
      marker,
      TSDocMessageId.MissingHtmlEndTag,
      'Expecting an HTML tag starting with "</"'
    );
  }
  tokenReader.readToken();
  if (tokenReader.peekToken().kind !== TokenKind.Slash) {
    return this._backtrackAndCreateError(
      tokenReader,
      marker,
      TSDocMessageId.MissingHtmlEndTag,
      'Expecting an HTML tag starting with "</"'
    );
  }
  tokenReader.readToken();
  // NOTE: Spaces are not permitted here
  // https://www.w3.org/TR/html5/syntax.html#end-tags
  const openingDelimiterExcerpt: TokenSequence = tokenReader.extractAccumulatedSequence();
  // Read the tag name
  const nameExcerpt: ResultOrFailure<TokenSequence> = this._parseHtmlName(tokenReader);
  if (isFailure(nameExcerpt)) {
    return this._backtrackAndCreateErrorForFailure(
      tokenReader,
      marker,
      'Expecting an HTML element name: ',
      nameExcerpt
    );
  }
  const spacingAfterNameExcerpt: TokenSequence | undefined = this._tryReadSpacingAndNewlines(tokenReader);
  // Read the closing ">"
  if (tokenReader.peekTokenKind() !== TokenKind.GreaterThan) {
    const failure: IFailure = this._createFailureForToken(
      tokenReader,
      TSDocMessageId.HtmlTagMissingGreaterThan,
      'Expecting a closing ">" for the HTML tag'
    );
    return this._backtrackAndCreateErrorForFailure(tokenReader, marker, '', failure);
  }
  tokenReader.readToken();
  const closingDelimiterExcerpt: TokenSequence = tokenReader.extractAccumulatedSequence();
  return new DocHtmlEndTag({
    parsed: true,
    configuration: this._configuration,
    openingDelimiterExcerpt,
    nameExcerpt,
    spacingAfterNameExcerpt,
    closingDelimiterExcerpt
  });
}
/**
 * Parses an HTML name such as an element name or attribute name: a run of
 * AsciiWord, hyphen, and period tokens, validated by StringChecks.
 * Fails immediately if the current token is spacing.
 */
private _parseHtmlName(tokenReader: TokenReader): ResultOrFailure<TokenSequence> {
  const marker: number = tokenReader.createMarker();
  if (tokenReader.peekTokenKind() === TokenKind.Spacing) {
    return this._createFailureForTokensSince(
      tokenReader,
      TSDocMessageId.MalformedHtmlName,
      'A space is not allowed here',
      marker
    );
  }
  // Accumulate the run of name characters
  for (;;) {
    const kind: TokenKind = tokenReader.peekTokenKind();
    if (kind !== TokenKind.Hyphen && kind !== TokenKind.Period && kind !== TokenKind.AsciiWord) {
      break;
    }
    tokenReader.readToken();
  }
  const nameExcerpt: TokenSequence | undefined = tokenReader.tryExtractAccumulatedSequence();
  if (nameExcerpt === undefined) {
    return this._createFailureForToken(
      tokenReader,
      TSDocMessageId.MalformedHtmlName,
      'Expecting an HTML name'
    );
  }
  const validationMessage: string | undefined = StringChecks.explainIfInvalidHtmlName(
    nameExcerpt.toString()
  );
  if (validationMessage) {
    return this._createFailureForTokensSince(
      tokenReader,
      TSDocMessageId.MalformedHtmlName,
      validationMessage,
      marker
    );
  }
  return nameExcerpt;
}
/**
 * Parses a fenced code block: an opening ``` delimiter that must appear at the
 * start of a line, an optional language specifier, the code content, and a
 * closing ``` delimiter, returning a DocFencedCode node (or DocErrorText on
 * failure).
 */
private _parseFencedCode(tokenReader: TokenReader): DocNode {
tokenReader.assertAccumulatedSequenceIsEmpty();
const startMarker: number = tokenReader.createMarker();
// Marker of the last token of the three-backtick opening delimiter
const endOfOpeningDelimiterMarker: number = startMarker + 2;
// The opening delimiter is only valid at the start of a line (or input)
switch (tokenReader.peekPreviousTokenKind()) {
case TokenKind.Newline:
case TokenKind.EndOfInput:
break;
default:
return this._backtrackAndCreateErrorRange(
tokenReader,
startMarker,
// include the three backticks so they don't get reinterpreted as a code span
endOfOpeningDelimiterMarker,
TSDocMessageId.CodeFenceOpeningIndent,
'The opening backtick for a code fence must appear at the start of the line'
);
}
// Read the opening ``` delimiter
let openingDelimiter: string = '';
openingDelimiter += tokenReader.readToken();
openingDelimiter += tokenReader.readToken();
openingDelimiter += tokenReader.readToken();
if (openingDelimiter !== '```') {
// This would be a parser bug -- the caller of _parseFencedCode() should have verified this while
// looking ahead to distinguish code spans/fences
throw new Error('Expecting three backticks');
}
const openingFenceExcerpt: TokenSequence = tokenReader.extractAccumulatedSequence();
// Read any spaces after the delimiter,
// but NOT the Newline since that goes with the spacingAfterLanguageExcerpt
while (tokenReader.peekTokenKind() === TokenKind.Spacing) {
tokenReader.readToken();
}
const spacingAfterOpeningFenceExcerpt:
| TokenSequence
| undefined = tokenReader.tryExtractAccumulatedSequence();
// Read the language specifier (if present) and newline
let done: boolean = false;
// Marks where the trailing run of spacing (before the newline) begins; reset
// whenever non-spacing content is seen, so it ends up pointing at the padding
// between the language specifier and the newline
let startOfPaddingMarker: number | undefined = undefined;
while (!done) {
switch (tokenReader.peekTokenKind()) {
case TokenKind.Spacing:
case TokenKind.Newline:
if (startOfPaddingMarker === undefined) {
// Starting a new run of spacing characters
startOfPaddingMarker = tokenReader.createMarker();
}
if (tokenReader.peekTokenKind() === TokenKind.Newline) {
done = true;
}
tokenReader.readToken();
break;
case TokenKind.Backtick:
const failure: IFailure = this._createFailureForToken(
tokenReader,
TSDocMessageId.CodeFenceSpecifierSyntax,
'The language specifier cannot contain backtick characters'
);
return this._backtrackAndCreateErrorRangeForFailure(
tokenReader,
startMarker,
endOfOpeningDelimiterMarker,
'Error parsing code fence: ',
failure
);
case TokenKind.EndOfInput:
const failure2: IFailure = this._createFailureForToken(
tokenReader,
TSDocMessageId.CodeFenceMissingDelimiter,
'Missing closing delimiter'
);
return this._backtrackAndCreateErrorRangeForFailure(
tokenReader,
startMarker,
endOfOpeningDelimiterMarker,
'Error parsing code fence: ',
failure2
);
default:
// more non-spacing content
startOfPaddingMarker = undefined;
tokenReader.readToken();
break;
}
}
// At this point, we must have accumulated at least a newline token.
// Also, startOfPaddingMarker is guaranteed assigned (the loop can only exit
// via the Newline case, which assigns it), justifying the "!" below.
// Example: "pov-ray sdl \n"
const restOfLineExcerpt: TokenSequence = tokenReader.extractAccumulatedSequence();
// Example: "pov-ray sdl"
const languageExcerpt: TokenSequence = restOfLineExcerpt.getNewSequence(
restOfLineExcerpt.startIndex,
startOfPaddingMarker!
);
// Example: " \n"
const spacingAfterLanguageExcerpt: TokenSequence | undefined = restOfLineExcerpt.getNewSequence(
startOfPaddingMarker!,
restOfLineExcerpt.endIndex
);
// Read the code content until we see the closing ``` delimiter
let codeEndMarker: number = -1;
let closingFenceStartMarker: number = -1;
done = false;
// Definitely assigned before use: the loop can only terminate through the
// Newline case (which assigns it) or by returning early on EndOfInput
let tokenBeforeDelimiter: Token;
while (!done) {
switch (tokenReader.peekTokenKind()) {
case TokenKind.EndOfInput:
const failure2: IFailure = this._createFailureForToken(
tokenReader,
TSDocMessageId.CodeFenceMissingDelimiter,
'Missing closing delimiter'
);
return this._backtrackAndCreateErrorRangeForFailure(
tokenReader,
startMarker,
endOfOpeningDelimiterMarker,
'Error parsing code fence: ',
failure2
);
case TokenKind.Newline:
tokenBeforeDelimiter = tokenReader.readToken();
codeEndMarker = tokenReader.createMarker();
// Skip any indentation before a potential closing fence
while (tokenReader.peekTokenKind() === TokenKind.Spacing) {
tokenBeforeDelimiter = tokenReader.readToken();
}
if (tokenReader.peekTokenKind() !== TokenKind.Backtick) {
break;
}
closingFenceStartMarker = tokenReader.createMarker();
tokenReader.readToken(); // first backtick
if (tokenReader.peekTokenKind() !== TokenKind.Backtick) {
break;
}
tokenReader.readToken(); // second backtick
if (tokenReader.peekTokenKind() !== TokenKind.Backtick) {
break;
}
tokenReader.readToken(); // third backtick
done = true;
break;
default:
tokenReader.readToken();
break;
}
}
// The closing fence was found but was preceded by spacing rather than a
// newline: report the indentation (non-fatal; parsing continues)
if (tokenBeforeDelimiter!.kind !== TokenKind.Newline) {
this._parserContext.log.addMessageForTextRange(
TSDocMessageId.CodeFenceClosingIndent,
'The closing delimiter for a code fence must not be indented',
tokenBeforeDelimiter!.range
);
}
// Example: "code 1\ncode 2\n ```"
const codeAndDelimiterExcerpt: TokenSequence = tokenReader.extractAccumulatedSequence();
// Example: "code 1\ncode 2\n"
const codeExcerpt: TokenSequence = codeAndDelimiterExcerpt.getNewSequence(
codeAndDelimiterExcerpt.startIndex,
codeEndMarker
);
// Example: " "
const spacingBeforeClosingFenceExcerpt:
| TokenSequence
| undefined = codeAndDelimiterExcerpt.getNewSequence(codeEndMarker, closingFenceStartMarker);
// Example: "```"
const closingFenceExcerpt: TokenSequence = codeAndDelimiterExcerpt.getNewSequence(
closingFenceStartMarker,
codeAndDelimiterExcerpt.endIndex
);
// Read the spacing and newline after the closing delimiter
done = false;
while (!done) {
switch (tokenReader.peekTokenKind()) {
case TokenKind.Spacing:
tokenReader.readToken();
break;
case TokenKind.Newline:
done = true;
tokenReader.readToken();
break;
case TokenKind.EndOfInput:
done = true;
break;
default:
// Trailing junk after the closing fence: report it but do not fail
this._parserContext.log.addMessageForTextRange(
TSDocMessageId.CodeFenceClosingSyntax,
'Unexpected characters after closing delimiter for code fence',
tokenReader.peekToken().range
);
done = true;
break;
}
}
// Example: " \n"
const spacingAfterClosingFenceExcerpt:
| TokenSequence
| undefined = tokenReader.tryExtractAccumulatedSequence();
return new DocFencedCode({
parsed: true,
configuration: this._configuration,
openingFenceExcerpt,
spacingAfterOpeningFenceExcerpt,
languageExcerpt,
spacingAfterLanguageExcerpt,
codeExcerpt,
spacingBeforeClosingFenceExcerpt,
closingFenceExcerpt,
spacingAfterClosingFenceExcerpt
});
}
/**
 * Parses a backtick-delimited code span, returning a DocCodeSpan node
 * (or DocErrorText if the span is empty or unterminated).
 */
private _parseCodeSpan(tokenReader: TokenReader): DocNode {
  tokenReader.assertAccumulatedSequenceIsEmpty();
  const startMarker: number = tokenReader.createMarker();

  // Parse the opening backtick
  if (tokenReader.peekTokenKind() !== TokenKind.Backtick) {
    // This would be a parser bug -- the caller of _parseCodeSpan() should have verified this while
    // looking ahead to distinguish code spans/fences
    throw new Error('Expecting a code span starting with a backtick character "`"');
  }

  tokenReader.readToken(); // consume the opening backtick
  const openingDelimiterExcerpt: TokenSequence = tokenReader.extractAccumulatedSequence();

  let codeExcerpt: TokenSequence | undefined = undefined;
  let closingDelimiterExcerpt: TokenSequence | undefined = undefined;

  // Scan forward until the matching backtick, giving up at a newline or end of input
  let scanning: boolean = true;
  while (scanning) {
    const kind: TokenKind = tokenReader.peekTokenKind();
    if (kind === TokenKind.Backtick) {
      if (tokenReader.isAccumulatedSequenceEmpty()) {
        // "``" with nothing in between
        return this._backtrackAndCreateErrorRange(
          tokenReader,
          startMarker,
          startMarker + 1,
          TSDocMessageId.CodeSpanEmpty,
          'A code span must contain at least one character between the backticks'
        );
      }
      codeExcerpt = tokenReader.extractAccumulatedSequence();
      tokenReader.readToken();
      closingDelimiterExcerpt = tokenReader.extractAccumulatedSequence();
      scanning = false;
    } else if (kind === TokenKind.EndOfInput || kind === TokenKind.Newline) {
      return this._backtrackAndCreateError(
        tokenReader,
        startMarker,
        TSDocMessageId.CodeSpanMissingDelimiter,
        'The code span is missing its closing backtick'
      );
    } else {
      tokenReader.readToken();
    }
  }

  return new DocCodeSpan({
    parsed: true,
    configuration: this._configuration,
    openingDelimiterExcerpt,
    codeExcerpt,
    closingDelimiterExcerpt
  });
}
/**
 * Consumes any contiguous run of spacing and newline tokens, returning the
 * accumulated sequence, or undefined if no such tokens were present.
 */
private _tryReadSpacingAndNewlines(tokenReader: TokenReader): TokenSequence | undefined {
  for (;;) {
    const kind: TokenKind = tokenReader.peekTokenKind();
    if (kind !== TokenKind.Spacing && kind !== TokenKind.Newline) {
      break;
    }
    tokenReader.readToken();
  }
  return tokenReader.tryExtractAccumulatedSequence();
}
/**
 * Reads the next token and reports it as a DocErrorText node, also logging
 * the message with the parser context.
 */
private _createError(
  tokenReader: TokenReader,
  messageId: TSDocMessageId,
  errorMessage: string
): DocErrorText {
  tokenReader.readToken();
  const excerpt: TokenSequence = tokenReader.extractAccumulatedSequence();

  const errorNode: DocErrorText = new DocErrorText({
    parsed: true,
    configuration: this._configuration,
    textExcerpt: excerpt,
    messageId,
    errorMessage,
    errorLocation: excerpt
  });
  this._parserContext.log.addMessageForDocErrorText(errorNode);
  return errorNode;
}
/**
 * Rewinds to the specified marker, then reads the next token and reports it
 * as a DocErrorText node (see {@link _createError}).
 */
private _backtrackAndCreateError(
  tokenReader: TokenReader,
  marker: number,
  messageId: TSDocMessageId,
  errorMessage: string
): DocErrorText {
  // Rewind first so the error excerpt starts at the marker position
  tokenReader.backtrackToMarker(marker);
  return this._createError(tokenReader, messageId, errorMessage);
}
/**
 * Rewinds to errorStartMarker, re-reads the tokens up to and including
 * errorInclusiveEndMarker, and reports that span as a DocErrorText node.
 */
private _backtrackAndCreateErrorRange(
  tokenReader: TokenReader,
  errorStartMarker: number,
  errorInclusiveEndMarker: number,
  messageId: TSDocMessageId,
  errorMessage: string
): DocErrorText {
  tokenReader.backtrackToMarker(errorStartMarker);
  // Re-consume everything up to the end marker...
  while (tokenReader.createMarker() !== errorInclusiveEndMarker) {
    tokenReader.readToken();
  }
  // ...and the end marker's own token, unless we are at the end of input
  if (tokenReader.peekTokenKind() !== TokenKind.EndOfInput) {
    tokenReader.readToken();
  }

  const excerpt: TokenSequence = tokenReader.extractAccumulatedSequence();
  const errorNode: DocErrorText = new DocErrorText({
    parsed: true,
    configuration: this._configuration,
    textExcerpt: excerpt,
    messageId,
    errorMessage,
    errorLocation: excerpt
  });
  this._parserContext.log.addMessageForDocErrorText(errorNode);
  return errorNode;
}
/**
 * Rewinds to the specified marker and reads the next token, reporting it as a
 * DocErrorText node whose message and location come from the given IFailure.
 */
private _backtrackAndCreateErrorForFailure(
  tokenReader: TokenReader,
  marker: number,
  errorMessagePrefix: string,
  failure: IFailure
): DocErrorText {
  tokenReader.backtrackToMarker(marker);
  tokenReader.readToken();

  const excerpt: TokenSequence = tokenReader.extractAccumulatedSequence();
  const errorNode: DocErrorText = new DocErrorText({
    parsed: true,
    configuration: this._configuration,
    textExcerpt: excerpt,
    messageId: failure.failureMessageId,
    // The prefix gives context for where the nested failure occurred
    errorMessage: errorMessagePrefix + failure.failureMessage,
    errorLocation: failure.failureLocation
  });
  this._parserContext.log.addMessageForDocErrorText(errorNode);
  return errorNode;
}
/**
 * Rewinds to errorStartMarker, re-reads the tokens up to and including
 * errorInclusiveEndMarker, and reports the span as a DocErrorText node whose
 * message and location come from the given IFailure.
 */
private _backtrackAndCreateErrorRangeForFailure(
  tokenReader: TokenReader,
  errorStartMarker: number,
  errorInclusiveEndMarker: number,
  errorMessagePrefix: string,
  failure: IFailure
): DocErrorText {
  tokenReader.backtrackToMarker(errorStartMarker);
  // Re-consume everything up to the end marker...
  while (tokenReader.createMarker() !== errorInclusiveEndMarker) {
    tokenReader.readToken();
  }
  // ...and the end marker's own token, unless we are at the end of input
  if (tokenReader.peekTokenKind() !== TokenKind.EndOfInput) {
    tokenReader.readToken();
  }

  const excerpt: TokenSequence = tokenReader.extractAccumulatedSequence();
  const errorNode: DocErrorText = new DocErrorText({
    parsed: true,
    configuration: this._configuration,
    textExcerpt: excerpt,
    messageId: failure.failureMessageId,
    errorMessage: errorMessagePrefix + failure.failureMessage,
    errorLocation: failure.failureLocation
  });
  this._parserContext.log.addMessageForDocErrorText(errorNode);
  return errorNode;
}
/**
 * Creates an IFailure whose TokenSequence is a single token. If a marker is
 * not specified, the current token is used.
 *
 * @param tokenMarker - optional marker identifying the failing token; when
 *   omitted, the reader's current position is used
 */
private _createFailureForToken(
  tokenReader: TokenReader,
  failureMessageId: TSDocMessageId,
  failureMessage: string,
  tokenMarker?: number
): IFailure {
  // Use an explicit undefined check: marker 0 (the very first token) is a
  // valid position, but a truthiness test ("!tokenMarker") would discard it
  // and silently report the current token instead.
  if (tokenMarker === undefined) {
    tokenMarker = tokenReader.createMarker();
  }

  // The failure location is the single token at tokenMarker
  const tokenSequence: TokenSequence = new TokenSequence({
    parserContext: this._parserContext,
    startIndex: tokenMarker,
    endIndex: tokenMarker + 1
  });

  return {
    failureMessageId,
    failureMessage,
    failureLocation: tokenSequence
  };
}
/**
 * Creates an IFailure whose TokenSequence starts at startMarker and spans all
 * tokens read since then. If no tokens were read, the next token is used.
 */
private _createFailureForTokensSince(
  tokenReader: TokenReader,
  failureMessageId: TSDocMessageId,
  failureMessage: string,
  startMarker: number
): IFailure {
  let endMarker: number = tokenReader.createMarker();
  if (endMarker < startMarker) {
    // This would be a parser bug
    throw new Error('Invalid startMarker');
  }
  // Guarantee a non-empty range by including at least one token
  if (endMarker === startMarker) {
    ++endMarker;
  }

  const failureLocation: TokenSequence = new TokenSequence({
    parserContext: this._parserContext,
    startIndex: startMarker,
    endIndex: endMarker
  });

  return {
    failureMessageId,
    failureMessage,
    failureLocation
  };
}
} | the_stack |
import {
CURRENCY_USD,
LANGUAGE_EN,
} from '../../../helpers/site-context-selector';
import {
waitForCategoryPage,
waitForPage,
waitForProductPage,
} from '../../checkout-flow';
// Expected properties of a CDS strategy service request, used to assert the
// query parameters and headers of intercepted requests.
interface StrategyRequestContext {
language?: string;
category?: string;
productIds?: string[];
facets?: string[];
containsConsentReference?: boolean;
}
// Base site context used by every strategy request in these tests
const site = 'electronics-spa';
export const DEFAULT_LANGUAGE = LANGUAGE_EN;
export const DEFAULT_CURRENCY = CURRENCY_USD;
// Page size the storefront is expected to request from the strategy service
const productDisplayCount = 10;
// Site-context fixtures for language/currency switching tests
export const japaneseLanguage = 'ja';
export const englishFilmProductText = 'Film';
export const japaneseFilmProductText = 'プ';
export const dollarCurrencySymbol = '$';
export const yenCurrencySymbol = '¥';
// Catalog fixtures: brand, store, and category names/codes/paths
export const canonBrandName = 'Canon';
export const canonBrandCode = 'brand_10';
export const canonBrandPagePath = `Brands/${canonBrandName}/c/${canonBrandCode}`;
export const chibaStoreName = 'Chiba';
export const filmCamerasCategoryName = 'Film Cameras';
export const filmCamerasCategoryCode = '574';
export const filmCamerasCategoryPagePath = `Open-Catalogue/Cameras/Film-Cameras/c/${filmCamerasCategoryCode}`;
export const camcordersCategoryName = 'Camcorders';
export const camcordersCategoryCode = '584';
export const slrCategoryCode = '578';
export const slrNonProductListCategoryPagePath = `Open-Catalogue/Cameras/Digital-Cameras/Digital-SLR/c/${slrCategoryCode}`;
// Readability flags for boolean test-helper arguments
export const requestContainsConsentReference = true;
export const checkForCarouselClickEvent = true;
// Selectors / event schemas used by the carousel helpers below
const merchandisingCarouselTagName = 'cx-merchandising-carousel';
const carouselViewedEventSchema = 'context/commerce/carouselViewed';
const carouselClickedEventSchema = 'context/commerce/carouselClicked';
export const carouselEventRequestAlias = 'carouselEventApiRequest';
/*
* NOTE: Ids of actual products in the storefront need to be returned by the stub CDS strategy service
* response as the products need to be retrieved from the spartacus product service
*/
export const STRATEGY_RESPONSE = {
metadata: {
mixcardId: 'cypress-test-mixcard',
},
products: [
{
id: '779864',
metadata: {
'cypress-test-product-metadata': 'product-1-metadata-value',
},
},
{
id: '832382',
metadata: {
'cypress-test-product-metadata': 'product-2-metadata-value',
},
},
{
id: '779866',
metadata: {
'cypress-test-product-metadata': 'product-3-metadata-value',
},
},
],
};
/**
 * Asserts the carousel-level metadata attributes (slot count and mixcard id)
 * rendered by the merchandising carousel.
 */
function verifyCarouselLevelMetadata(
  $merchandisingCarousel: JQuery<HTMLElement>
): void {
  cy.wrap($merchandisingCarousel).within(() => {
    cy.get('.data-cx-merchandising-carousel').should(($metadata) => {
      expect($metadata)
        .to.have.attr('data-cx-merchandising-carousel-slots')
        .equal(STRATEGY_RESPONSE.products.length.toString());
      expect($metadata)
        .to.have.attr('data-cx-merchandising-carousel-mixcardid')
        .equal(STRATEGY_RESPONSE.metadata.mixcardId);
    });
  });
}
/**
 * Asserts that the carousel item at the given index renders the corresponding
 * stubbed product's metadata attributes, link, title, and price.
 */
function verifyCarouselItemRendered(
  $carouselItem: JQuery<HTMLElement>,
  index: number
): void {
  const expectedProduct = STRATEGY_RESPONSE.products[index];

  cy.wrap($carouselItem).within(() => {
    // Metadata attributes stamped onto the product element
    cy.get('.data-cx-merchandising-product').should(($metadata) => {
      expect($metadata)
        .to.have.attr('data-cx-merchandising-product-slot')
        .equal(`${index + 1}`);
      expect($metadata)
        .to.have.attr('data-cx-merchandising-product-id')
        .equal(expectedProduct.id);
      expect($metadata)
        .to.have.attr(
          'data-cx-merchandising-product-cypress-test-product-metadata'
        )
        .equal(expectedProduct.metadata['cypress-test-product-metadata']);
    });
    // Visible content of the product card
    cy.get('a').within(() => {
      cy.root().should('have.attr', 'href');
      cy.get('h4').should('not.be.empty');
      cy.get('.price').should('not.be.empty');
    });
  });
}
/**
 * Asserts that the carousel renders exactly one item per stubbed product and
 * that each item displays its product correctly.
 */
function verifyCarouselItemsRendered(
  $merchandisingCarousel: JQuery<HTMLElement>
): void {
  cy.wrap($merchandisingCarousel)
    .get('.item')
    .should('have.length', STRATEGY_RESPONSE.products.length)
    .each(($item, slotIndex) => verifyCarouselItemRendered($item, slotIndex));
}
/**
 * Verifies that the first merchandising carousel on the page is visible,
 * carries the expected metadata, renders all stubbed products, and then
 * scrolls it into view (which triggers the carousel view event).
 */
function verifyMerchandisingCarouselRendersProducts(): void {
cy.get(merchandisingCarouselTagName)
/*
* There could be multiple merchandising carousels on the page being used to test them,
* but as we are stubbing the product retrieval response all of them will show the same products.
* Limit our tests to the first carousel on the page by using first() and then within()
*/
.first()
.should('be.visible')
.within(($merchandisingCarousel) => {
verifyCarouselLevelMetadata($merchandisingCarousel);
verifyCarouselItemsRendered($merchandisingCarousel);
return cy.wrap($merchandisingCarousel);
})
/*
* If we scroll immediately (i.e. before checking for the carousel item DOM elements) then sometimes
* the carousel item observable has not finished emitting with the full product details, so not all of the products may be rendered.
* This will then cause a view event to be sent without all the product skus, which then makes the tests flakey.
* If we check the DOM for the carousel is as we expect first and then scroll it into view we get more reliable behaviour
*/
.scrollIntoView({ offset: { top: 50, left: 50 } })
.should('be.visible');
}
/**
 * Asserts the fields common to all carousel events: the identifying fields
 * are present and the mixcard id matches the stubbed strategy response.
 */
function verifyCarouselEvent(carouselEvent: any) {
  for (const requiredField of ['strategyId', 'carouselId', 'carouselName']) {
    expect(carouselEvent[requiredField]).to.be.ok;
  }
  expect(carouselEvent['mixCardId']).to.equal(
    STRATEGY_RESPONSE.metadata.mixcardId
  );
}
/**
 * Asserts a carouselViewed event: common fields plus the complete set of
 * product SKUs from the stubbed strategy response.
 */
function verifyCarouselViewEvent(carouselEvent: any) {
  verifyCarouselEvent(carouselEvent);
  expect(carouselEvent['productSkus']).to.have.members(
    STRATEGY_RESPONSE.products.map((product) => product.id)
  );
}
/**
 * Asserts a carouselClicked event: common fields plus a populated image URL
 * and the SKU of the clicked product.
 */
function verifyCarouselClickEvent(productSku: string, carouselEvent: any) {
  verifyCarouselEvent(carouselEvent);
  expect(carouselEvent['imageUrl']).to.be.ok;
  expect(carouselEvent['sku']).to.equal(productSku);
}
/**
 * Waits for the stubbed CDS strategy service request identified by the alias
 * and verifies that its URL query parameters and headers match the expected
 * request context.
 *
 * @param requestAlias - Cypress alias of the intercepted strategy request
 * @param strategyRequestContext - expected language/category/products/facets
 *   and whether a consent-reference header must be present
 */
export function verifyRequestToStrategyService(
  requestAlias: string,
  strategyRequestContext: StrategyRequestContext
): void {
  cy.wait(`@${requestAlias}`).its('response.statusCode').should('eq', 200);
  cy.get<Cypress.WaitXHR>(`@${requestAlias}`).then(({ request }: any) => {
    expect(request.url).to.contain(`site=${site}`);
    // When no language is given in the context, the site default is expected
    expect(request.url).to.contain(
      `language=${strategyRequestContext.language || DEFAULT_LANGUAGE}`
    );
    expect(request.url).to.contain(`pageSize=${productDisplayCount}`);

    if (strategyRequestContext.category) {
      expect(request.url).to.contain(
        `category=${strategyRequestContext.category}`
      );
    } else {
      expect(request.url).not.to.contain('category=');
    }

    if (strategyRequestContext.productIds) {
      expect(request.url).to.contain(
        `products=${strategyRequestContext.productIds}`
      );
    } else {
      expect(request.url).not.to.contain('products=');
    }

    if (strategyRequestContext.facets) {
      expect(request.url).to.contain(
        `facets=${strategyRequestContext.facets.join(':')}`
      );
    } else {
      expect(request.url).not.to.contain('facets=');
    }

    // Use a statement-level if/else rather than a ternary expression used as
    // a statement (the original form is an idiom/lint violation).
    if (strategyRequestContext.containsConsentReference) {
      expect(request.headers).to.have.property('consent-reference');
    } else {
      expect(request.headers).to.not.have.property('consent-reference');
    }
  });
}
export function verifyMerchandisingCarouselRendersOnHomePage(
strategyRequestAlias: string,
language?: string,
containsConsentReference?: boolean
): void {
verifyRequestToStrategyService(strategyRequestAlias, {
language,
containsConsentReference,
});
verifyMerchandisingCarouselRendersProducts();
}
export function verifyMerchandisingCarouselRendersOnCategoryPage(
strategyRequestAlias: string,
categoryCode: string,
language?: string,
additionalFacets?: string[],
containsConsentReference?: boolean
): void {
verifyRequestToStrategyService(strategyRequestAlias, {
language,
category: categoryCode,
facets: additionalFacets,
containsConsentReference,
});
verifyMerchandisingCarouselRendersProducts();
}
/**
 * Verifies the strategy request and carousel rendering on a brand page.
 *
 * @param containsConsentReference - optional, added for parity with the
 *   category-page helper; when omitted the request is asserted NOT to carry a
 *   consent-reference header (same behaviour as before this parameter existed)
 */
export function verifyMerchandisingCarouselRendersOnBrandPage(
  strategyRequestAlias: string,
  brandCode: string,
  language?: string,
  additionalFacets?: string[],
  containsConsentReference?: boolean
): void {
  verifyRequestToStrategyService(strategyRequestAlias, {
    language,
    category: brandCode,
    facets: additionalFacets,
    containsConsentReference,
  });
  verifyMerchandisingCarouselRendersProducts();
}
/**
 * Verifies the strategy request and carousel rendering on a product detail
 * page; the product id (when given) is expected in the request's products list.
 */
export function verifyMerchandisingCarouselRendersOnPDPPage(
  strategyRequestAlias: string,
  productId: string,
  language?: string
): void {
  const context: StrategyRequestContext = { language };
  if (productId) {
    context.productIds = [productId];
  }
  verifyRequestToStrategyService(strategyRequestAlias, context);
  verifyMerchandisingCarouselRendersProducts();
}
/**
 * Clicks the named facet value inside the given facet group.
 */
export function applyFacet(facetGroup: string, facetName: string): void {
  cy.get('cx-facet .heading')
    .contains(facetGroup)
    .parents('cx-facet')
    .within(() => cy.get('a.value').contains(facetName).click());
}
/**
 * Asserts the title text of the first carousel item contains one string and
 * not another (used for language-switch verification).
 */
export function verifyFirstCarouselItemTextContent(
  toContain: string,
  toNotContain: string
): void {
  const firstTitleSelector = `${merchandisingCarouselTagName} .item h4`;
  cy.get(firstTitleSelector)
    .first()
    .should('contain.text', toContain)
    .and('not.contain.text', toNotContain);
}
/**
 * Asserts the first carousel item's price is visible and shows the expected
 * currency symbol (used for currency-switch verification).
 */
export function verifyFirstCarouselItemPrice(currencySymbol: string): void {
  const firstPriceSelector = `${merchandisingCarouselTagName} .item .price`;
  cy.get(firstPriceSelector)
    .first()
    .should('be.visible')
    .and('contain.text', currencySymbol);
}
/**
 * Clicks the carousel card for the given product and waits for the resulting
 * product page load; optionally verifies the carouselClicked event payload.
 *
 * @param productId - product whose carousel card should be clicked
 * @param checkForCarouselEvent - when true, also assert the click event
 */
export function clickOnCarouselItem(
  productId: string,
  checkForCarouselEvent?: boolean
): void {
  // Fix: the attribute selector was missing its closing "]", making it an
  // invalid CSS selector.
  cy.get(
    `.data-cx-merchandising-product[data-cx-merchandising-product-id='${productId}']`
  )
    .parent()
    .within(() => {
      cy.root().should('be.visible');
      // Register the product-page intercept BEFORE clicking the link
      const productPage = waitForProductPage(productId, 'getProductPage');
      cy.get('a').click();
      cy.wait(`@${productPage}`).its('response.statusCode').should('eq', 200);
    });

  if (checkForCarouselEvent) {
    cy.waitForCarouselEvent(carouselClickedEventSchema).should((sentEvent) => {
      verifyCarouselClickEvent(productId, sentEvent);
    });
  }
}
/**
 * Navigates to the homepage via the site logo and waits for it to load.
 */
export function navigateToHomepage(): void {
  const homePageAlias = waitForPage('homepage', 'getHomePage');
  cy.get('cx-page-slot.SiteLogo').click();
  cy.wait(`@${homePageAlias}`).its('response.statusCode').should('eq', 200);
}
/**
 * Navigates to a category via the category navigation menu and waits for the
 * category page to load.
 */
export function navigateToCategory(
  categoryName: string,
  categoryCode: string
): void {
  const categoryPageAlias = waitForCategoryPage(categoryCode, 'getCategory');
  // force: true because the menu link may be hidden behind a flyout
  cy.get('cx-category-navigation cx-generic-link a')
    .contains(categoryName)
    .click({ force: true });
  cy.wait(`@${categoryPageAlias}`).its('response.statusCode').should('eq', 200);
}
export function waitForCarouselViewEvent(): void {
cy.waitForCarouselEvent(carouselViewedEventSchema).should((sentEvent) => {
verifyCarouselViewEvent(sentEvent);
});
} | the_stack |
import { config } from "../config";
import { Logger } from "../utils/logger";
import { db, privateDB } from "../databases/databases";
import { getMaxResThumbnail, YouTubeAPI } from "../utils/youtubeApi";
import { getSubmissionUUID } from "../utils/getSubmissionUUID";
import { getHash } from "../utils/getHash";
import { getIP } from "../utils/getIP";
import { getFormattedTime } from "../utils/getFormattedTime";
import { isUserTrustworthy } from "../utils/isUserTrustworthy";
import { dispatchEvent } from "../utils/webhookUtils";
import { Request, Response } from "express";
import { ActionType, Category, CategoryActionType, IncomingSegment, SegmentUUID, Service, VideoDuration, VideoID } from "../types/segments.model";
import { deleteLockCategories } from "./deleteLockCategories";
import { getCategoryActionType } from "../utils/categoryInfo";
import { QueryCacher } from "../utils/queryCacher";
import { getReputation } from "../utils/reputation";
import { APIVideoData, APIVideoInfo } from "../types/youtubeApi.model";
import { UserID } from "../types/user.model";
import { isUserVIP } from "../utils/isUserVIP";
import { parseUserAgent } from "../utils/userAgent";
import { getService } from "../utils/getService";
import axios from "axios";
// Outcome of a single submission validation step.
type CheckResult = {
pass: boolean, // true when the check succeeded
errorMessage: string, // human-readable reason, empty when pass is true
errorCode: number // status code to report, 0 when pass is true
};
// Shared "no error" result returned by checks that succeed.
const CHECK_PASS: CheckResult = {
pass: true,
errorMessage: "",
errorCode: 0
};
/**
 * Dispatches a webhook event describing a submitted segment, scoped to
 * "submissions.new" for a user's first submission and "submissions.other"
 * otherwise.
 */
async function sendWebhookNotification(userID: string, videoID: string, UUID: string, submissionCount: number, youtubeData: APIVideoData, { submissionStart, submissionEnd }: { submissionStart: number; submissionEnd: number; }, segmentInfo: any) {
    const userNameRow = await db.prepare("get", `SELECT "userName" FROM "userNames" WHERE "userID" = ?`, [userID]);
    const userName = userNameRow !== undefined ? userNameRow.userName : null;

    // First-ever submissions get their own webhook scope
    const scopeName = submissionCount <= 1 ? "submissions.new" : "submissions.other";

    dispatchEvent(scopeName, {
        "video": {
            "id": videoID,
            "title": youtubeData?.title,
            "thumbnail": getMaxResThumbnail(youtubeData) || null,
            "url": `https://www.youtube.com/watch?v=${videoID}`,
        },
        "submission": {
            "UUID": UUID,
            "category": segmentInfo.category,
            "startTime": submissionStart,
            "endTime": submissionEnd,
            "user": {
                "UUID": userID,
                "username": userName,
            },
        },
    });
}
/**
 * Sends submission webhooks for a YouTube submission: the generic
 * dispatchEvent notification, plus a Discord webhook when this is the user's
 * first submission and a Discord URL is configured. No-op for non-YouTube
 * services or when the YouTube API lookup failed.
 */
async function sendWebhooks(apiVideoInfo: APIVideoInfo, userID: string, videoID: string, UUID: string, segmentInfo: any, service: Service) {
if (apiVideoInfo && service == Service.YouTube) {
const userSubmissionCountRow = await db.prepare("get", `SELECT count(*) as "submissionCount" FROM "sponsorTimes" WHERE "userID" = ?`, [userID]);
const { data, err } = apiVideoInfo;
if (err) return;
// Segment boundaries arrive as strings in the request payload
const startTime = parseFloat(segmentInfo.segment[0]);
const endTime = parseFloat(segmentInfo.segment[1]);
// Fire-and-forget: intentionally not awaited
sendWebhookNotification(userID, videoID, UUID, userSubmissionCountRow.submissionCount, data, {
submissionStart: startTime,
submissionEnd: endTime,
}, segmentInfo);
// If it is a first time submission
// Then send a notification to discord
if (config.discordFirstTimeSubmissionsWebhookURL === null || userSubmissionCountRow.submissionCount > 1) return;
// Fire-and-forget POST; failures are only logged
axios.post(config.discordFirstTimeSubmissionsWebhookURL, {
"embeds": [{
"title": data?.title,
"url": `https://www.youtube.com/watch?v=${videoID}&t=${(parseInt(startTime.toFixed(0)) - 2)}s#requiredSegment=${UUID}`,
"description": `Submission ID: ${UUID}\
\n\nTimestamp: \
${getFormattedTime(startTime)} to ${getFormattedTime(endTime)}\
\n\nCategory: ${segmentInfo.category}`,
"color": 10813440,
"author": {
"name": userID,
},
"thumbnail": {
"url": getMaxResThumbnail(data) || "",
},
}],
})
.then(res => {
if (res.status >= 400) {
Logger.error("Error sending first time submission Discord hook");
Logger.error(JSON.stringify(res));
Logger.error("\n");
}
})
.catch(err => {
Logger.error("Failed to send first time submission Discord hook.");
Logger.error(JSON.stringify(err));
Logger.error("\n");
});
}
}
/**
 * Sends a Discord webhook reporting a segment that NeuralBlock predicted is
 * probably not a sponsor. Includes the submitter's name/stats in the embed.
 * No-op when no NeuralBlock-reject Discord URL is configured.
 */
async function sendWebhooksNB(userID: string, videoID: string, UUID: string, startTime: number, endTime: number, category: string, probability: number, ytData: any) {
// One query for the user's submission count, ignored-count, and display name
const submissionInfoRow = await db.prepare("get", `SELECT
(select count(1) from "sponsorTimes" where "userID" = ?) count,
(select count(1) from "sponsorTimes" where "userID" = ? and "votes" <= -2) disregarded,
coalesce((select "userName" FROM "userNames" WHERE "userID" = ?), ?) "userName"`,
[userID, userID, userID, userID]);
let submittedBy: string;
// If a userName was created then show both
if (submissionInfoRow.userName !== userID) {
submittedBy = `${submissionInfoRow.userName}\n${userID}`;
} else {
submittedBy = userID;
}
// Send discord message
if (config.discordNeuralBlockRejectWebhookURL === null) return;
// NOTE(review): unlike the first-time-submission hook, the "t=" parameter
// here has no "s" unit suffix -- looks like an oversight; confirm intended.
axios.post(config.discordNeuralBlockRejectWebhookURL, {
"embeds": [{
"title": ytData.items[0].snippet.title,
"url": `https://www.youtube.com/watch?v=${videoID}&t=${(parseFloat(startTime.toFixed(0)) - 2)}`,
"description": `**Submission ID:** ${UUID}\
\n**Timestamp:** ${getFormattedTime(startTime)} to ${getFormattedTime(endTime)}\
\n**Predicted Probability:** ${probability}\
\n**Category:** ${category}\
\n**Submitted by:** ${submittedBy}\
\n**Total User Submissions:** ${submissionInfoRow.count}\
\n**Ignored User Submissions:** ${submissionInfoRow.disregarded}`,
"color": 10813440,
"thumbnail": {
"url": ytData.items[0].snippet.thumbnails.maxres ? ytData.items[0].snippet.thumbnails.maxres.url : "",
},
}]
})
.then(res => {
if (res.status >= 400) {
Logger.error("Error sending NeuralBlock Discord hook");
Logger.error(JSON.stringify(res));
Logger.error("\n");
}
})
.catch(err => {
Logger.error("Failed to send NeuralBlock Discord hook.");
Logger.error(JSON.stringify(err));
Logger.error("\n");
});
}
// callback: function(reject: "String containing reason the submission was rejected")
// returns: string when an error, false otherwise
// Looks like this was broken for no defined youtube key - fixed but IMO we shouldn't return
// false for a pass - it was confusing and lead to this bug - any use of this function in
// the future could have the same problem.
/**
 * Auto-moderates a segment submission. Returns a rejection-reason string when
 * the submission should be rejected, or false when it is allowed. Allows by
 * default when no YouTube API data is available.
 */
async function autoModerateSubmission(apiVideoInfo: APIVideoInfo,
submission: { videoID: VideoID; userID: UserID; segments: IncomingSegment[], service: Service }) {
if (apiVideoInfo) {
const { err, data } = apiVideoInfo;
// API lookup failed: allow the submission
if (err) return false;
const duration = apiVideoInfo?.data?.lengthSeconds;
const segments = submission.segments;
// Semicolon-separated "start,end" pairs for sponsor segments, sent to NeuralBlock
let nbString = "";
for (let i = 0; i < segments.length; i++) {
if (duration == 0) {
// Allow submission if the duration is 0 (bug in youtube api)
return false;
} else {
if (segments[i].category === "sponsor") {
//Prepare timestamps to send to NB all at once
nbString = `${nbString}${segments[i].segment[0]},${segments[i].segment[1]};`;
}
}
}
// Get all submissions for this user
const allSubmittedByUser = await db.prepare("all", `SELECT "startTime", "endTime" FROM "sponsorTimes" WHERE "userID" = ? and "videoID" = ? and "votes" > -1`, [submission.userID, submission.videoID]);
const allSegmentTimes = [];
if (allSubmittedByUser !== undefined) {
//add segments the user has previously submitted
for (const segmentInfo of allSubmittedByUser) {
allSegmentTimes.push([parseFloat(segmentInfo.startTime), parseFloat(segmentInfo.endTime)]);
}
}
//add segments they are trying to add in this submission
for (let i = 0; i < segments.length; i++) {
const startTime = parseFloat(segments[i].segment[0]);
const endTime = parseFloat(segments[i].segment[1]);
allSegmentTimes.push([startTime, endTime]);
}
//merge all the times into non-overlapping arrays
const allSegmentsSorted = mergeTimeSegments(allSegmentTimes.sort(function (a, b) {
return a[0] - b[0] || a[1] - b[1];
}));
const videoDuration = data?.lengthSeconds;
if (videoDuration != 0) {
let allSegmentDuration = 0;
//sum all segment times together
allSegmentsSorted.forEach(segmentInfo => allSegmentDuration += segmentInfo[1] - segmentInfo[0]);
if (allSegmentDuration > (videoDuration / 100) * 80) {
// Reject submission if all segments combine are over 80% of the video
return "Total length of your submitted segments are over 80% of the video.";
}
}
// Check NeuralBlock
const neuralBlockURL = config.neuralBlockURL;
// NeuralBlock not configured: allow
if (!neuralBlockURL) return false;
// NOTE(review): this template literal spans two lines, so the request URL
// contains an embedded newline plus the following spaces between the videoID
// and "&segments" -- looks unintended; confirm the service tolerates it.
const response = await axios.get(`${neuralBlockURL}/api/checkSponsorSegments?vid=${submission.videoID}
&segments=${nbString.substring(0, nbString.length - 1)}`, { validateStatus: () => true });
// Any non-200 response from NeuralBlock: allow
if (response.status !== 200) return false;
const nbPredictions = response.data;
let nbDecision = false;
let predictionIdx = 0; //Keep track because only sponsor categories were submitted
for (let i = 0; i < segments.length; i++) {
if (segments[i].category === "sponsor") {
// Probability below 0.70 means NB thinks this is likely not a sponsor
if (nbPredictions.probabilities[predictionIdx] < 0.70) {
nbDecision = true; // At least one bad entry
const startTime = parseFloat(segments[i].segment[0]);
const endTime = parseFloat(segments[i].segment[1]);
const UUID = getSubmissionUUID(submission.videoID, segments[i].actionType, submission.userID, startTime, endTime, submission.service);
// Send to Discord
// Note, if this is too spammy. Consider sending all the segments as one Webhook
sendWebhooksNB(submission.userID, submission.videoID, UUID, startTime, endTime, segments[i].category, nbPredictions.probabilities[predictionIdx], data);
}
predictionIdx++;
}
}
if (nbDecision) {
return "Rejected based on NeuralBlock predictions.";
} else {
return false;
}
} else {
Logger.debug("Skipped YouTube API");
// Can't moderate the submission without calling the youtube API
// so allow by default.
return false;
}
}
// Fetches YouTube metadata for a video via the configured NewLeaf instances.
// Resolves to null when no newLeafURLs are configured (no data source).
function getYouTubeVideoInfo(videoID: VideoID, ignoreCache = false): Promise<APIVideoInfo> {
    return config.newLeafURLs !== null
        ? YouTubeAPI.listVideos(videoID, ignoreCache)
        : null;
}
// Checks whether the user has enough active (non-expired, enabled) moderator
// warnings to block their submissions.
// Returns CHECK_PASS, or a 403 CheckResult whose message surfaces the longest
// warning reason on record.
async function checkUserActiveWarning(userID: string): Promise<CheckResult> {
    const MILLISECONDS_IN_HOUR = 3600000;
    const now = Date.now();
    // Fetch warnings issued inside the configured expiry window, then re-sort
    // so the warning with the longest reason text comes first — that reason is
    // the one shown to the user below.
    const warnings = (await db.prepare("all",
        `SELECT "reason"
        FROM warnings
        WHERE "userID" = ? AND "issueTime" > ? AND enabled = 1
        ORDER BY "issueTime" DESC`,
        [
            userID,
            Math.floor(now - (config.hoursAfterWarningExpires * MILLISECONDS_IN_HOUR))
        ],
    ) as {reason: string}[]).sort((a, b) => (b?.reason?.length ?? 0) - (a?.reason?.length ?? 0));
    if (warnings?.length >= config.maxNumberOfActiveWarnings) {
        const defaultMessage = "Submission rejected due to a warning from a moderator. This means that we noticed you were making some common mistakes"
            + " that are not malicious, and we just want to clarify the rules. "
            + "Could you please send a message in discord.gg/SponsorBlock or matrix.to/#/#sponsor:ajay.app so we can further help you? "
            + `Your userID is ${userID}.`;
        return {
            pass: false,
            // Append the (longest) reason only when one was actually recorded
            errorMessage: defaultMessage + (warnings[0]?.reason?.length > 0 ? `\n\nWarning reason: '${warnings[0].reason}'` : ""),
            errorCode: 403
        };
    }
    return CHECK_PASS;
}
// Validates the raw (untrusted) request fields for a submission.
// Returns CHECK_PASS, or a 400 CheckResult naming every invalid field.
function checkInvalidFields(videoID: any, userID: any, segments: Array<any>): CheckResult {
    const invalidFields = [];
    const errors = [];
    if (typeof videoID !== "string") {
        invalidFields.push("videoID");
    }
    if (typeof userID !== "string" || userID?.length < 30) {
        invalidFields.push("userID");
        if (userID?.length < 30) errors.push(`userID must be at least 30 characters long`);
    }
    if (!Array.isArray(segments) || segments.length < 1) {
        invalidFields.push("segments");
    } else {
        // validate start and end times (no : marks)
        // BUG FIX: only iterate when `segments` really is an array — previously a
        // missing/non-array value crashed here (500) instead of returning 400.
        for (const segmentPair of segments) {
            // Guard a malformed pair with no `segment` property; detailed
            // per-segment validation happens later in checkEachSegmentValid.
            const startTime = segmentPair?.segment?.[0];
            const endTime = segmentPair?.segment?.[1];
            if ((typeof startTime === "string" && startTime.includes(":")) ||
                (typeof endTime === "string" && endTime.includes(":"))) {
                invalidFields.push("segment time");
            }
        }
    }
    if (invalidFields.length !== 0) {
        // invalid request — join field names with ", " and error details with ". "
        const formattedFields = invalidFields.reduce((p, c, i) => p + (i !== 0 ? ", " : "") + c, "");
        const formattedErrors = errors.reduce((p, c, i) => p + (i !== 0 ? ". " : " ") + c, "");
        return {
            pass: false,
            errorMessage: `No valid ${formattedFields} field(s) provided.${formattedErrors}`,
            errorCode: 400
        };
    }
    return CHECK_PASS;
}
// Validates every segment of a submission against category rules, locked
// categories, time sanity checks, and duplicates already in the database.
// Returns CHECK_PASS, or the first failing CheckResult (400/403/409).
async function checkEachSegmentValid(userID: string, videoID: VideoID,
    segments: IncomingSegment[], service: string, isVIP: boolean, lockedCategoryList: Array<any>): Promise<CheckResult> {
    for (let i = 0; i < segments.length; i++) {
        if (segments[i] === undefined || segments[i].segment === undefined || segments[i].category === undefined) {
            //invalid request
            return { pass: false, errorMessage: "One of your segments are invalid", errorCode: 400 };
        }
        if (!config.categoryList.includes(segments[i].category)) {
            return { pass: false, errorMessage: "Category doesn't exist.", errorCode: 400 };
        }
        // Reject segment if it's in the locked categories list (VIPs are exempt)
        const lockIndex = lockedCategoryList.findIndex(c => segments[i].category === c.category);
        if (!isVIP && lockIndex !== -1) {
            // TODO: Do something about the fradulent submission
            Logger.warn(`Caught a submission for a locked category. userID: '${userID}', videoID: '${videoID}', category: '${segments[i].category}', times: ${segments[i].segment}`);
            return {
                pass: false,
                errorCode: 403,
                errorMessage:
                    `New submissions are not allowed for the following category: ` +
                    `'${segments[i].category}'. A moderator has decided that no new segments are needed on this video and that all current segments of this category are timed perfectly.\n` +
                    `${lockedCategoryList[lockIndex].reason?.length !== 0 ? `\nLock reason: '${lockedCategoryList[lockIndex].reason}'` : ""}\n` +
                    `${(segments[i].category === "sponsor" ? "\nMaybe the segment you are submitting is a different category that you have not enabled and is not a sponsor. " +
                        "Categories that aren't sponsor, such as self-promotion can be enabled in the options.\n" : "")}` +
                    `\nIf you believe this is incorrect, please contact someone on discord.gg/SponsorBlock or matrix.to/#/#sponsor:ajay.app`
            };
        }
        if (!config.categorySupport[segments[i].category]?.includes(segments[i].actionType)) {
            return { pass: false, errorMessage: "ActionType is not supported with this category.", errorCode: 400 };
        }
        const startTime = parseFloat(segments[i].segment[0]);
        const endTime = parseFloat(segments[i].segment[1]);
        // Time sanity: numeric, finite, non-negative, ordered. Skippable
        // segments must have nonzero length; POI segments must be a single
        // point in time (start === end).
        if (isNaN(startTime) || isNaN(endTime)
            || startTime === Infinity || endTime === Infinity || startTime < 0 || startTime > endTime
            || (getCategoryActionType(segments[i].category) === CategoryActionType.Skippable && startTime === endTime)
            || (getCategoryActionType(segments[i].category) === CategoryActionType.POI && startTime !== endTime)) {
            //invalid request
            return { pass: false, errorMessage: "One of your segments times are invalid (too short, startTime before endTime, etc.)", errorCode: 400 };
        }
        // Check for POI segments before some seconds (non-VIP only)
        if (!isVIP && getCategoryActionType(segments[i].category) === CategoryActionType.POI && startTime < config.poiMinimumStartTime) {
            return { pass: false, errorMessage: `POI cannot be that early`, errorCode: 400 };
        }
        if (!isVIP && segments[i].category === "sponsor" && Math.abs(startTime - endTime) < 1) {
            // Too short
            return { pass: false, errorMessage: "Sponsors must be longer than 1 second long", errorCode: 400 };
        }
        //check if this info has already been submitted before
        const duplicateCheck2Row = await db.prepare("get", `SELECT COUNT(*) as count FROM "sponsorTimes" WHERE "startTime" = ?
            and "endTime" = ? and "category" = ? and "actionType" = ? and "videoID" = ? and "service" = ?`, [startTime, endTime, segments[i].category, segments[i].actionType, videoID, service]);
        if (duplicateCheck2Row.count > 0) {
            return { pass: false, errorMessage: "Sponsors has already been submitted before.", errorCode: 409 };
        }
    }
    return CHECK_PASS;
}
// Runs the auto-moderation pipeline for a submission. VIPs and non-YouTube
// services are exempt. Returns CHECK_PASS (carrying the possibly-adjusted
// decreaseVotes) on success, or a 403 CheckResult when rejected.
async function checkByAutoModerator(videoID: any, userID: any, segments: Array<any>, isVIP: boolean, service:string, apiVideoInfo: APIVideoInfo, decreaseVotes: number): Promise<CheckResult & { decreaseVotes: number; } > {
    // Auto moderator check
    if (!isVIP && service == Service.YouTube) {
        const autoModerateResult = await autoModerateSubmission(apiVideoInfo, { userID, videoID, segments, service });//startTime, endTime, category: segments[i].category});
        // NOTE(review): this branch matches the exact message string returned by
        // autoModerateSubmission — fragile if that string ever changes; confirm.
        if (autoModerateResult == "Rejected based on NeuralBlock predictions.") {
            // If NB automod rejects, the submission will start with -2 votes.
            // Note, if one submission is bad all submissions will be affected.
            // However, this behavior is consistent with other automod functions
            // already in place.
            //decreaseVotes = -2; //Disable for now
        } else if (autoModerateResult) {
            //Normal automod behavior: any other truthy result is a rejection reason
            return {
                pass: false,
                errorCode: 403,
                errorMessage: `Request rejected by auto moderator: ${autoModerateResult} If this is an issue, send a message on Discord.`,
                decreaseVotes
            };
        }
    }
    return {
        ...CHECK_PASS,
        decreaseVotes
    };
}
async function updateDataIfVideoDurationChange(videoID: VideoID, service: Service, videoDuration: VideoDuration, videoDurationParam: VideoDuration) {
let lockedCategoryList = await db.prepare("all", 'SELECT category, reason from "lockCategories" where "videoID" = ? AND "service" = ?', [videoID, service]);
const previousSubmissions = await db.prepare("all",
`SELECT "videoDuration", "UUID"
FROM "sponsorTimes"
WHERE "videoID" = ? AND "service" = ? AND
"hidden" = 0 AND "shadowHidden" = 0 AND
"votes" > -2 AND "videoDuration" != 0`,
[videoID, service]
) as {videoDuration: VideoDuration, UUID: SegmentUUID}[];
// If the video's duration is changed, then the video should be unlocked and old submissions should be hidden
const videoDurationChanged = (videoDuration: number) => videoDuration != 0
&& previousSubmissions.length > 0 && !previousSubmissions.some((e) => Math.abs(videoDuration - e.videoDuration) < 2);
let apiVideoInfo: APIVideoInfo = null;
if (service == Service.YouTube) {
// Don't use cache if we don't know the video duration, or the client claims that it has changed
apiVideoInfo = await getYouTubeVideoInfo(videoID, !videoDurationParam || previousSubmissions.length === 0 || videoDurationChanged(videoDurationParam));
}
const apiVideoDuration = apiVideoInfo?.data?.lengthSeconds as VideoDuration;
if (!videoDurationParam || (apiVideoDuration && Math.abs(videoDurationParam - apiVideoDuration) > 2)) {
// If api duration is far off, take that one instead (it is only precise to seconds, not millis)
videoDuration = apiVideoDuration || 0 as VideoDuration;
}
// Only treat as difference if both the api duration and submitted duration have changed
if (videoDurationChanged(videoDuration) && (!videoDurationParam || videoDurationChanged(videoDurationParam))) {
// Hide all previous submissions
for (const submission of previousSubmissions) {
await db.prepare("run", `UPDATE "sponsorTimes" SET "hidden" = 1 WHERE "UUID" = ?`, [submission.UUID]);
}
lockedCategoryList = [];
deleteLockCategories(videoID, null, service);
}
return {
videoDuration,
apiVideoInfo,
lockedCategoryList
};
}
// Disable max submissions for now
// Disable IP ratelimiting for now
// eslint-disable-next-line @typescript-eslint/no-unused-vars
async function checkRateLimit(userID:string, videoID: VideoID, service: Service, timeSubmitted: number, hashedIP: string, options: {
    enableCheckByIP: boolean;
    enableCheckByUserID: boolean;
} = {
    enableCheckByIP: false,
    enableCheckByUserID: false
}): Promise<CheckResult> {
    // Rate-limits submissions per video over the trailing 24 hours, by hashed
    // IP (max 10) and/or by userID (max 16). Both checks default to disabled.
    // Returns CHECK_PASS or a 429 CheckResult.
    // BUG FIX: user-facing message previously read "Have submited many..."
    // (typo, duplicated in both branches) — fixed and deduplicated here.
    const rateLimitMessage = "Have submitted too many sponsors for the same video.";
    const yesterday = timeSubmitted - 86400000; // 24 hours in ms
    if (options.enableCheckByIP) {
        //check to see if this ip has submitted too many sponsors today
        const rateLimitCheckRow = await privateDB.prepare("get", `SELECT COUNT(*) as count FROM "sponsorTimes" WHERE "hashedIP" = ? AND "videoID" = ? AND "timeSubmitted" > ? AND "service" = ?`, [hashedIP, videoID, yesterday, service]);
        if (rateLimitCheckRow.count >= 10) {
            //too many sponsors for the same video from the same ip address
            return {
                pass: false,
                errorCode: 429,
                errorMessage: rateLimitMessage
            };
        }
    }
    if (options.enableCheckByUserID) {
        //check to see if the user has already submitted sponsors for this video
        const duplicateCheckRow = await db.prepare("get", `SELECT COUNT(*) as count FROM "sponsorTimes" WHERE "userID" = ? and "videoID" = ?`, [userID, videoID]);
        if (duplicateCheckRow.count >= 16) {
            //too many sponsors for the same video from the same user
            return {
                pass: false,
                errorCode: 429,
                errorMessage: rateLimitMessage
            };
        }
    }
    return CHECK_PASS;
}
// Mirrors the raw submission to the configured proxy instance, fire-and-forget.
// Failures are logged and never propagated to the caller.
function proxySubmission(req: Request) {
    const proxyURL = `${config.proxySubmission}/api/skipSegments?userID=${req.query.userID}&videoID=${req.query.videoID}`;
    axios.post(proxyURL, req.body)
        .then((res) => Logger.debug(`Proxy Submission: ${res.status} (${res.data})`))
        .catch(() => Logger.error("Proxy Submission: Failed to make call"));
}
// Normalises the incoming request into a single submission payload.
// Segments come either from the JSON body or, failing that, from the
// startTime/endTime/category query parameters (one segment). Fills in a
// default actionType and converts decimal commas ("1,5") to dots in string
// segment times.
function preprocessInput(req: Request) {
    const videoID = req.query.videoID || req.body.videoID;
    const userID = req.query.userID || req.body.userID;
    const service = getService(req.query.service, req.body.service);
    const videoDurationParam: VideoDuration = (parseFloat(req.query.videoDuration || req.body.videoDuration) || 0) as VideoDuration;
    const videoDuration = videoDurationParam;
    let segments = req.body.segments as IncomingSegment[];
    if (segments === undefined) {
        // Use query instead
        segments = [{
            segment: [req.query.startTime as string, req.query.endTime as string],
            category: req.query.category as Category,
            actionType: (req.query.actionType as ActionType) ?? ActionType.Skip
        }];
    }
    segments.forEach((segment) => {
        // Add default action type
        if (!Object.values(ActionType).some((val) => val === segment.actionType)){
            segment.actionType = ActionType.Skip;
        }
        // BUG FIX: inspect each element's own type. Previously only
        // segment.segment[0] was checked, so a mixed [number, string] pair
        // either skipped conversion or called .replace on a number (TypeError).
        segment.segment = segment.segment.map((time) => typeof time === "string" ? time.replace(",", ".") : time);
    });
    const userAgent = req.query.userAgent ?? req.body.userAgent ?? parseUserAgent(req.get("user-agent")) ?? "";
    return { videoID, userID, service, videoDuration, videoDurationParam, segments, userAgent };
}
// HTTP handler for POST /api/skipSegments.
// Pipeline: optional proxy mirror → field validation → warning check → VIP
// lookup → duration reconciliation → per-segment validation → auto moderation
// → insert each segment into the DB → fire webhooks → respond with the new
// segments (or an error status from the first failing check).
export async function postSkipSegments(req: Request, res: Response): Promise<Response> {
    if (config.proxySubmission) {
        proxySubmission(req);
    }
    // eslint-disable-next-line prefer-const
    let { videoID, userID, service, videoDuration, videoDurationParam, segments, userAgent } = preprocessInput(req);
    const invalidCheckResult = checkInvalidFields(videoID, userID, segments);
    if (!invalidCheckResult.pass) {
        return res.status(invalidCheckResult.errorCode).send(invalidCheckResult.errorMessage);
    }
    //hash the userID (all later checks and inserts use the hashed form)
    userID = getHash(userID);
    const userWarningCheckResult = await checkUserActiveWarning(userID);
    if (!userWarningCheckResult.pass) {
        Logger.warn(`Caught a submission for for a warned user. userID: '${userID}', videoID: '${videoID}', category: '${segments.reduce<string>((prev, val) => `${prev} ${val.category}`, "")}', times: ${segments.reduce<string>((prev, val) => `${prev} ${val.segment}`, "")}`);
        return res.status(userWarningCheckResult.errorCode).send(userWarningCheckResult.errorMessage);
    }
    //check if this user is on the vip list
    const isVIP = await isUserVIP(userID);
    const newData = await updateDataIfVideoDurationChange(videoID, service, videoDuration, videoDurationParam);
    videoDuration = newData.videoDuration;
    const { lockedCategoryList, apiVideoInfo } = newData;
    // Check if all submissions are correct
    const segmentCheckResult = await checkEachSegmentValid(userID, videoID, segments, service, isVIP, lockedCategoryList);
    if (!segmentCheckResult.pass) {
        return res.status(segmentCheckResult.errorCode).send(segmentCheckResult.errorMessage);
    }
    let decreaseVotes = 0;
    // Auto check by NB
    const autoModerateCheckResult = await checkByAutoModerator(videoID, userID, segments, isVIP, service, apiVideoInfo, decreaseVotes);
    if (!autoModerateCheckResult.pass) {
        return res.status(autoModerateCheckResult.errorCode).send(autoModerateCheckResult.errorMessage);
    } else {
        decreaseVotes = autoModerateCheckResult.decreaseVotes;
    }
    // Will be filled when submitting
    const UUIDs = [];
    const newSegments = [];
    //hash the ip 5000 times so no one can get it from the database
    const hashedIP = getHash(getIP(req) + config.globalSalt);
    try {
        //get current time
        const timeSubmitted = Date.now();
        // const rateLimitCheckResult = checkRateLimit(userID, videoID, service, timeSubmitted, hashedIP);
        // if (!rateLimitCheckResult.pass) {
        //     return res.status(rateLimitCheckResult.errorCode).send(rateLimitCheckResult.errorMessage);
        // }
        //check to see if this user is shadowbanned
        const shadowBanRow = await db.prepare("get", `SELECT count(*) as "userCount" FROM "shadowBannedUsers" WHERE "userID" = ? LIMIT 1`, [userID]);
        let shadowBanned = shadowBanRow.userCount;
        if (!(await isUserTrustworthy(userID))) {
            //hide this submission as this user is untrustworthy
            shadowBanned = 1;
        }
        const startingVotes = 0 + decreaseVotes;
        const reputation = await getReputation(userID);
        for (const segmentInfo of segments) {
            //this can just be a hash of the data
            //it's better than generating an actual UUID like what was used before
            //also better for duplication checking
            const UUID = getSubmissionUUID(videoID, segmentInfo.actionType, userID, parseFloat(segmentInfo.segment[0]), parseFloat(segmentInfo.segment[1]), service);
            const hashedVideoID = getHash(videoID, 1);
            // VIP submissions start out locked
            const startingLocked = isVIP ? 1 : 0;
            try {
                await db.prepare("run", `INSERT INTO "sponsorTimes"
                ("videoID", "startTime", "endTime", "votes", "locked", "UUID", "userID", "timeSubmitted", "views", "category", "actionType", "service", "videoDuration", "reputation", "shadowHidden", "hashedVideoID", "userAgent")
                VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`, [
                    videoID, segmentInfo.segment[0], segmentInfo.segment[1], startingVotes, startingLocked, UUID, userID, timeSubmitted, 0, segmentInfo.category, segmentInfo.actionType, service, videoDuration, reputation, shadowBanned, hashedVideoID, userAgent
                ],
                );
                //add to private db as well (links hashedIP to the submission)
                await privateDB.prepare("run", `INSERT INTO "sponsorTimes" VALUES(?, ?, ?, ?)`, [videoID, hashedIP, timeSubmitted, service]);
                // Insert video metadata only if not already present
                await db.prepare("run", `INSERT INTO "videoInfo" ("videoID", "channelID", "title", "published", "genreUrl")
                    SELECT ?, ?, ?, ?, ?
                    WHERE NOT EXISTS (SELECT 1 FROM "videoInfo" WHERE "videoID" = ?)`, [
                    videoID, apiVideoInfo?.data?.authorId || "", apiVideoInfo?.data?.title || "", apiVideoInfo?.data?.published || 0, apiVideoInfo?.data?.genreUrl || "", videoID]);
                // Clear redis cache for this video
                QueryCacher.clearVideoCache({
                    videoID,
                    hashedVideoID,
                    service,
                    userID
                });
            } catch (err) {
                //a DB change probably occurred
                Logger.error(`Error when putting sponsorTime in the DB: ${videoID}, ${segmentInfo.segment[0]}, ${segmentInfo.segment[1]}, ${userID}, ${segmentInfo.category}. ${err}`);
                return res.sendStatus(500);
            }
            UUIDs.push(UUID);
            newSegments.push({
                UUID: UUID,
                category: segmentInfo.category,
                segment: segmentInfo.segment,
            });
        }
    } catch (err) {
        Logger.error(err as string);
        return res.sendStatus(500);
    }
    // Fire webhooks only after every segment was stored successfully
    for (let i = 0; i < segments.length; i++) {
        sendWebhooks(apiVideoInfo, userID, videoID, UUIDs[i], segments[i], service);
    }
    return res.json(newSegments);
}
// Takes an array of arrays:
// ex)
// [
// [3, 40],
// [50, 70],
// [60, 80],
// [100, 150]
// ]
// => transforms to combining overlapping segments
// [
// [3, 40],
// [50, 80],
// [100, 150]
// ]
function mergeTimeSegments(ranges: number[][]) {
const result: number[][] = [];
let last: number[];
ranges.forEach(function (r) {
if (!last || r[0] > last[1])
result.push(last = r);
else if (r[1] > last[1])
last[1] = r[1];
});
return result;
} | the_stack |
declare namespace GoogleAppsScript {
namespace AdminDirectory {
namespace Collection {
namespace Groups {
  // Operations on the email aliases attached to a group.
  interface AliasesCollection {
    // Adds an alias for the group.
    insert(resource: Schema.Alias, groupKey: string): AdminDirectory.Schema.Alias;
    // Lists all aliases for a group.
    list(groupKey: string): AdminDirectory.Schema.Aliases;
    // Removes an alias from the group.
    remove(groupKey: string, alias: string): void;
  }
}
namespace Resources {
  // CRUD operations for buildings in the customer's resource directory.
  interface BuildingsCollection {
    // Retrieves a building.
    get(customer: string, buildingId: string): AdminDirectory.Schema.Building;
    // Inserts a building.
    insert(resource: Schema.Building, customer: string): AdminDirectory.Schema.Building;
    // Inserts a building.
    insert(resource: Schema.Building, customer: string, optionalArgs: object): AdminDirectory.Schema.Building;
    // Retrieves a list of buildings for an account.
    list(customer: string): AdminDirectory.Schema.Buildings;
    // Retrieves a list of buildings for an account.
    list(customer: string, optionalArgs: object): AdminDirectory.Schema.Buildings;
    // Updates a building. This method supports patch semantics.
    patch(resource: Schema.Building, customer: string, buildingId: string): AdminDirectory.Schema.Building;
    // Updates a building. This method supports patch semantics.
    patch(resource: Schema.Building, customer: string, buildingId: string, optionalArgs: object): AdminDirectory.Schema.Building;
    // Deletes a building.
    remove(customer: string, buildingId: string): void;
    // Updates a building.
    update(resource: Schema.Building, customer: string, buildingId: string): AdminDirectory.Schema.Building;
    // Updates a building.
    update(resource: Schema.Building, customer: string, buildingId: string, optionalArgs: object): AdminDirectory.Schema.Building;
  }
  // CRUD operations for calendar resources (bookable rooms/equipment).
  interface CalendarsCollection {
    // Retrieves a calendar resource.
    get(customer: string, calendarResourceId: string): AdminDirectory.Schema.CalendarResource;
    // Inserts a calendar resource.
    insert(resource: Schema.CalendarResource, customer: string): AdminDirectory.Schema.CalendarResource;
    // Retrieves a list of calendar resources for an account.
    list(customer: string): AdminDirectory.Schema.CalendarResources;
    // Retrieves a list of calendar resources for an account.
    list(customer: string, optionalArgs: object): AdminDirectory.Schema.CalendarResources;
    // Updates a calendar resource.
    // This method supports patch semantics, meaning you only need to include the fields you wish to update. Fields that are not present in the request will be preserved.
    patch(resource: Schema.CalendarResource, customer: string, calendarResourceId: string): AdminDirectory.Schema.CalendarResource;
    // Deletes a calendar resource.
    remove(customer: string, calendarResourceId: string): void;
    // Updates a calendar resource.
    // This method supports patch semantics, meaning you only need to include the fields you wish to update. Fields that are not present in the request will be preserved.
    update(resource: Schema.CalendarResource, customer: string, calendarResourceId: string): AdminDirectory.Schema.CalendarResource;
  }
  // CRUD operations for resource features (e.g. "Whiteboard", "VC unit").
  interface FeaturesCollection {
    // Retrieves a feature.
    get(customer: string, featureKey: string): AdminDirectory.Schema.Feature;
    // Inserts a feature.
    insert(resource: Schema.Feature, customer: string): AdminDirectory.Schema.Feature;
    // Retrieves a list of features for an account.
    list(customer: string): AdminDirectory.Schema.Features;
    // Retrieves a list of features for an account.
    list(customer: string, optionalArgs: object): AdminDirectory.Schema.Features;
    // Updates a feature. This method supports patch semantics.
    patch(resource: Schema.Feature, customer: string, featureKey: string): AdminDirectory.Schema.Feature;
    // Deletes a feature.
    remove(customer: string, featureKey: string): void;
    // Renames a feature.
    rename(resource: Schema.FeatureRename, customer: string, oldName: string): void;
    // Updates a feature.
    update(resource: Schema.Feature, customer: string, featureKey: string): AdminDirectory.Schema.Feature;
  }
}
namespace Users {
  // Operations on the email aliases attached to a user.
  interface AliasesCollection {
    // Adds an alias for the user.
    insert(resource: Schema.Alias, userKey: string): AdminDirectory.Schema.Alias;
    // Lists all aliases for a user.
    list(userKey: string): AdminDirectory.Schema.Aliases;
    // Lists all aliases for a user.
    list(userKey: string, optionalArgs: object): AdminDirectory.Schema.Aliases;
    // Removes an alias from the user.
    remove(userKey: string, alias: string): void;
    // Watches for changes in the user's aliases list.
    watch(resource: Schema.Channel, userKey: string): AdminDirectory.Schema.Channel;
    // Watches for changes in the user's aliases list.
    watch(resource: Schema.Channel, userKey: string, optionalArgs: object): AdminDirectory.Schema.Channel;
  }
  // Operations on a user's profile photo.
  interface PhotosCollection {
    // Retrieves the photo of a user.
    get(userKey: string): AdminDirectory.Schema.UserPhoto;
    // Adds a photo for the user. This method supports patch semantics.
    patch(resource: Schema.UserPhoto, userKey: string): AdminDirectory.Schema.UserPhoto;
    // Removes the photo for the user.
    remove(userKey: string): void;
    // Adds a photo for the user.
    update(resource: Schema.UserPhoto, userKey: string): AdminDirectory.Schema.UserPhoto;
  }
}
// Operations on application-specific passwords (ASPs) issued by a user.
interface AspsCollection {
  // Gets information about an ASP issued by a user.
  get(userKey: string, codeId: number): AdminDirectory.Schema.Asp;
  // Lists the ASPs issued by a user.
  list(userKey: string): AdminDirectory.Schema.Asps;
  // Deletes an ASP issued by a user.
  remove(userKey: string, codeId: number): void;
}
// Operations on push-notification channels.
interface ChannelsCollection {
  // Stops watching resources through this channel.
  stop(resource: Schema.Channel): void;
}
// Operations on a customer's Chrome OS devices.
interface ChromeosdevicesCollection {
  // Takes action on a Chrome OS device.
  action(resource: Schema.ChromeOsDeviceAction, customerId: string, resourceId: string): void;
  // Retrieves a Chrome OS device.
  get(customerId: string, deviceId: string): AdminDirectory.Schema.ChromeOsDevice;
  // Retrieves a Chrome OS device.
  get(customerId: string, deviceId: string, optionalArgs: object): AdminDirectory.Schema.ChromeOsDevice;
  // Retrieves all Chrome OS devices of a customer (paginated).
  list(customerId: string): AdminDirectory.Schema.ChromeOsDevices;
  // Retrieves all Chrome OS devices of a customer (paginated).
  list(customerId: string, optionalArgs: object): AdminDirectory.Schema.ChromeOsDevices;
  // Moves or inserts multiple Chrome OS devices to an organizational unit.
  moveDevicesToOu(resource: Schema.ChromeOsMoveDevicesToOu, customerId: string, orgUnitPath: string): void;
  // Updates a Chrome OS device. This method supports patch semantics.
  patch(resource: Schema.ChromeOsDevice, customerId: string, deviceId: string): AdminDirectory.Schema.ChromeOsDevice;
  // Updates a Chrome OS device. This method supports patch semantics.
  patch(resource: Schema.ChromeOsDevice, customerId: string, deviceId: string, optionalArgs: object): AdminDirectory.Schema.ChromeOsDevice;
  // Updates a Chrome OS device.
  update(resource: Schema.ChromeOsDevice, customerId: string, deviceId: string): AdminDirectory.Schema.ChromeOsDevice;
  // Updates a Chrome OS device.
  update(resource: Schema.ChromeOsDevice, customerId: string, deviceId: string, optionalArgs: object): AdminDirectory.Schema.ChromeOsDevice;
}
// Operations on customer (account-level) records.
interface CustomersCollection {
  // Retrieves a customer.
  get(customerKey: string): AdminDirectory.Schema.Customer;
  // Updates a customer. This method supports patch semantics.
  patch(resource: Schema.Customer, customerKey: string): AdminDirectory.Schema.Customer;
  // Updates a customer.
  update(resource: Schema.Customer, customerKey: string): AdminDirectory.Schema.Customer;
}
// Operations on the customer's domain aliases.
interface DomainAliasesCollection {
  // Retrieves a domain alias of the customer.
  get(customer: string, domainAliasName: string): AdminDirectory.Schema.DomainAlias;
  // Inserts a domain alias of the customer.
  insert(resource: Schema.DomainAlias, customer: string): AdminDirectory.Schema.DomainAlias;
  // Lists the domain aliases of the customer.
  list(customer: string): AdminDirectory.Schema.DomainAliases;
  // Lists the domain aliases of the customer.
  list(customer: string, optionalArgs: object): AdminDirectory.Schema.DomainAliases;
  // Deletes a domain alias of the customer.
  remove(customer: string, domainAliasName: string): void;
}
// Operations on the customer's domains.
interface DomainsCollection {
  // Retrieves a domain of the customer.
  get(customer: string, domainName: string): AdminDirectory.Schema.Domains;
  // Inserts a domain of the customer.
  insert(resource: Schema.Domains, customer: string): AdminDirectory.Schema.Domains;
  // Lists the domains of the customer.
  list(customer: string): AdminDirectory.Schema.Domains2;
  // Deletes a domain of the customer.
  remove(customer: string, domainName: string): void;
}
// CRUD operations for groups; nested Aliases sub-collection for group aliases.
interface GroupsCollection {
  Aliases?: AdminDirectory.Collection.Groups.AliasesCollection;
  // Retrieves a group.
  get(groupKey: string): AdminDirectory.Schema.Group;
  // Creates a group.
  insert(resource: Schema.Group): AdminDirectory.Schema.Group;
  // Retrieves all groups of a domain or of a user given a userKey (paginated).
  list(): AdminDirectory.Schema.Groups;
  // Retrieves all groups of a domain or of a user given a userKey (paginated).
  list(optionalArgs: object): AdminDirectory.Schema.Groups;
  // Updates a group. This method supports patch semantics.
  patch(resource: Schema.Group, groupKey: string): AdminDirectory.Schema.Group;
  // Deletes a group.
  remove(groupKey: string): void;
  // Updates a group.
  update(resource: Schema.Group, groupKey: string): AdminDirectory.Schema.Group;
}
// Operations on group memberships.
interface MembersCollection {
  // Retrieves a group member.
  get(groupKey: string, memberKey: string): AdminDirectory.Schema.Member;
  // Checks whether the given user is a member of the group. Membership can be direct or nested.
  hasMember(groupKey: string, memberKey: string): AdminDirectory.Schema.MembersHasMember;
  // Adds a user to the specified group.
  insert(resource: Schema.Member, groupKey: string): AdminDirectory.Schema.Member;
  // Retrieves all members in a group (paginated).
  list(groupKey: string): AdminDirectory.Schema.Members;
  // Retrieves all members in a group (paginated).
  list(groupKey: string, optionalArgs: object): AdminDirectory.Schema.Members;
  // Updates the membership of a user in the specified group. This method supports patch semantics.
  patch(resource: Schema.Member, groupKey: string, memberKey: string): AdminDirectory.Schema.Member;
  // Removes a membership.
  remove(groupKey: string, memberKey: string): void;
  // Updates the membership of a user in the specified group.
  update(resource: Schema.Member, groupKey: string, memberKey: string): AdminDirectory.Schema.Member;
}
// Operations on a customer's managed mobile devices.
interface MobiledevicesCollection {
  // Takes action on a mobile device.
  action(resource: Schema.MobileDeviceAction, customerId: string, resourceId: string): void;
  // Retrieves a mobile device.
  get(customerId: string, resourceId: string): AdminDirectory.Schema.MobileDevice;
  // Retrieves a mobile device.
  get(customerId: string, resourceId: string, optionalArgs: object): AdminDirectory.Schema.MobileDevice;
  // Retrieves all mobile devices of a customer (paginated).
  list(customerId: string): AdminDirectory.Schema.MobileDevices;
  // Retrieves all mobile devices of a customer (paginated).
  list(customerId: string, optionalArgs: object): AdminDirectory.Schema.MobileDevices;
  // Deletes a mobile device.
  remove(customerId: string, resourceId: string): void;
}
// Operations on admin-console notifications.
interface NotificationsCollection {
  // Retrieves a notification.
  get(customer: string, notificationId: string): AdminDirectory.Schema.Notification;
  // Retrieves a list of notifications.
  list(customer: string): AdminDirectory.Schema.Notifications;
  // Retrieves a list of notifications.
  list(customer: string, optionalArgs: object): AdminDirectory.Schema.Notifications;
  // Updates a notification. This method supports patch semantics.
  patch(resource: Schema.Notification, customer: string, notificationId: string): AdminDirectory.Schema.Notification;
  // Deletes a notification.
  remove(customer: string, notificationId: string): void;
  // Updates a notification.
  update(resource: Schema.Notification, customer: string, notificationId: string): AdminDirectory.Schema.Notification;
}
// Operations on organizational units.
interface OrgunitsCollection {
  // Retrieves an organizational unit.
  get(customerId: string, orgUnitPath: string[]): AdminDirectory.Schema.OrgUnit;
  // Adds an organizational unit.
  insert(resource: Schema.OrgUnit, customerId: string): AdminDirectory.Schema.OrgUnit;
  // Retrieves all organizational units.
  list(customerId: string): AdminDirectory.Schema.OrgUnits;
  // Retrieves all organizational units.
  list(customerId: string, optionalArgs: object): AdminDirectory.Schema.OrgUnits;
  // Updates an organizational unit. This method supports patch semantics.
  patch(resource: Schema.OrgUnit, customerId: string, orgUnitPath: string[]): AdminDirectory.Schema.OrgUnit;
  // Removes an organizational unit.
  remove(customerId: string, orgUnitPath: string[]): void;
  // Updates an organizational unit.
  update(resource: Schema.OrgUnit, customerId: string, orgUnitPath: string[]): AdminDirectory.Schema.OrgUnit;
}
// Read-only collection: lists all privileges defined for a customer.
interface PrivilegesCollection {
// Retrieves a paginated list of all privileges for a customer.
list(customer: string): AdminDirectory.Schema.Privileges;
}
// App-access settings for the logged-in user. Method names are PascalCase,
// presumably generated verbatim from the underlying RPC method names.
interface ResolvedAppAccessSettingsCollection {
// Retrieves resolved app access settings of the logged in user.
GetSettings(): AdminDirectory.Schema.AppAccessCollections;
// Retrieves the list of apps trusted by the admin of the logged in user.
ListTrustedApps(): AdminDirectory.Schema.TrustedApps;
}
// Container for the nested calendar-resource sub-collections
// (Buildings, Calendars, Features).
interface ResourcesCollection {
Buildings?: AdminDirectory.Collection.Resources.BuildingsCollection;
Calendars?: AdminDirectory.Collection.Resources.CalendarsCollection;
Features?: AdminDirectory.Collection.Resources.FeaturesCollection;
}
// Collection of Directory API operations on role assignments
// (get / insert / list / remove; no update — assignments are replaced, not patched).
interface RoleAssignmentsCollection {
// Retrieve a role assignment.
get(customer: string, roleAssignmentId: string): AdminDirectory.Schema.RoleAssignment;
// Creates a role assignment.
insert(resource: Schema.RoleAssignment, customer: string): AdminDirectory.Schema.RoleAssignment;
// Retrieves a paginated list of all roleAssignments.
list(customer: string): AdminDirectory.Schema.RoleAssignments;
// Retrieves a paginated list of all roleAssignments.
list(customer: string, optionalArgs: object): AdminDirectory.Schema.RoleAssignments;
// Deletes a role assignment.
remove(customer: string, roleAssignmentId: string): void;
}
// Collection of Directory API operations on admin roles
// (get / insert / list / patch / remove / update).
interface RolesCollection {
// Retrieves a role.
get(customer: string, roleId: string): AdminDirectory.Schema.Role;
// Creates a role.
insert(resource: Schema.Role, customer: string): AdminDirectory.Schema.Role;
// Retrieves a paginated list of all the roles in a domain.
list(customer: string): AdminDirectory.Schema.Roles;
// Retrieves a paginated list of all the roles in a domain.
list(customer: string, optionalArgs: object): AdminDirectory.Schema.Roles;
// Updates a role. This method supports patch semantics.
patch(resource: Schema.Role, customer: string, roleId: string): AdminDirectory.Schema.Role;
// Deletes a role.
remove(customer: string, roleId: string): void;
// Updates a role.
update(resource: Schema.Role, customer: string, roleId: string): AdminDirectory.Schema.Role;
}
// Collection of Directory API operations on custom user schemas.
interface SchemasCollection {
// Retrieve schema
get(customerId: string, schemaKey: string): AdminDirectory.Schema.Schema;
// Create schema.
insert(resource: Schema.Schema, customerId: string): AdminDirectory.Schema.Schema;
// Retrieve all schemas for a customer
list(customerId: string): AdminDirectory.Schema.Schemas;
// Update schema. This method supports patch semantics.
patch(resource: Schema.Schema, customerId: string, schemaKey: string): AdminDirectory.Schema.Schema;
// Delete schema
remove(customerId: string, schemaKey: string): void;
// Update schema
update(resource: Schema.Schema, customerId: string, schemaKey: string): AdminDirectory.Schema.Schema;
}
// Collection of Directory API operations on OAuth access tokens a user has
// issued to third-party applications (get / list / remove).
interface TokensCollection {
// Get information about an access token issued by a user.
get(userKey: string, clientId: string): AdminDirectory.Schema.Token;
// Returns the set of tokens specified user has issued to 3rd party applications.
list(userKey: string): AdminDirectory.Schema.Tokens;
// Delete all access tokens issued by a user for an application.
remove(userKey: string, clientId: string): void;
}
// Collection of Directory API operations on users, plus the nested
// Aliases and Photos sub-collections. `watch` registers a push-notification
// channel for changes to the user list.
interface UsersCollection {
Aliases?: AdminDirectory.Collection.Users.AliasesCollection;
Photos?: AdminDirectory.Collection.Users.PhotosCollection;
// retrieve user
get(userKey: string): AdminDirectory.Schema.User;
// retrieve user
get(userKey: string, optionalArgs: object): AdminDirectory.Schema.User;
// create user.
insert(resource: Schema.User): AdminDirectory.Schema.User;
// Retrieve either deleted users or all users in a domain (paginated)
list(): AdminDirectory.Schema.Users;
// Retrieve either deleted users or all users in a domain (paginated)
list(optionalArgs: object): AdminDirectory.Schema.Users;
// change admin status of a user
makeAdmin(resource: Schema.UserMakeAdmin, userKey: string): void;
// update user. This method supports patch semantics.
patch(resource: Schema.User, userKey: string): AdminDirectory.Schema.User;
// Delete user
remove(userKey: string): void;
// Undelete a deleted user
undelete(resource: Schema.UserUndelete, userKey: string): void;
// update user
update(resource: Schema.User, userKey: string): AdminDirectory.Schema.User;
// Watch for changes in users list
watch(resource: Schema.Channel): AdminDirectory.Schema.Channel;
// Watch for changes in users list
watch(resource: Schema.Channel, optionalArgs: object): AdminDirectory.Schema.Channel;
}
// Collection of Directory API operations on 2-step-verification backup codes
// (generate / invalidate / list).
interface VerificationCodesCollection {
// Generate new backup verification codes for the user.
generate(userKey: string): void;
// Invalidate the current backup verification codes for the user.
invalidate(userKey: string): void;
// Returns the current set of valid backup verification codes for the specified user.
list(userKey: string): AdminDirectory.Schema.VerificationCodes;
}
}
// Request/response body shapes for the Admin SDK Directory API advanced service.
// These typings appear to be generated; every field is optional because API
// responses may omit any field. Do not rename fields — they mirror the JSON
// wire format of the underlying REST API.
namespace Schema {
interface Alias {
alias?: string;
etag?: string;
id?: string;
kind?: string;
primaryEmail?: string;
}
interface Aliases {
aliases?: any[];
etag?: string;
kind?: string;
}
interface AppAccessCollections {
blockedApiAccessBuckets?: string[];
enforceSettingsForAndroidDrive?: boolean;
errorMessage?: string;
etag?: string;
kind?: string;
resourceId?: string;
resourceName?: string;
trustDomainOwnedApps?: boolean;
}
// Application-specific password metadata.
interface Asp {
codeId?: number;
creationTime?: string;
etag?: string;
kind?: string;
lastTimeUsed?: string;
name?: string;
userKey?: string;
}
interface Asps {
etag?: string;
items?: AdminDirectory.Schema.Asp[];
kind?: string;
}
interface Building {
address?: AdminDirectory.Schema.BuildingAddress;
buildingId?: string;
buildingName?: string;
coordinates?: AdminDirectory.Schema.BuildingCoordinates;
description?: string;
// NOTE(review): spelled 'etags' (not 'etag') here and on CalendarResource /
// Feature below — this looks like it mirrors the upstream API field name;
// confirm against the Directory API reference before "fixing" it.
etags?: string;
floorNames?: string[];
kind?: string;
}
interface BuildingAddress {
addressLines?: string[];
administrativeArea?: string;
languageCode?: string;
locality?: string;
postalCode?: string;
regionCode?: string;
sublocality?: string;
}
interface BuildingCoordinates {
latitude?: number;
longitude?: number;
}
interface Buildings {
buildings?: AdminDirectory.Schema.Building[];
etag?: string;
kind?: string;
nextPageToken?: string;
}
interface CalendarResource {
buildingId?: string;
capacity?: number;
etags?: string;
featureInstances?: object;
floorName?: string;
floorSection?: string;
generatedResourceName?: string;
kind?: string;
resourceCategory?: string;
resourceDescription?: string;
resourceEmail?: string;
resourceId?: string;
resourceName?: string;
resourceType?: string;
userVisibleDescription?: string;
}
interface CalendarResources {
etag?: string;
items?: AdminDirectory.Schema.CalendarResource[];
kind?: string;
nextPageToken?: string;
}
// Push-notification channel used by UsersCollection.watch.
interface Channel {
address?: string;
expiration?: string;
id?: string;
kind?: string;
params?: object;
payload?: boolean;
resourceId?: string;
resourceUri?: string;
token?: string;
type?: string;
}
interface ChromeOsDevice {
activeTimeRanges?: AdminDirectory.Schema.ChromeOsDeviceActiveTimeRanges[];
annotatedAssetId?: string;
annotatedLocation?: string;
annotatedUser?: string;
autoUpdateExpiration?: string;
bootMode?: string;
cpuStatusReports?: AdminDirectory.Schema.ChromeOsDeviceCpuStatusReports[];
deviceFiles?: AdminDirectory.Schema.ChromeOsDeviceDeviceFiles[];
deviceId?: string;
diskVolumeReports?: AdminDirectory.Schema.ChromeOsDeviceDiskVolumeReports[];
etag?: string;
ethernetMacAddress?: string;
firmwareVersion?: string;
kind?: string;
lastEnrollmentTime?: string;
lastSync?: string;
macAddress?: string;
meid?: string;
model?: string;
notes?: string;
orderNumber?: string;
orgUnitPath?: string;
osVersion?: string;
platformVersion?: string;
recentUsers?: AdminDirectory.Schema.ChromeOsDeviceRecentUsers[];
serialNumber?: string;
status?: string;
supportEndDate?: string;
systemRamFreeReports?: AdminDirectory.Schema.ChromeOsDeviceSystemRamFreeReports[];
systemRamTotal?: string;
tpmVersionInfo?: AdminDirectory.Schema.ChromeOsDeviceTpmVersionInfo;
willAutoRenew?: boolean;
}
interface ChromeOsDeviceAction {
action?: string;
deprovisionReason?: string;
}
interface ChromeOsDeviceActiveTimeRanges {
activeTime?: number;
date?: string;
}
interface ChromeOsDeviceCpuStatusReports {
cpuTemperatureInfo?: AdminDirectory.Schema.ChromeOsDeviceCpuStatusReportsCpuTemperatureInfo[];
cpuUtilizationPercentageInfo?: number[];
reportTime?: string;
}
interface ChromeOsDeviceCpuStatusReportsCpuTemperatureInfo {
label?: string;
temperature?: number;
}
interface ChromeOsDeviceDeviceFiles {
createTime?: string;
downloadUrl?: string;
name?: string;
type?: string;
}
interface ChromeOsDeviceDiskVolumeReports {
volumeInfo?: AdminDirectory.Schema.ChromeOsDeviceDiskVolumeReportsVolumeInfo[];
}
interface ChromeOsDeviceDiskVolumeReportsVolumeInfo {
storageFree?: string;
storageTotal?: string;
volumeId?: string;
}
interface ChromeOsDeviceRecentUsers {
email?: string;
type?: string;
}
interface ChromeOsDeviceSystemRamFreeReports {
reportTime?: string;
systemRamFreeInfo?: string[];
}
interface ChromeOsDeviceTpmVersionInfo {
family?: string;
firmwareVersion?: string;
manufacturer?: string;
specLevel?: string;
tpmModel?: string;
vendorSpecific?: string;
}
interface ChromeOsDevices {
chromeosdevices?: AdminDirectory.Schema.ChromeOsDevice[];
etag?: string;
kind?: string;
nextPageToken?: string;
}
interface ChromeOsMoveDevicesToOu {
deviceIds?: string[];
}
interface Customer {
alternateEmail?: string;
customerCreationTime?: string;
customerDomain?: string;
etag?: string;
id?: string;
kind?: string;
language?: string;
phoneNumber?: string;
postalAddress?: AdminDirectory.Schema.CustomerPostalAddress;
}
interface CustomerPostalAddress {
addressLine1?: string;
addressLine2?: string;
addressLine3?: string;
contactName?: string;
countryCode?: string;
locality?: string;
organizationName?: string;
postalCode?: string;
region?: string;
}
interface DomainAlias {
creationTime?: string;
domainAliasName?: string;
etag?: string;
kind?: string;
parentDomainName?: string;
verified?: boolean;
}
interface DomainAliases {
domainAliases?: AdminDirectory.Schema.DomainAlias[];
etag?: string;
kind?: string;
}
// Despite the plural name, this is a single domain record (see Domains2 for the list).
interface Domains {
creationTime?: string;
domainAliases?: AdminDirectory.Schema.DomainAlias[];
domainName?: string;
etag?: string;
isPrimary?: boolean;
kind?: string;
verified?: boolean;
}
// List wrapper for Domains; the '2' suffix avoids a name clash with the record type above.
interface Domains2 {
domains?: AdminDirectory.Schema.Domains[];
etag?: string;
kind?: string;
}
interface Feature {
etags?: string;
kind?: string;
name?: string;
}
interface FeatureInstance {
feature?: AdminDirectory.Schema.Feature;
}
interface FeatureRename {
newName?: string;
}
interface Features {
etag?: string;
features?: AdminDirectory.Schema.Feature[];
kind?: string;
nextPageToken?: string;
}
interface Group {
adminCreated?: boolean;
aliases?: string[];
description?: string;
directMembersCount?: string;
email?: string;
etag?: string;
id?: string;
kind?: string;
name?: string;
nonEditableAliases?: string[];
}
interface Groups {
etag?: string;
groups?: AdminDirectory.Schema.Group[];
kind?: string;
nextPageToken?: string;
}
interface Member {
delivery_settings?: string;
email?: string;
etag?: string;
id?: string;
kind?: string;
role?: string;
status?: string;
type?: string;
}
interface Members {
etag?: string;
kind?: string;
members?: AdminDirectory.Schema.Member[];
nextPageToken?: string;
}
interface MembersHasMember {
isMember?: boolean;
}
interface MobileDevice {
adbStatus?: boolean;
applications?: AdminDirectory.Schema.MobileDeviceApplications[];
basebandVersion?: string;
bootloaderVersion?: string;
brand?: string;
buildNumber?: string;
defaultLanguage?: string;
developerOptionsStatus?: boolean;
deviceCompromisedStatus?: string;
deviceId?: string;
devicePasswordStatus?: string;
email?: string[];
encryptionStatus?: string;
etag?: string;
firstSync?: string;
hardware?: string;
hardwareId?: string;
imei?: string;
kernelVersion?: string;
kind?: string;
lastSync?: string;
managedAccountIsOnOwnerProfile?: boolean;
manufacturer?: string;
meid?: string;
model?: string;
name?: string[];
networkOperator?: string;
os?: string;
otherAccountsInfo?: string[];
privilege?: string;
releaseVersion?: string;
resourceId?: string;
securityPatchLevel?: string;
serialNumber?: string;
status?: string;
supportsWorkProfile?: boolean;
type?: string;
unknownSourcesStatus?: boolean;
userAgent?: string;
wifiMacAddress?: string;
}
interface MobileDeviceAction {
action?: string;
}
interface MobileDeviceApplications {
displayName?: string;
packageName?: string;
permission?: string[];
versionCode?: number;
versionName?: string;
}
interface MobileDevices {
etag?: string;
kind?: string;
mobiledevices?: AdminDirectory.Schema.MobileDevice[];
nextPageToken?: string;
}
interface Notification {
body?: string;
etag?: string;
fromAddress?: string;
isUnread?: boolean;
kind?: string;
notificationId?: string;
sendTime?: string;
subject?: string;
}
interface Notifications {
etag?: string;
items?: AdminDirectory.Schema.Notification[];
kind?: string;
nextPageToken?: string;
unreadNotificationsCount?: number;
}
interface OrgUnit {
blockInheritance?: boolean;
description?: string;
etag?: string;
kind?: string;
name?: string;
orgUnitId?: string;
orgUnitPath?: string;
parentOrgUnitId?: string;
parentOrgUnitPath?: string;
}
interface OrgUnits {
etag?: string;
kind?: string;
organizationUnits?: AdminDirectory.Schema.OrgUnit[];
}
interface Privilege {
childPrivileges?: AdminDirectory.Schema.Privilege[];
etag?: string;
isOuScopable?: boolean;
kind?: string;
privilegeName?: string;
serviceId?: string;
serviceName?: string;
}
interface Privileges {
etag?: string;
items?: AdminDirectory.Schema.Privilege[];
kind?: string;
}
interface Role {
etag?: string;
isSuperAdminRole?: boolean;
isSystemRole?: boolean;
kind?: string;
roleDescription?: string;
roleId?: string;
roleName?: string;
rolePrivileges?: AdminDirectory.Schema.RoleRolePrivileges[];
}
interface RoleAssignment {
assignedTo?: string;
etag?: string;
kind?: string;
orgUnitId?: string;
roleAssignmentId?: string;
roleId?: string;
scopeType?: string;
}
interface RoleAssignments {
etag?: string;
items?: AdminDirectory.Schema.RoleAssignment[];
kind?: string;
nextPageToken?: string;
}
interface RoleRolePrivileges {
privilegeName?: string;
serviceId?: string;
}
interface Roles {
etag?: string;
items?: AdminDirectory.Schema.Role[];
kind?: string;
nextPageToken?: string;
}
interface Schema {
displayName?: string;
etag?: string;
fields?: AdminDirectory.Schema.SchemaFieldSpec[];
kind?: string;
schemaId?: string;
schemaName?: string;
}
interface SchemaFieldSpec {
displayName?: string;
etag?: string;
fieldId?: string;
fieldName?: string;
fieldType?: string;
indexed?: boolean;
kind?: string;
multiValued?: boolean;
numericIndexingSpec?: AdminDirectory.Schema.SchemaFieldSpecNumericIndexingSpec;
readAccessType?: string;
}
interface SchemaFieldSpecNumericIndexingSpec {
maxValue?: number;
minValue?: number;
}
interface Schemas {
etag?: string;
kind?: string;
schemas?: AdminDirectory.Schema.Schema[];
}
interface Token {
anonymous?: boolean;
clientId?: string;
displayText?: string;
etag?: string;
kind?: string;
nativeApp?: boolean;
scopes?: string[];
userKey?: string;
}
interface Tokens {
etag?: string;
items?: AdminDirectory.Schema.Token[];
kind?: string;
}
interface TrustedAppId {
androidPackageName?: string;
certificateHashSHA1?: string;
certificateHashSHA256?: string;
etag?: string;
kind?: string;
}
interface TrustedApps {
etag?: string;
kind?: string;
nextPageToken?: string;
trustedApps?: AdminDirectory.Schema.TrustedAppId[];
}
// Many sub-structures here are typed as the loose `object`; see the dedicated
// User* interfaces below (UserAddress, UserEmail, ...) for the expected shapes.
interface User {
addresses?: object;
agreedToTerms?: boolean;
aliases?: string[];
archived?: boolean;
changePasswordAtNextLogin?: boolean;
creationTime?: string;
customSchemas?: object;
customerId?: string;
deletionTime?: string;
emails?: object;
etag?: string;
externalIds?: object;
gender?: object;
hashFunction?: string;
id?: string;
ims?: object;
includeInGlobalAddressList?: boolean;
ipWhitelisted?: boolean;
isAdmin?: boolean;
isDelegatedAdmin?: boolean;
isEnforcedIn2Sv?: boolean;
isEnrolledIn2Sv?: boolean;
isMailboxSetup?: boolean;
keywords?: object;
kind?: string;
languages?: object;
lastLoginTime?: string;
locations?: object;
name?: AdminDirectory.Schema.UserName;
nonEditableAliases?: string[];
notes?: object;
orgUnitPath?: string;
organizations?: object;
password?: string;
phones?: object;
posixAccounts?: object;
primaryEmail?: string;
relations?: object;
sshPublicKeys?: object;
suspended?: boolean;
suspensionReason?: string;
thumbnailPhotoEtag?: string;
thumbnailPhotoUrl?: string;
websites?: object;
}
interface UserAbout {
contentType?: string;
value?: string;
}
interface UserAddress {
country?: string;
countryCode?: string;
customType?: string;
extendedAddress?: string;
formatted?: string;
locality?: string;
poBox?: string;
postalCode?: string;
primary?: boolean;
region?: string;
sourceIsStructured?: boolean;
streetAddress?: string;
type?: string;
}
interface UserEmail {
address?: string;
customType?: string;
primary?: boolean;
type?: string;
}
interface UserExternalId {
customType?: string;
type?: string;
value?: string;
}
interface UserGender {
addressMeAs?: string;
customGender?: string;
type?: string;
}
interface UserIm {
customProtocol?: string;
customType?: string;
im?: string;
primary?: boolean;
protocol?: string;
type?: string;
}
interface UserKeyword {
customType?: string;
type?: string;
value?: string;
}
interface UserLanguage {
customLanguage?: string;
languageCode?: string;
}
interface UserLocation {
area?: string;
buildingId?: string;
customType?: string;
deskCode?: string;
floorName?: string;
floorSection?: string;
type?: string;
}
interface UserMakeAdmin {
status?: boolean;
}
interface UserName {
familyName?: string;
fullName?: string;
givenName?: string;
}
interface UserOrganization {
costCenter?: string;
customType?: string;
department?: string;
description?: string;
domain?: string;
fullTimeEquivalent?: number;
location?: string;
name?: string;
primary?: boolean;
symbol?: string;
title?: string;
type?: string;
}
interface UserPhone {
customType?: string;
primary?: boolean;
type?: string;
value?: string;
}
interface UserPhoto {
etag?: string;
height?: number;
id?: string;
kind?: string;
mimeType?: string;
photoData?: string;
primaryEmail?: string;
width?: number;
}
interface UserPosixAccount {
accountId?: string;
gecos?: string;
gid?: string;
homeDirectory?: string;
operatingSystemType?: string;
primary?: boolean;
shell?: string;
systemId?: string;
uid?: string;
username?: string;
}
interface UserRelation {
customType?: string;
type?: string;
value?: string;
}
interface UserSshPublicKey {
expirationTimeUsec?: string;
fingerprint?: string;
key?: string;
}
interface UserUndelete {
orgUnitPath?: string;
}
interface UserWebsite {
customType?: string;
primary?: boolean;
type?: string;
value?: string;
}
interface Users {
etag?: string;
kind?: string;
nextPageToken?: string;
trigger_event?: string;
users?: AdminDirectory.Schema.User[];
}
interface VerificationCode {
etag?: string;
kind?: string;
userId?: string;
verificationCode?: string;
}
interface VerificationCodes {
etag?: string;
items?: AdminDirectory.Schema.VerificationCode[];
kind?: string;
}
}
}
// Top-level advanced-service interface: one optional property per REST
// collection, plus `newX()` factories that create empty Schema.X request
// bodies for the mutating collection methods.
interface AdminDirectory {
Asps?: AdminDirectory.Collection.AspsCollection;
Channels?: AdminDirectory.Collection.ChannelsCollection;
Chromeosdevices?: AdminDirectory.Collection.ChromeosdevicesCollection;
Customers?: AdminDirectory.Collection.CustomersCollection;
DomainAliases?: AdminDirectory.Collection.DomainAliasesCollection;
Domains?: AdminDirectory.Collection.DomainsCollection;
Groups?: AdminDirectory.Collection.GroupsCollection;
Members?: AdminDirectory.Collection.MembersCollection;
Mobiledevices?: AdminDirectory.Collection.MobiledevicesCollection;
Notifications?: AdminDirectory.Collection.NotificationsCollection;
Orgunits?: AdminDirectory.Collection.OrgunitsCollection;
Privileges?: AdminDirectory.Collection.PrivilegesCollection;
ResolvedAppAccessSettings?: AdminDirectory.Collection.ResolvedAppAccessSettingsCollection;
Resources?: AdminDirectory.Collection.ResourcesCollection;
RoleAssignments?: AdminDirectory.Collection.RoleAssignmentsCollection;
Roles?: AdminDirectory.Collection.RolesCollection;
Schemas?: AdminDirectory.Collection.SchemasCollection;
Tokens?: AdminDirectory.Collection.TokensCollection;
Users?: AdminDirectory.Collection.UsersCollection;
VerificationCodes?: AdminDirectory.Collection.VerificationCodesCollection;
// Create a new instance of Alias
newAlias(): AdminDirectory.Schema.Alias;
// Create a new instance of Building
newBuilding(): AdminDirectory.Schema.Building;
// Create a new instance of BuildingAddress
newBuildingAddress(): AdminDirectory.Schema.BuildingAddress;
// Create a new instance of BuildingCoordinates
newBuildingCoordinates(): AdminDirectory.Schema.BuildingCoordinates;
// Create a new instance of CalendarResource
newCalendarResource(): AdminDirectory.Schema.CalendarResource;
// Create a new instance of Channel
newChannel(): AdminDirectory.Schema.Channel;
// Create a new instance of ChromeOsDevice
newChromeOsDevice(): AdminDirectory.Schema.ChromeOsDevice;
// Create a new instance of ChromeOsDeviceAction
newChromeOsDeviceAction(): AdminDirectory.Schema.ChromeOsDeviceAction;
// Create a new instance of ChromeOsDeviceActiveTimeRanges
newChromeOsDeviceActiveTimeRanges(): AdminDirectory.Schema.ChromeOsDeviceActiveTimeRanges;
// Create a new instance of ChromeOsDeviceCpuStatusReports
newChromeOsDeviceCpuStatusReports(): AdminDirectory.Schema.ChromeOsDeviceCpuStatusReports;
// Create a new instance of ChromeOsDeviceCpuStatusReportsCpuTemperatureInfo
newChromeOsDeviceCpuStatusReportsCpuTemperatureInfo(): AdminDirectory.Schema.ChromeOsDeviceCpuStatusReportsCpuTemperatureInfo;
// Create a new instance of ChromeOsDeviceDeviceFiles
newChromeOsDeviceDeviceFiles(): AdminDirectory.Schema.ChromeOsDeviceDeviceFiles;
// Create a new instance of ChromeOsDeviceDiskVolumeReports
newChromeOsDeviceDiskVolumeReports(): AdminDirectory.Schema.ChromeOsDeviceDiskVolumeReports;
// Create a new instance of ChromeOsDeviceDiskVolumeReportsVolumeInfo
newChromeOsDeviceDiskVolumeReportsVolumeInfo(): AdminDirectory.Schema.ChromeOsDeviceDiskVolumeReportsVolumeInfo;
// Create a new instance of ChromeOsDeviceRecentUsers
newChromeOsDeviceRecentUsers(): AdminDirectory.Schema.ChromeOsDeviceRecentUsers;
// Create a new instance of ChromeOsDeviceSystemRamFreeReports
newChromeOsDeviceSystemRamFreeReports(): AdminDirectory.Schema.ChromeOsDeviceSystemRamFreeReports;
// Create a new instance of ChromeOsDeviceTpmVersionInfo
newChromeOsDeviceTpmVersionInfo(): AdminDirectory.Schema.ChromeOsDeviceTpmVersionInfo;
// Create a new instance of ChromeOsMoveDevicesToOu
newChromeOsMoveDevicesToOu(): AdminDirectory.Schema.ChromeOsMoveDevicesToOu;
// Create a new instance of Customer
newCustomer(): AdminDirectory.Schema.Customer;
// Create a new instance of CustomerPostalAddress
newCustomerPostalAddress(): AdminDirectory.Schema.CustomerPostalAddress;
// Create a new instance of DomainAlias
newDomainAlias(): AdminDirectory.Schema.DomainAlias;
// Create a new instance of Domains
newDomains(): AdminDirectory.Schema.Domains;
// Create a new instance of Feature
newFeature(): AdminDirectory.Schema.Feature;
// Create a new instance of FeatureRename
newFeatureRename(): AdminDirectory.Schema.FeatureRename;
// Create a new instance of Group
newGroup(): AdminDirectory.Schema.Group;
// Create a new instance of Member
newMember(): AdminDirectory.Schema.Member;
// Create a new instance of MobileDeviceAction
newMobileDeviceAction(): AdminDirectory.Schema.MobileDeviceAction;
// Create a new instance of Notification
newNotification(): AdminDirectory.Schema.Notification;
// Create a new instance of OrgUnit
newOrgUnit(): AdminDirectory.Schema.OrgUnit;
// Create a new instance of Role
newRole(): AdminDirectory.Schema.Role;
// Create a new instance of RoleAssignment
newRoleAssignment(): AdminDirectory.Schema.RoleAssignment;
// Create a new instance of RoleRolePrivileges
newRoleRolePrivileges(): AdminDirectory.Schema.RoleRolePrivileges;
// Create a new instance of Schema
newSchema(): AdminDirectory.Schema.Schema;
// Create a new instance of SchemaFieldSpec
newSchemaFieldSpec(): AdminDirectory.Schema.SchemaFieldSpec;
// Create a new instance of SchemaFieldSpecNumericIndexingSpec
newSchemaFieldSpecNumericIndexingSpec(): AdminDirectory.Schema.SchemaFieldSpecNumericIndexingSpec;
// Create a new instance of User
newUser(): AdminDirectory.Schema.User;
// Create a new instance of UserMakeAdmin
newUserMakeAdmin(): AdminDirectory.Schema.UserMakeAdmin;
// Create a new instance of UserName
newUserName(): AdminDirectory.Schema.UserName;
// Create a new instance of UserPhoto
newUserPhoto(): AdminDirectory.Schema.UserPhoto;
// Create a new instance of UserUndelete
newUserUndelete(): AdminDirectory.Schema.UserUndelete;
}
}
declare var AdminDirectory: GoogleAppsScript.AdminDirectory; | the_stack |
import { rebuildLinode } from '@linode/api-v4/lib/linodes';
import { UserDefinedField } from '@linode/api-v4/lib/stackscripts';
import { APIError } from '@linode/api-v4/lib/types';
import { RebuildLinodeFromStackScriptSchema } from '@linode/validation/lib/linodes.schema';
import { Formik, FormikProps } from 'formik';
import { withSnackbar, WithSnackbarProps } from 'notistack';
import { isEmpty } from 'ramda';
import * as React from 'react';
import { RouteComponentProps, withRouter } from 'react-router-dom';
import { compose } from 'recompose';
import AccessPanel from 'src/components/AccessPanel';
import ActionsPanel from 'src/components/ActionsPanel';
import Button from 'src/components/Button';
import Paper from 'src/components/core/Paper';
import { makeStyles, Theme } from 'src/components/core/styles';
import Typography from 'src/components/core/Typography';
import Grid from 'src/components/Grid';
import ImageSelect from 'src/components/ImageSelect';
import Notice from 'src/components/Notice';
import TextField from 'src/components/TextField';
import withImages, { WithImages } from 'src/containers/withImages.container';
import { resetEventsPolling } from 'src/eventsPolling';
import userSSHKeyHoc, {
UserSSHKeyProps,
} from 'src/features/linodes/userSSHKeyHoc';
import SelectStackScriptPanel from 'src/features/StackScripts/SelectStackScriptPanel';
import StackScriptDialog from 'src/features/StackScripts/StackScriptDialog';
import {
getCommunityStackscripts,
getMineAndAccountStackScripts,
} from 'src/features/StackScripts/stackScriptUtils';
import UserDefinedFieldsPanel from 'src/features/StackScripts/UserDefinedFieldsPanel';
import { useStackScript } from 'src/hooks/useStackScript';
import { filterImagesByType } from 'src/store/image/image.helpers';
import { getAPIErrorOrDefault } from 'src/utilities/errorUtils';
import {
handleFieldErrors,
handleGeneralErrors,
} from 'src/utilities/formikErrorUtils';
import scrollErrorIntoView from 'src/utilities/scrollErrorIntoView';
import { extendValidationSchema } from 'src/utilities/validatePassword';
// JSS style hook for the rebuild-from-StackScript form.
const useStyles = makeStyles((theme: Theme) => ({
root: {
paddingTop: theme.spacing(3),
},
// Spacing for general-error notices rendered above the form.
error: {
marginTop: theme.spacing(2),
},
// Placeholder panel shown when the selected StackScript exposes no images.
emptyImagePanel: {
padding: theme.spacing(3),
},
emptyImagePanelText: {
marginTop: theme.spacing(1),
padding: `${theme.spacing(1)}px 0`,
},
// Stack the action panel vertically and push its button to the right edge.
actionPanel: {
flexDirection: 'column',
'& button': {
alignSelf: 'flex-end',
},
},
}));
// Own props of RebuildFromStackScript (the rest arrive via HOCs — see CombinedProps).
interface Props {
// Selects which StackScript source is queried: the user's own/account scripts or community ones.
type: 'community' | 'account';
// Disables the form (e.g. restricted users).
disabled: boolean;
passwordHelperText: string;
// ID of the Linode to rebuild.
linodeId: number;
// Label of the Linode; compared against typed confirmation text to enable submit.
linodeLabel?: string;
// Called with Formik's generalError status so the parent can surface it.
handleRebuildError: (status: string) => void;
// Invoked after a rebuild has been successfully kicked off.
onClose: () => void;
}
// Own props plus everything injected by the composed HOCs
// (withImages, userSSHKeyHoc, withRouter, withSnackbar).
export type CombinedProps = Props &
WithImages &
UserSSHKeyProps &
RouteComponentProps &
WithSnackbarProps;
// Formik-managed fields. UDF values are NOT part of this form — they are
// dynamic and handled/validated separately via useStackScript state.
interface RebuildFromStackScriptForm {
image: string;
root_pass: string;
// Kept as a string to match the form field; the numeric StackScript id lives in useStackScript state.
stackscript_id: string;
}
// Empty-form defaults for Formik's initialValues.
const initialValues: RebuildFromStackScriptForm = {
image: '',
root_pass: '',
stackscript_id: '',
};
export const RebuildFromStackScript: React.FC<CombinedProps> = (props) => {
const {
imagesData,
userSSHKeys,
sshError,
requestKeys,
linodeId,
linodeLabel,
handleRebuildError,
onClose,
enqueueSnackbar,
passwordHelperText,
} = props;
const classes = useStyles();
/**
* Dynamic validation schema, with password validation
* dependent on a value from a feature flag. Remove this
* once API password validation is stable.
*/
const RebuildSchema = () =>
extendValidationSchema(RebuildLinodeFromStackScriptSchema);
const [confirmationText, setConfirmationText] = React.useState<string>('');
const submitButtonDisabled = confirmationText !== linodeLabel;
const [
ss,
handleSelectStackScript,
handleChangeUDF,
resetStackScript,
] = useStackScript(
Object.keys(imagesData).map((eachKey) => imagesData[eachKey])
);
// In this component, most errors are handled by Formik. This is not
// possible with UDFs, since they are dynamic. Their errors need to
// be handled separately.
const [udfErrors, setUdfErrors] = React.useState<APIError[] | undefined>(
undefined
);
const handleFormSubmit = (
{ image, root_pass }: RebuildFromStackScriptForm,
{
setSubmitting,
setStatus,
setErrors,
}: FormikProps<RebuildFromStackScriptForm>
) => {
setSubmitting(true);
rebuildLinode(linodeId, {
stackscript_id: ss.id,
stackscript_data: ss.udf_data,
root_pass,
image,
authorized_users: userSSHKeys
.filter((u) => u.selected)
.map((u) => u.username),
})
.then((_) => {
// Reset events polling since an in-progress event (rebuild) is happening.
resetEventsPolling();
setSubmitting(false);
enqueueSnackbar('Linode rebuild started', {
variant: 'info',
});
onClose();
})
.catch((errorResponse) => {
const APIErrors = getAPIErrorOrDefault(errorResponse);
setUdfErrors(getUDFErrors(APIErrors));
const defaultMessage = `There was an issue rebuilding your Linode.`;
const mapErrorToStatus = (generalError: string) =>
setStatus({ generalError });
setSubmitting(false);
const modifiedErrors = APIErrors.map((thisError) => {
/**
* Errors returned for attempting to rebuild from an invalid
* StackScript will have a field of 'script' (and an unhelpful
* error message). Since nothing in our form is listening to this
* field, the error will slip through without being shown to the user.
*
* If we have one of those, change the field to stackscriptId, which
* we're listening for in Formik, and use a more helpful message.
*/
if (thisError.field === 'script') {
const reason = thisError.reason.match(/invalid stackscript/i)
? 'The selected StackScript is invalid.'
: thisError.reason;
return { field: 'stackscript_id', reason };
} else {
return thisError;
}
});
handleFieldErrors(setErrors, modifiedErrors);
handleGeneralErrors(mapErrorToStatus, modifiedErrors, defaultMessage);
scrollErrorIntoView();
});
};
// Since UDFs are dynamic, they are not handled by Formik. They need
// to be validated separately. This functions checks if we've got values
// for all REQUIRED UDFs, and sets errors appropriately.
const validateUdfs = () => {
const maybeErrors: APIError[] = [];
// Walk through the defined UDFs
ss.user_defined_fields.forEach((eachUdf) => {
// Is it required? Do we have a value?
if (isUDFRequired(eachUdf) && !ss.udf_data[eachUdf.name]) {
// If not, we've got an error.
maybeErrors.push({
field: eachUdf.name,
reason: `A value for the ${eachUdf.name} is required.`,
});
}
});
return maybeErrors;
};
return (
<Formik
initialValues={initialValues}
validationSchema={RebuildSchema}
validateOnChange={false}
onSubmit={handleFormSubmit}
>
{({
errors,
handleSubmit,
setFieldValue,
status, // holds generalError messages
values,
validateForm,
}) => {
// We'd like to validate the form before submitting.
const handleRebuildButtonClick = () => {
// Validate stackscript_id, image, & root_pass
validateForm().then((maybeErrors) => {
// UDFs are not part of Formik - validate separately.
const maybeUDFErrors = validateUdfs();
setUdfErrors(maybeUDFErrors);
// If there aren't any errors, we can proceed.
if (isEmpty(maybeErrors) && maybeUDFErrors.length === 0) {
handleSubmit();
// The form receives the errors automatically, and we scroll them into view.
} else {
scrollErrorIntoView();
}
});
};
const handleSelect = (
id: number,
label: string,
username: string,
stackScriptImages: string[],
user_defined_fields: UserDefinedField[]
) => {
handleSelectStackScript(
id,
label,
username,
stackScriptImages,
user_defined_fields
);
            // Reset Image ID so that an incompatible image can't be submitted accidentally
setFieldValue('stackscript_id', id);
setFieldValue('image', '');
};
if (status) {
handleRebuildError(status.generalError);
}
return (
<Grid item className={classes.root}>
<form>
<SelectStackScriptPanel
error={errors.stackscript_id}
selectedId={ss.id}
selectedUsername={ss.username}
updateFor={[classes, ss.id, errors]}
onSelect={handleSelect}
publicImages={filterImagesByType(imagesData, 'public')}
resetSelectedStackScript={resetStackScript}
data-qa-select-stackscript
category={props.type}
header="Select StackScript"
request={
props.type === 'account'
? getMineAndAccountStackScripts
: getCommunityStackscripts
}
/>
{ss.user_defined_fields && ss.user_defined_fields.length > 0 && (
<UserDefinedFieldsPanel
errors={udfErrors}
selectedLabel={ss.label}
selectedUsername={ss.username}
handleChange={handleChangeUDF}
userDefinedFields={ss.user_defined_fields}
updateFor={[
classes,
ss.user_defined_fields,
ss.udf_data,
udfErrors,
]}
udf_data={ss.udf_data}
/>
)}
{ss.images && ss.images.length > 0 ? (
<ImageSelect
variant="public"
title="Choose Image"
images={ss.images}
handleSelectImage={(selected) =>
setFieldValue('image', selected)
}
selectedImageID={values.image}
error={errors.image}
/>
) : (
<Paper className={classes.emptyImagePanel}>
{/* empty state for images */}
{errors.image && <Notice error={true} text={errors.image} />}
<Typography variant="h2" data-qa-tp="Select Image">
Select Image
</Typography>
<Typography
variant="body1"
className={classes.emptyImagePanelText}
data-qa-no-compatible-images
>
No Compatible Images Available
</Typography>
</Paper>
)}
<AccessPanel
password={values.root_pass}
handleChange={(value) => setFieldValue('root_pass', value)}
updateFor={[values.root_pass, errors, userSSHKeys, ss.id]}
error={errors.root_pass}
users={userSSHKeys}
sshKeyError={sshError}
requestKeys={requestKeys}
data-qa-access-panel
passwordHelperText={passwordHelperText}
/>
<ActionsPanel className={classes.actionPanel}>
<Typography variant="h2">Confirm</Typography>
<Typography style={{ marginBottom: 8 }}>
To confirm these changes, type the label of the Linode{' '}
<strong>({linodeLabel})</strong> in the field below:
</Typography>
<TextField
label="Linode Label"
hideLabel
onChange={(e) => setConfirmationText(e.target.value)}
style={{ marginBottom: 16 }}
/>
<Button
buttonType="primary"
disabled={submitButtonDisabled}
onClick={handleRebuildButtonClick}
data-qa-rebuild
data-testid="rebuild-button"
>
Rebuild Linode
</Button>
</ActionsPanel>
</form>
<StackScriptDialog />
</Grid>
);
}}
</Formik>
);
};
// Compose the HOCs that inject SSH keys, snackbar notifications, image data,
// and router props into the component before exporting it.
const enhanced = compose<CombinedProps, Props>(
  userSSHKeyHoc,
  withSnackbar,
  withImages(),
  withRouter
);
export default enhanced(RebuildFromStackScript);
// =============================================================================
// Helpers
// =============================================================================
/**
 * Filters an API error list down to the errors that belong to user-defined
 * fields (UDFs).
 *
 * @param errors - raw API errors (may be undefined when there are none)
 * @returns the UDF-only errors, or undefined when `errors` was undefined
 */
const getUDFErrors = (errors: APIError[] | undefined) => {
  // Fields handled directly by the Formik form; any *other* error that
  // carries a `field` prop is assumed to belong to a UDF.
  const fixedErrorFields = ['stackscript_id', 'root_pass', 'image', 'none'];
  return errors
    ? errors.filter(
        (error) => !!error.field && !fixedErrorFields.includes(error.field)
      )
    : undefined;
};
const isUDFRequired = (udf: UserDefinedField) => !udf.hasOwnProperty('default'); | the_stack |
import {OutputFormat} from './OutputFormat';
import {Writer} from "ion-js";
import {IonComparisonReport} from "./ComparisonReport";
import {ComparisonContext} from "./ComparisonContext";
import {IonCompareArgs} from "./CliCompareArgs";
import {ComparisonResult, ComparisonResultType} from "ion-js";
import {IonEvent, IonEventType} from "ion-js";
import {IonTypes} from "ion-js";
import {makeReader} from "ion-js";
import {IonEventStream} from "ion-js";
/**
* The `command`, `describe`, and `handler` exports below are part of the yargs command module API
* See: https://github.com/yargs/yargs/blob/master/docs/advanced.md#providing-a-command-module
*/
// yargs command string: `compare` followed by one or more input file paths.
export const command = 'compare <input-file..>'
// Shown in `--help` output for the compare command.
export const describe = "Compare all inputs (which may contain Ion streams and/or EventStreams) against all other inputs " +
    "using the Ion data model's definition of equality. Write a ComparisonReport to the output.";
/**
 * yargs option definitions for the compare command.
 *
 * The help text is built from concatenated string fragments; each junction
 * carries an explicit separating space (the previous version rendered fused
 * words such as "performsa standard" and "|equiv-timeline").
 */
export const builder = {
    'comparison-type': {
        alias: "y",
        default: 'basic',
        choices: ['basic', 'equivs', 'non-equivs', 'equiv-timeline'],
        describe: "Comparison semantics to be used with the compare command, from the set (basic | equivs | non-equivs | " +
            "equiv-timeline). Any embedded streams in the inputs are compared for EventStream equality. 'basic' performs " +
            "a standard data-model comparison between the corresponding events (or embedded streams) in the inputs. " +
            "'equivs' verifies that each value (or embedded stream) in a top-level sequence is equivalent to every other " +
            "value (or embedded stream) in that sequence. 'non-equivs' does the same, but verifies that the values (or " +
            "embedded streams) are not equivalent. 'equiv-timeline' is the same as 'equivs', except that when top-level " +
            "sequences contain timestamp values, they are considered equivalent if they represent the same instant " +
            "regardless of whether they are considered equivalent by the Ion data model. [default: basic]"
    }
}
// yargs handler: constructing Compare runs the whole comparison as a side effect.
// NOTE(review): `argv` is implicitly `any`; consider typing it with the yargs
// Arguments shape used elsewhere in this CLI — confirm.
export const handler = function (argv) {
    let args = new IonCompareArgs(argv);
    new Compare(args);
}
/** The comparison semantics supported by the `compare` command. */
export enum ComparisonType {
    /** Data-model comparison of corresponding events/streams. */
    BASIC = "basic",
    /** Every value in a top-level sequence must equal every other. */
    EQUIVS = "equivs",
    /** Every value in a top-level sequence must differ from every other. */
    NON_EQUIVS = "non-equivs",
    /** Like `equivs`, but timestamps compare by instant rather than data model. */
    EQUIV_TIMELINE = "equiv-timeline",
}
/**
* Compare all inputs (which may contain Ion streams and/or EventStreams) against all other inputs
* using the Ion data model's definition of equality. Write a ComparisonReport to the output.
**/
export class Compare {
constructor(parsedArgs: IonCompareArgs) {
let output_writer = OutputFormat.createIonWriter(parsedArgs.getOutputFormatName());
if (output_writer) {
this.compareFiles(output_writer, parsedArgs);
}
}
// compares files according to comparison type
compareFiles(ionOutputWriter: Writer, args: IonCompareArgs): void {
for (let pathFirst of args.getInputFiles()) {
for (let pathSecond of args.getInputFiles()) {
let comparisonType = args.getComparisonType();
if (comparisonType == ComparisonType.BASIC && pathFirst === pathSecond) {
continue;
}
this.compareFilePair(ionOutputWriter, pathFirst, pathSecond, args);
}
}
}
private compareFilePair(ionOutputWriter: Writer, pathFirst: string, pathSecond: string, args: IonCompareArgs): void {
let lhs = new ComparisonContext(pathFirst, args);
let rhs = new ComparisonContext(pathSecond, args);
ionOutputWriter.close();
let comparisonType = args.getComparisonType();
let comparisonReport = new IonComparisonReport(lhs, rhs, args.getOutputFile(), comparisonType);
let result: ComparisonResult = new ComparisonResult();
let lhsEventStream = lhs.getEventStream();
let rhsEventStream = rhs.getEventStream();
if(comparisonType == ComparisonType.BASIC) {
if(lhsEventStream && rhsEventStream) {
result = lhs.getEventStream().compare(rhs.getEventStream());
if(result.result == ComparisonResultType.NOT_EQUAL) {
comparisonReport.writeComparisonReport(result.result, result.message, result.actualIndex, result.expectedIndex);
}
}
}
else if(comparisonType == ComparisonType.EQUIVS || comparisonType == ComparisonType.EQUIV_TIMELINE
|| comparisonType == ComparisonType.NON_EQUIVS) {
if(lhsEventStream && rhsEventStream) {
result = this.compareEquivs(lhs.getEventStream(), rhs.getEventStream(), comparisonType, comparisonReport);
if(comparisonType == ComparisonType.NON_EQUIVS && result.result == ComparisonResultType.EQUAL) {
comparisonReport.writeComparisonReport(result.result, result.message, result.actualIndex, result.expectedIndex);
}
else if((comparisonType == ComparisonType.EQUIVS || comparisonType == ComparisonType.EQUIV_TIMELINE) && result.result == ComparisonResultType.NOT_EQUAL) {
comparisonReport.writeComparisonReport(result.result, result.message, result.actualIndex, result.expectedIndex);
}
}
}
}
/**
* equivs, non-equivs & equiv-timeline comparison of eventstreams
*
* @param comparisonReport: optional argument to write a comparison report for the equivalence result
*/
compareEquivs(actual: IonEventStream, expected: IonEventStream , comparisonType: ComparisonType, comparisonReport?: IonComparisonReport): ComparisonResult {
let actualIndex: number = 0;
let expectedIndex: number = 0;
while (actualIndex < actual.getEvents().length && expectedIndex < expected.getEvents().length) {
let actualEvent = actual.getEvents()[actualIndex];
let expectedEvent = expected.getEvents()[expectedIndex];
if(actualEvent.eventType == IonEventType.STREAM_END && expectedEvent.eventType == IonEventType.STREAM_END) {
break;
} else if(actualEvent.eventType == IonEventType.STREAM_END || expectedEvent.eventType == IonEventType.STREAM_END) {
throw new Error("Different number of comparison sets.");
} else if (!(actualEvent.ionType == IonTypes.LIST || actualEvent.ionType == IonTypes.SEXP)
|| !(expectedEvent.ionType == IonTypes.LIST || expectedEvent.ionType == IonTypes.SEXP)) {
throw new Error("Comparison sets must be lists or s-expressions.");
} else if(actual.isEmbedded(actualEvent) as any ^ expected.isEmbedded(expectedEvent) as any) {
throw new Error("Both streams should be embedded streams.");
}
// both containers has type any as it maybe IonEventStream[] or IonEvent[]
// depending on the type compare method will be called
let actualContainer: any = [];
let expectedContainer: any = [];
if (actual.isEmbedded(actualEvent) && expected.isEmbedded(expectedEvent)) {
//we found a list of strings that we need to interpret as top level ion text streams.
actualContainer = this.parseEmbeddedStream(actualEvent)
expectedContainer = this.parseEmbeddedStream(expectedEvent)
} else {//we're in an sexp/list
actualContainer = this.parseContainer(actualEvent);
expectedContainer = this.parseContainer(expectedEvent);
}
for (let i = 0; i < actualContainer.length; i++) {
for (let j = 0; j < expectedContainer.length; j++) {
// for non-equivs: not comparing same index's value as it will always be same.
if (comparisonType == ComparisonType.NON_EQUIVS && i == j)
continue;
let actualContainerEvent: IonEvent = actualContainer[i];
let expectedContainerEvent: IonEvent = expectedContainer[j];
let eventResult;
if (comparisonType == ComparisonType.EQUIV_TIMELINE && actualContainerEvent.ionType == IonTypes.TIMESTAMP) {
let ionTimestampActual = actualContainerEvent.ionValue;
let ionTimestampExpected = expectedContainerEvent.ionValue;
eventResult = ionTimestampActual.compareTo(ionTimestampExpected) == 0 ? new ComparisonResult(ComparisonResultType.EQUAL)
: new ComparisonResult(ComparisonResultType.NOT_EQUAL, ionTimestampActual + " vs. " + ionTimestampExpected);
} else {
eventResult = actualContainerEvent.compare(expectedContainerEvent);
}
if ((comparisonType == ComparisonType.EQUIVS || comparisonType == ComparisonType.EQUIV_TIMELINE)
&& eventResult.result == ComparisonResultType.NOT_EQUAL) {
if(comparisonReport) {
// set event-index as the index of container + (i/j) representing the event-index inside container
// + 1 to skip the first event (the container event)
comparisonReport.writeComparisonReport(ComparisonResultType.NOT_EQUAL, eventResult.message, actualIndex+ i + 1, expectedIndex + j + 1);
}
return new ComparisonResult(ComparisonResultType.NOT_EQUAL);
} else if (comparisonType == ComparisonType.NON_EQUIVS && eventResult.result == ComparisonResultType.EQUAL) {
if(comparisonReport) {
// set event-index as the index of container + (i/j) representing the event-index inside container
// + 1 to skip the first event (the container event)
comparisonReport.writeComparisonReport(ComparisonResultType.EQUAL,
"Both values are equal in non-equivs comparison.", actualIndex + i + 1, expectedIndex + j + 1);
}
return new ComparisonResult(ComparisonResultType.EQUAL);
}
}
}
// set indices to the next container/event stream
actualIndex = actualIndex + actualEvent.ionValue.length + 1;
expectedIndex = expectedIndex + expectedEvent.ionValue.length + 1;
}
return new ComparisonResult(comparisonType == ComparisonType.NON_EQUIVS ? ComparisonResultType.NOT_EQUAL : ComparisonResultType.EQUAL);
}
// parse container into events
private parseContainer(event: IonEvent): IonEvent[] {
let container: IonEvent[] = [];
for (let j = 0; j < event.ionValue.length - 1; j++) {
container.push(event.ionValue[j]);
if (event.ionValue[j].eventType === IonEventType.CONTAINER_START) {
j += event.ionValue[j].ionValue.length;
}
}
return container;
}
// parse embedded stream into events
private parseEmbeddedStream(event: IonEvent): any {
let container: any = [];
let value = "";
for (let j = 0; j < event.ionValue.length - 1; j++) {
if(event.ionValue[j].eventType == IonEventType.STREAM_END) {
container.push(new IonEventStream(makeReader(value)));
value = "";
continue;
}
value += event.ionValue[j].ionValue + " ";
}
return container;
}
} | the_stack |
import * as net from 'net';
import * as tls from 'tls';
import { Connection, ConnectionOptions, CryptoConnection } from './connection';
import {
MongoNetworkError,
MongoNetworkTimeoutError,
AnyError,
MongoCompatibilityError,
MongoInvalidArgumentError,
MongoServerError,
MongoRuntimeError
} from '../error';
import { AUTH_PROVIDERS, AuthMechanism } from './auth/defaultAuthProviders';
import { AuthContext } from './auth/auth_provider';
import { makeClientMetadata, ClientMetadata, Callback, CallbackWithType, ns } from '../utils';
import {
MAX_SUPPORTED_WIRE_VERSION,
MAX_SUPPORTED_SERVER_VERSION,
MIN_SUPPORTED_WIRE_VERSION,
MIN_SUPPORTED_SERVER_VERSION
} from './wire_protocol/constants';
import type { Document } from '../bson';
import { Int32 } from '../bson';
import type { Socket, SocketConnectOpts } from 'net';
import type { TLSSocket, ConnectionOptions as TLSConnectionOpts } from 'tls';
// Test-only escape hatch: when this env var is "true", a serviceId is
// fabricated in load-balanced mode for servers that don't support it yet
// (see the NODE-3431 TODO in performInitialHandshake).
const FAKE_MONGODB_SERVICE_ID =
  typeof process.env.FAKE_MONGODB_SERVICE_ID === 'string' &&
  process.env.FAKE_MONGODB_SERVICE_ID.toLowerCase() === 'true';
/** @public */
export type Stream = Socket | TLSSocket;
/**
 * Establishes a socket for the given options, wraps it in the appropriate
 * Connection type, and performs the initial MongoDB handshake on it.
 */
export function connect(options: ConnectionOptions, callback: Callback<Connection>): void {
  makeConnection(options, (err, socket) => {
    if (err || !socket) {
      callback(err);
      return;
    }
    // Auto-encryption requires the crypto-aware connection subclass.
    const ConnectionType = options.autoEncrypter
      ? CryptoConnection
      : options.connectionType ?? Connection;
    performInitialHandshake(new ConnectionType(socket, options), options, callback);
  });
}
/**
 * Validates the wire-version range reported in the server's hello/ismaster
 * response. Returns null when the server is supported, otherwise a
 * MongoCompatibilityError describing which bound was violated.
 */
function checkSupportedServer(ismaster: Document, options: ConnectionOptions) {
  // A wire version may arrive as a JS number or as a BSON Int32.
  const isWireVersion = (v: unknown) => typeof v === 'number' || v instanceof Int32;
  const maxIsHighEnough =
    ismaster && isWireVersion(ismaster.maxWireVersion) && ismaster.maxWireVersion >= MIN_SUPPORTED_WIRE_VERSION;
  const minIsLowEnough =
    ismaster && isWireVersion(ismaster.minWireVersion) && ismaster.minWireVersion <= MAX_SUPPORTED_WIRE_VERSION;
  if (!maxIsHighEnough) {
    // Server is too old for this driver.
    return new MongoCompatibilityError(
      `Server at ${options.hostAddress} reports maximum wire version ${
        JSON.stringify(ismaster.maxWireVersion) ?? 0
      }, but this version of the Node.js Driver requires at least ${MIN_SUPPORTED_WIRE_VERSION} (MongoDB ${MIN_SUPPORTED_SERVER_VERSION})`
    );
  }
  if (!minIsLowEnough) {
    // Server is too new for this driver.
    return new MongoCompatibilityError(
      `Server at ${options.hostAddress} reports minimum wire version ${JSON.stringify(
        ismaster.minWireVersion
      )}, but this version of the Node.js Driver requires at most ${MAX_SUPPORTED_WIRE_VERSION} (MongoDB ${MAX_SUPPORTED_SERVER_VERSION})`
    );
  }
  return null;
}
/**
 * Runs the initial `hello`/`ismaster` handshake on a freshly created
 * connection and, when credentials are present, authenticates it. The
 * connection is destroyed on any failure before the callback fires.
 */
function performInitialHandshake(
  conn: Connection,
  options: ConnectionOptions,
  _callback: Callback
) {
  // Wrap the caller's callback so a failed handshake always tears the
  // connection down before reporting the error.
  const callback: Callback<Document> = function (err, ret) {
    if (err && conn) {
      conn.destroy();
    }
    _callback(err, ret);
  };
  const credentials = options.credentials;
  if (credentials) {
    // Fail fast on an unknown auth mechanism. MONGODB_DEFAULT is exempt: it
    // is resolved to a concrete mechanism from the server response below.
    if (
      !(credentials.mechanism === AuthMechanism.MONGODB_DEFAULT) &&
      !AUTH_PROVIDERS.get(credentials.mechanism)
    ) {
      callback(
        new MongoInvalidArgumentError(`AuthMechanism '${credentials.mechanism}' not supported`)
      );
      return;
    }
  }
  const authContext = new AuthContext(conn, credentials, options);
  prepareHandshakeDocument(authContext, (err, handshakeDoc) => {
    if (err || !handshakeDoc) {
      return callback(err);
    }
    const handshakeOptions: Document = Object.assign({}, options);
    if (typeof options.connectTimeoutMS === 'number') {
      // The handshake technically is a monitoring check, so its socket timeout should be connectTimeoutMS
      handshakeOptions.socketTimeoutMS = options.connectTimeoutMS;
    }
    // Used below to record the handshake round-trip time on the connection.
    const start = new Date().getTime();
    conn.command(ns('admin.$cmd'), handshakeDoc, handshakeOptions, (err, response) => {
      if (err) {
        callback(err);
        return;
      }
      // A command that succeeds at the transport level can still report a
      // server-side failure via ok: 0.
      if (response?.ok === 0) {
        callback(new MongoServerError(response));
        return;
      }
      if ('isWritablePrimary' in response) {
        // Provide pre-hello-style response document.
        response.ismaster = response.isWritablePrimary;
      }
      if (response.helloOk) {
        conn.helloOk = true;
      }
      const supportedServerErr = checkSupportedServer(response, options);
      if (supportedServerErr) {
        callback(supportedServerErr);
        return;
      }
      if (options.loadBalanced) {
        // TODO: Durran: Remove when server support exists. (NODE-3431)
        if (FAKE_MONGODB_SERVICE_ID) {
          response.serviceId = response.topologyVersion.processId;
        }
        // Load balancing requires the server to assign a serviceId.
        if (!response.serviceId) {
          return callback(
            new MongoCompatibilityError(
              'Driver attempted to initialize in load balancing mode, ' +
                'but the server does not support this mode.'
            )
          );
        }
      }
      // NOTE: This is metadata attached to the connection while porting away from
      // handshake being done in the `Server` class. Likely, it should be
      // relocated, or at very least restructured.
      conn.ismaster = response;
      conn.lastIsMasterMS = new Date().getTime() - start;
      if (!response.arbiterOnly && credentials) {
        // store the response on auth context
        authContext.response = response;
        // MONGODB_DEFAULT is resolved here using the server's capabilities.
        const resolvedCredentials = credentials.resolveAuthMechanism(response);
        const provider = AUTH_PROVIDERS.get(resolvedCredentials.mechanism);
        if (!provider) {
          return callback(
            new MongoInvalidArgumentError(
              `No AuthProvider for ${resolvedCredentials.mechanism} defined.`
            )
          );
        }
        provider.auth(authContext, err => {
          if (err) return callback(err);
          callback(undefined, conn);
        });
        return;
      }
      // No auth required (or the server is an arbiter): handshake complete.
      callback(undefined, conn);
    });
  });
}
/**
 * Shape of the first command sent on a new connection. Exactly one of
 * `ismaster`/`hello` is set, chosen by server API versioning
 * (see prepareHandshakeDocument).
 */
export interface HandshakeDocument extends Document {
  ismaster?: boolean;
  hello?: boolean;
  /** Advertises that this client understands the `hello` command. */
  helloOk?: boolean;
  client: ClientMetadata;
  /** Compressor names the client is willing to use. */
  compression: string[];
  /** `source.username` — asks the server for this user's SASL mechanisms. */
  saslSupportedMechs?: string;
  loadBalanced: boolean;
}
/**
 * Builds the initial handshake command (`hello` when a server API version is
 * declared, otherwise legacy `ismaster`) and lets the auth provider add its
 * mechanism-specific fields before it is sent.
 */
function prepareHandshakeDocument(authContext: AuthContext, callback: Callback<HandshakeDocument>) {
  const options = authContext.options;
  const compressors = options.compressors ? options.compressors : [];
  const { serverApi } = authContext.connection;
  const handshakeDoc: HandshakeDocument = {
    // Versioned-API connections must use `hello`; otherwise use `ismaster`.
    [serverApi?.version ? 'hello' : 'ismaster']: true,
    helloOk: true,
    client: options.metadata || makeClientMetadata(options),
    compression: compressors,
    loadBalanced: options.loadBalanced
  };
  const credentials = authContext.credentials;
  if (credentials) {
    if (credentials.mechanism === AuthMechanism.MONGODB_DEFAULT && credentials.username) {
      // Ask the server which SASL mechanisms this user supports so the
      // concrete mechanism can be resolved after the handshake; SCRAM-SHA-256
      // is used as the provider to prepare the document.
      handshakeDoc.saslSupportedMechs = `${credentials.source}.${credentials.username}`;
      const provider = AUTH_PROVIDERS.get(AuthMechanism.MONGODB_SCRAM_SHA256);
      if (!provider) {
        // This auth mechanism is always present.
        return callback(
          new MongoInvalidArgumentError(
            `No AuthProvider for ${AuthMechanism.MONGODB_SCRAM_SHA256} defined.`
          )
        );
      }
      return provider.prepare(handshakeDoc, authContext, callback);
    }
    const provider = AUTH_PROVIDERS.get(credentials.mechanism);
    if (!provider) {
      return callback(
        new MongoInvalidArgumentError(`No AuthProvider for ${credentials.mechanism} defined.`)
      );
    }
    return provider.prepare(handshakeDoc, authContext, callback);
  }
  // No credentials: send the handshake document as-is.
  callback(undefined, handshakeDoc);
}
/**
 * TLS socket options copied verbatim from ConnectionOptions into the
 * `tls.connect` options (see parseSslOptions).
 * @public
 */
export const LEGAL_TLS_SOCKET_OPTIONS = [
  'ALPNProtocols',
  'ca',
  'cert',
  'checkServerIdentity',
  'ciphers',
  'crl',
  'ecdhCurve',
  'key',
  'minDHSize',
  'passphrase',
  'pfx',
  'rejectUnauthorized',
  'secureContext',
  'secureProtocol',
  'servername',
  'session'
] as const;
/**
 * TCP socket options copied verbatim from ConnectionOptions into the
 * `net.connect` options (see parseConnectOptions).
 * @public
 */
export const LEGAL_TCP_SOCKET_OPTIONS = [
  'family',
  'hints',
  'localAddress',
  'localPort',
  'lookup'
] as const;
/**
 * Builds the `net.connect` options from a ConnectionOptions object: copies
 * the legal TCP options and resolves either an IPC socket path or a
 * host/port pair from the host address.
 */
function parseConnectOptions(options: ConnectionOptions): SocketConnectOpts {
  const { hostAddress } = options;
  if (!hostAddress) throw new MongoInvalidArgumentError('Option "hostAddress" is required');

  const result: Partial<net.TcpNetConnectOpts & net.IpcNetConnectOpts> = {};
  for (const name of LEGAL_TCP_SOCKET_OPTIONS) {
    if (options[name] != null) {
      (result as Document)[name] = options[name];
    }
  }

  if (typeof hostAddress.socketPath === 'string') {
    // Unix domain socket / named pipe connection.
    result.path = hostAddress.socketPath;
    return result as net.IpcNetConnectOpts;
  }
  if (typeof hostAddress.host === 'string') {
    result.host = hostAddress.host;
    result.port = hostAddress.port;
    return result as net.TcpNetConnectOpts;
  }
  // This should never happen since we set up HostAddresses
  // But if we don't throw here the socket could hang until timeout
  // TODO(NODE-3483)
  throw new MongoRuntimeError(`Unexpected HostAddress ${JSON.stringify(hostAddress)}`);
}
/**
 * Extends the TCP connect options with every legal TLS option the caller
 * supplied, defaulting the SNI servername to the hostname when it is not an
 * IP literal.
 */
function parseSslOptions(options: ConnectionOptions): TLSConnectionOpts {
  const result: TLSConnectionOpts = parseConnectOptions(options);
  // Merge in valid SSL options
  for (const name of LEGAL_TLS_SOCKET_OPTIONS) {
    const value = options[name];
    if (value != null) {
      (result as Document)[name] = value;
    }
  }
  // Set default sni servername to be the same as host
  const { host, servername } = result;
  if (servername == null && host && !net.isIP(host)) {
    result.servername = host;
  }
  return result;
}
// Socket events that indicate failure during connection establishment.
const SOCKET_ERROR_EVENT_LIST = ['error', 'close', 'timeout', 'parseError'] as const;
// 'cancel' comes from the driver's cancellation token, not from the socket.
type ErrorHandlerEventName = typeof SOCKET_ERROR_EVENT_LIST[number] | 'cancel';
const SOCKET_ERROR_EVENTS = new Set(SOCKET_ERROR_EVENT_LIST);
/**
 * Creates the underlying TCP or TLS socket for a connection and hands it to
 * the callback once the (secure) connect event fires. Any socket error,
 * close, timeout, or external cancellation before that point is surfaced as
 * a MongoNetworkError variant and the socket is destroyed.
 */
function makeConnection(options: ConnectionOptions, _callback: CallbackWithType<AnyError, Stream>) {
  const useTLS = options.tls ?? false;
  const keepAlive = options.keepAlive ?? true;
  const socketTimeoutMS = options.socketTimeoutMS ?? Reflect.get(options, 'socketTimeout') ?? 0;
  const noDelay = options.noDelay ?? true;
  const connectionTimeout = options.connectTimeoutMS ?? 30000;
  const rejectUnauthorized = options.rejectUnauthorized ?? true;
  // Cap the keep-alive probe delay at half the socket timeout, presumably so
  // a probe can fire before the socket times out. socketTimeoutMS === 0 means
  // "no timeout", so no cap applies in that case. (Fix: the previous
  // expression compared against 0 and collapsed the delay to
  // Math.round(0 / 2) === 0 whenever socketTimeoutMS was unset.)
  const requestedKeepAliveDelay = options.keepAliveInitialDelay ?? 120000;
  const keepAliveInitialDelay =
    socketTimeoutMS > 0 && requestedKeepAliveDelay > socketTimeoutMS
      ? Math.round(socketTimeoutMS / 2)
      : requestedKeepAliveDelay;
  let socket: Stream;
  // Ensure a failed connection attempt never leaks its socket.
  const callback: Callback<Stream> = function (err, ret) {
    if (err && socket) {
      socket.destroy();
    }
    _callback(err, ret);
  };
  if (useTLS) {
    const tlsSocket = tls.connect(parseSslOptions(options));
    if (typeof tlsSocket.disableRenegotiation === 'function') {
      tlsSocket.disableRenegotiation();
    }
    socket = tlsSocket;
  } else {
    socket = net.createConnection(parseConnectOptions(options));
  }
  socket.setKeepAlive(keepAlive, keepAliveInitialDelay);
  // Use the connect timeout while establishing; it is swapped for
  // socketTimeoutMS once the connection succeeds (see connectHandler).
  socket.setTimeout(connectionTimeout);
  socket.setNoDelay(noDelay);
  const connectEvent = useTLS ? 'secureConnect' : 'connect';
  let cancellationHandler: (err: Error) => void;
  // Produces a one-shot failure handler for the given event; it detaches all
  // other listeners so only the first outcome (failure or success) wins.
  function errorHandler(eventName: ErrorHandlerEventName) {
    return (err: Error) => {
      SOCKET_ERROR_EVENTS.forEach(event => socket.removeAllListeners(event));
      if (cancellationHandler && options.cancellationToken) {
        options.cancellationToken.removeListener('cancel', cancellationHandler);
      }
      socket.removeListener(connectEvent, connectHandler);
      callback(connectionFailureError(eventName, err));
    };
  }
  function connectHandler() {
    SOCKET_ERROR_EVENTS.forEach(event => socket.removeAllListeners(event));
    if (cancellationHandler && options.cancellationToken) {
      options.cancellationToken.removeListener('cancel', cancellationHandler);
    }
    // A TLS socket can emit 'secureConnect' while still carrying an
    // authorization error; reject it when configured to.
    if ('authorizationError' in socket) {
      if (socket.authorizationError && rejectUnauthorized) {
        return callback(socket.authorizationError);
      }
    }
    socket.setTimeout(socketTimeoutMS);
    callback(undefined, socket);
  }
  SOCKET_ERROR_EVENTS.forEach(event => socket.once(event, errorHandler(event)));
  if (options.cancellationToken) {
    cancellationHandler = errorHandler('cancel');
    options.cancellationToken.once('cancel', cancellationHandler);
  }
  socket.once(connectEvent, connectHandler);
}
function connectionFailureError(type: string, err: Error) {
switch (type) {
case 'error':
return new MongoNetworkError(err);
case 'timeout':
return new MongoNetworkTimeoutError('connection timed out');
case 'close':
return new MongoNetworkError('connection closed');
case 'cancel':
return new MongoNetworkError('connection establishment was cancelled');
default:
return new MongoNetworkError('unknown network error');
}
} | the_stack |
import {
Connection,
Server,
WebSocketTransport
} from "vscode-cdp-proxy";
import * as semver from "semver";
import { IncomingMessage } from "http";
import { OutputChannelLogger } from "../../utils/log/outputChannelLogger";
import { DebuggerEndpointHelper } from "./debuggerEndpointHelper";
import { LogLevel } from "../../utils/log/logHelper";
import { CancellationToken } from "vscode";
import { SourcemapPathTransformer } from "./sourcemapPathTransformer";
import { PlatformType } from "../cordovaDebugSession";
import { IProjectType } from "../../utils/cordovaProjectHelper";
import { SimulateHelper } from "../../utils/simulateHelper";
import { CDPMessageHandlerBase, DispatchDirection } from "./CDPMessageHandlers/CDPMessageHandlerBase";
import { ChromeCDPMessageHandler } from "./CDPMessageHandlers/chromeCDPMessageHandler";
import { SafariCDPMessageHandler } from "./CDPMessageHandlers/safariCDPMessageHandler";
import { ICordovaAttachRequestArgs } from "../requestArgs";
import { TargetType } from "../cordovaDebugSession";
export class CordovaCDPProxy {
    // Tags used to label proxied CDP traffic in the output channel log.
    private readonly PROXY_LOG_TAGS = {
        DEBUGGER_COMMAND: "Command Debugger To Target",
        APPLICATION_COMMAND: "Command Target To Debugger",
        DEBUGGER_REPLY: "Reply From Debugger To Target",
        APPLICATION_REPLY: "Reply From Target To Debugger",
    };
    // Proxy server that accepts the debugger (IDE) connection.
    private server: Server | null;
    private hostAddress: string;
    private port: number;
    // CDP connection from the IDE's debug adapter.
    private debuggerTarget: Connection | null;
    // CDP connection to the application (browser endpoint).
    private applicationTarget: Connection | null;
    // Extra page-level connection used only in simulate mode.
    private simPageTarget: Connection | null;
    private logger: OutputChannelLogger;
    private debuggerEndpointHelper: DebuggerEndpointHelper;
    private applicationTargetPort: number;
    private logLevel: LogLevel;
    private cancellationToken: CancellationToken | undefined;
    // Translates messages between the debugger's and the target's CDP dialects.
    private CDPMessageHandler: CDPMessageHandlerBase;
    // False while a Safari/WebKit target still needs its preparation exchange;
    // the debugger connection stays paused until this becomes true.
    private communicationPreparationsDone: boolean;
    private browserInspectUri: string;
    private isSimulate: boolean;
    /**
     * @param hostAddress host the proxy server will listen on
     * @param port port the proxy server will listen on
     * @param logLevel verbosity of the proxied-traffic log (default: none)
     */
    constructor(
        hostAddress: string,
        port: number,
        sourcemapPathTransformer: SourcemapPathTransformer,
        projectType: IProjectType,
        args: ICordovaAttachRequestArgs,
        logLevel: LogLevel = LogLevel.None
    ) {
        this.port = port;
        this.hostAddress = hostAddress;
        this.logLevel = logLevel;
        this.logger = OutputChannelLogger.getChannel("Cordova Chrome Proxy", true, false, true);
        this.debuggerEndpointHelper = new DebuggerEndpointHelper();
        this.browserInspectUri = args.webSocketDebuggerUrl || "";
        this.isSimulate = !!(SimulateHelper.isSimulateTarget(args.target) && args.simulatePort);
        // iOS emulators/devices use the Safari (WebKit) CDP dialect and require
        // a preparation exchange before debugger traffic can flow; everything
        // else speaks Chrome's dialect and is ready immediately.
        if (args.platform === PlatformType.IOS && (args.target === TargetType.Emulator || args.target === TargetType.Device)) {
            this.CDPMessageHandler = new SafariCDPMessageHandler(sourcemapPathTransformer, projectType, args);
            this.communicationPreparationsDone = false;
        } else {
            this.CDPMessageHandler = new ChromeCDPMessageHandler(sourcemapPathTransformer, projectType, args);
            this.communicationPreparationsDone = true;
        }
    }
public createServer(logLevel: LogLevel, cancellationToken: CancellationToken): Promise<void> {
this.cancellationToken = cancellationToken;
this.logLevel = logLevel;
return Server.create({ port: this.port, host: this.hostAddress })
.then((server: Server) => {
this.server = server;
this.server.onConnection(this.onConnectionHandler.bind(this));
});
}
public async stopServer(): Promise<void> {
if (this.simPageTarget) {
await this.simPageTarget.close();
this.simPageTarget = null;
}
if (this.applicationTarget) {
await this.applicationTarget.close();
this.applicationTarget = null;
}
if (this.server) {
this.server.dispose();
this.server = null;
}
this.browserInspectUri = "";
this.cancellationToken = undefined;
}
    // Records the port of the application's CDP endpoint.
    public setApplicationTargetPort(applicationTargetPort: number): void {
        this.applicationTargetPort = applicationTargetPort;
    }
    // Records the browser-level WebSocket debugger URL; when set, endpoint
    // discovery in onConnectionHandler is skipped.
    public setBrowserInspectUri(browserInspectUri: string): void {
        this.browserInspectUri = browserInspectUri;
    }
    // Re-configures the message handler once attach args are fully processed.
    // On iOS versions below 12.2 the preparation exchange is skipped entirely
    // (marked done immediately).
    public configureCDPMessageHandlerAccordingToProcessedAttachArgs(args: ICordovaAttachRequestArgs): void {
        if (
            args.iOSVersion
            && !this.communicationPreparationsDone
            && semver.lt(args.iOSVersion, "12.2.0")
        ) {
            this.communicationPreparationsDone = true;
        }
        this.CDPMessageHandler.configureHandlerAccordingToProcessedAttachArgs(args);
    }
    // Returns the CDP API of the simulate-page connection, if one was opened.
    public getSimPageTargetAPI(): any | undefined {
        return this.simPageTarget?.api;
    }
    /**
     * Accepts a debugger (IDE) connection, resolves/creates the application
     * target connection(s), wires message handlers in both directions, and
     * finally releases the paused debugger connection.
     */
    private async onConnectionHandler([debuggerTarget]: [Connection, IncomingMessage]): Promise<void> {
        this.debuggerTarget = debuggerTarget;
        this.debuggerTarget.pause(); // don't listen for events until the target is ready
        // Discover the browser-level CDP endpoint unless the attach args
        // already supplied one (webSocketDebuggerUrl / setBrowserInspectUri).
        if (!this.browserInspectUri) {
            if (this.cancellationToken) {
                this.browserInspectUri = await this.debuggerEndpointHelper.retryGetWSEndpoint(
                    `http://localhost:${this.applicationTargetPort}`,
                    20,
                    this.cancellationToken
                );
            } else {
                this.browserInspectUri = await this.debuggerEndpointHelper.getWSEndpoint(`http://localhost:${this.applicationTargetPort}`);
            }
        }
        if (this.isSimulate) {
            // There is a problem that the browser endpoint cannot handle "Emulation" domain requests, so we attach to
            // the application page endpoint, since each page is processed in a separate process.
            // But the application page endpoint does not handle "Target" domain requests, that's why we store both browser
            // and app page connections.
            const simPageInspectUri = await this.debuggerEndpointHelper.getWSEndpoint(`http://localhost:${this.applicationTargetPort}`, this.isSimulate);
            this.simPageTarget = new Connection(await WebSocketTransport.create(simPageInspectUri));
        }
        this.applicationTarget = new Connection(await WebSocketTransport.create(this.browserInspectUri));
        // Mirror every event class (error/command/reply/end) on both sides.
        this.applicationTarget.onError(this.onApplicationTargetError.bind(this));
        this.debuggerTarget.onError(this.onDebuggerTargetError.bind(this));
        this.applicationTarget.onCommand(this.handleApplicationTargetCommand.bind(this));
        this.debuggerTarget.onCommand(this.handleDebuggerTargetCommand.bind(this));
        this.applicationTarget.onReply(this.handleApplicationTargetReply.bind(this));
        this.debuggerTarget.onReply(this.handleDebuggerTargetReply.bind(this));
        this.applicationTarget.onEnd(this.onApplicationTargetClosed.bind(this));
        this.debuggerTarget.onEnd(this.onDebuggerTargetClosed.bind(this));
        this.CDPMessageHandler.setApplicationTarget(this.applicationTarget);
        this.CDPMessageHandler.setDebuggerTarget(this.debuggerTarget);
        // dequeue any messages we got in the meantime
        this.unpauseDebuggerTarget();
    }
/**
 * Handles a CDP command arriving from the debugger side: logs it, lets the
 * message handler process/rewrite it, then routes it per dispatch direction.
 */
private handleDebuggerTargetCommand(event: any) {
    this.logger.logWithCustomTag(this.PROXY_LOG_TAGS.DEBUGGER_COMMAND, JSON.stringify(event, null, 2), this.logLevel);
    const processed = this.CDPMessageHandler.processDebuggerCDPMessage(event);
    switch (processed.dispatchDirection) {
        case DispatchDirection.BACK:
            // Reflect the (possibly rewritten) message back to the debugger.
            this.debuggerTarget?.send(processed.event);
            break;
        case DispatchDirection.FORWARD:
            this.applicationTarget?.send(processed.event);
            break;
    }
}
/**
 * Handles a CDP command arriving from the application side. Also unblocks the
 * paused debugger connection once the handler signals that the initial
 * communication preparations have completed.
 */
private handleApplicationTargetCommand(event: any) {
    this.logger.logWithCustomTag(this.PROXY_LOG_TAGS.APPLICATION_COMMAND, JSON.stringify(event, null, 2), this.logLevel);
    const processed = this.CDPMessageHandler.processApplicationCDPMessage(event);
    if (processed.communicationPreparationsDone) {
        this.communicationPreparationsDone = true;
        this.unpauseDebuggerTarget();
    }
    switch (processed.dispatchDirection) {
        case DispatchDirection.BACK:
            this.applicationTarget?.send(processed.event);
            break;
        case DispatchDirection.FORWARD:
            this.debuggerTarget?.send(processed.event);
            break;
    }
}
/**
 * Handles a CDP reply arriving from the debugger side and routes it according
 * to the dispatch direction decided by the message handler.
 */
private handleDebuggerTargetReply(event: any) {
    this.logger.logWithCustomTag(this.PROXY_LOG_TAGS.DEBUGGER_REPLY, JSON.stringify(event, null, 2), this.logLevel);
    const processed = this.CDPMessageHandler.processDebuggerCDPMessage(event);
    switch (processed.dispatchDirection) {
        case DispatchDirection.BACK:
            this.debuggerTarget?.send(processed.event);
            break;
        case DispatchDirection.FORWARD:
            this.applicationTarget?.send(processed.event);
            break;
    }
}
/**
 * Handles a CDP reply arriving from the application side and routes it
 * according to the dispatch direction decided by the message handler.
 */
private handleApplicationTargetReply(event: any) {
    this.logger.logWithCustomTag(this.PROXY_LOG_TAGS.APPLICATION_REPLY, JSON.stringify(event, null, 2), this.logLevel);
    const processed = this.CDPMessageHandler.processApplicationCDPMessage(event);
    switch (processed.dispatchDirection) {
        case DispatchDirection.BACK:
            this.applicationTarget?.send(processed.event);
            break;
        case DispatchDirection.FORWARD:
            this.debuggerTarget?.send(processed.event);
            break;
    }
}
/** Logs transport-level errors raised by the debugger connection. */
private onDebuggerTargetError(err: Error) {
    this.logger.log('Error on debugger transport: ' + err);
}
/** Logs transport-level errors raised by the application connection. */
private onApplicationTargetError(err: Error) {
    this.logger.log('Error on application transport: ' + err);
}
// Invoked when the application-side connection ends; drops the reference so
// a fresh connection is created on the next attach.
private async onApplicationTargetClosed() {
this.applicationTarget = null;
}
/**
 * Invoked when the debugger-side connection ends. Notifies the CDP message
 * handler with a synthetic "close" message, then resets the per-session state
 * so a subsequent debugger attach starts from scratch.
 */
private async onDebuggerTargetClosed() {
    // Let the message handler observe the close before any state is reset.
    this.CDPMessageHandler.processDebuggerCDPMessage({ method: "close" });
    this.browserInspectUri = "";
    this.communicationPreparationsDone = false;
    this.debuggerTarget = null;
}
private unpauseDebuggerTarget(): void {
if (this.debuggerTarget && this.communicationPreparationsDone) {
this.debuggerTarget.unpause();
}
}
}
import * as pulumi from "@pulumi/pulumi";
import * as utilities from "../utilities";
/**
* Adds a trust between Active Directory domains
*
* To get more information about DomainTrust, see:
*
* * [API documentation](https://cloud.google.com/managed-microsoft-ad/reference/rest/v1/projects.locations.global.domains/attachTrust)
* * How-to Guides
* * [Active Directory Trust](https://cloud.google.com/managed-microsoft-ad/docs/create-one-way-trust)
*
* > **Warning:** All arguments including `trustHandshakeSecret` will be stored in the raw
* state as plain-text. [Read more about sensitive data in state](https://www.terraform.io/docs/state/sensitive-data.html).
*
* ## Example Usage
* ### Active Directory Domain Trust Basic
*
* ```typescript
* import * as pulumi from "@pulumi/pulumi";
* import * as gcp from "@pulumi/gcp";
*
* const ad_domain_trust = new gcp.activedirectory.DomainTrust("ad-domain-trust", {
* domain: "test-managed-ad.com",
* targetDnsIpAddresses: ["10.1.0.100"],
* targetDomainName: "example-gcp.com",
* trustDirection: "OUTBOUND",
* trustHandshakeSecret: "Testing1!",
* trustType: "FOREST",
* });
* ```
*
* ## Import
*
* DomainTrust can be imported using any of these accepted formats
*
* ```sh
* $ pulumi import gcp:activedirectory/domainTrust:DomainTrust default projects/{{project}}/locations/global/domains/{{domain}}/{{target_domain_name}}
* ```
*
* ```sh
* $ pulumi import gcp:activedirectory/domainTrust:DomainTrust default {{project}}/{{domain}}/{{target_domain_name}}
* ```
*
* ```sh
* $ pulumi import gcp:activedirectory/domainTrust:DomainTrust default {{domain}}/{{target_domain_name}}
* ```
*/
export class DomainTrust extends pulumi.CustomResource {
    /**
     * Get an existing DomainTrust resource's state with the given name, ID, and optional extra
     * properties used to qualify the lookup.
     *
     * @param name The _unique_ name of the resulting resource.
     * @param id The _unique_ provider ID of the resource to lookup.
     * @param state Any extra arguments used during the lookup.
     * @param opts Optional settings to control the behavior of the CustomResource.
     */
    public static get(name: string, id: pulumi.Input<pulumi.ID>, state?: DomainTrustState, opts?: pulumi.CustomResourceOptions): DomainTrust {
        return new DomainTrust(name, <any>state, { ...opts, id });
    }
    /** @internal */
    public static readonly __pulumiType = 'gcp:activedirectory/domainTrust:DomainTrust';
    /**
     * Returns true if the given object is an instance of DomainTrust. This is designed to work even
     * when multiple copies of the Pulumi SDK have been loaded into the same process.
     */
    public static isInstance(obj: any): obj is DomainTrust {
        if (obj == null) {
            return false;
        }
        return obj['__pulumiType'] === DomainTrust.__pulumiType;
    }
    /**
     * The fully qualified domain name. e.g. mydomain.myorganization.com, with the restrictions,
     * https://cloud.google.com/managed-microsoft-ad/reference/rest/v1/projects.locations.global.domains.
     */
    public readonly domain!: pulumi.Output<string>;
    /**
     * The ID of the project in which the resource belongs.
     * If it is not provided, the provider project is used.
     */
    public readonly project!: pulumi.Output<string>;
    /**
     * Whether the trusted side has forest/domain wide access or selective access to an approved set of resources.
     */
    public readonly selectiveAuthentication!: pulumi.Output<boolean | undefined>;
    /**
     * The target DNS server IP addresses which can resolve the remote domain involved in the trust.
     */
    public readonly targetDnsIpAddresses!: pulumi.Output<string[]>;
    /**
     * The fully qualified target domain name which will be in trust with the current domain.
     */
    public readonly targetDomainName!: pulumi.Output<string>;
    /**
     * The trust direction, which decides if the current domain is trusted, trusting, or both.
     * Possible values are `INBOUND`, `OUTBOUND`, and `BIDIRECTIONAL`.
     */
    public readonly trustDirection!: pulumi.Output<string>;
    /**
     * The trust secret used for the handshake with the target domain. This will not be stored.
     * **Note**: This property is sensitive and will not be displayed in the plan.
     */
    public readonly trustHandshakeSecret!: pulumi.Output<string>;
    /**
     * The type of trust represented by the trust resource.
     * Possible values are `FOREST` and `EXTERNAL`.
     */
    public readonly trustType!: pulumi.Output<string>;
    /**
     * Create a DomainTrust resource with the given unique name, arguments, and options.
     *
     * @param name The _unique_ name of the resource.
     * @param args The arguments to use to populate this resource's properties.
     * @param opts A bag of options that control this resource's behavior.
     */
    constructor(name: string, args: DomainTrustArgs, opts?: pulumi.CustomResourceOptions)
    constructor(name: string, argsOrState?: DomainTrustArgs | DomainTrustState, opts?: pulumi.CustomResourceOptions) {
        const resourceInputs: pulumi.Inputs = {};
        opts = opts || {};
        // Every input property handled by this resource, in declaration order.
        const allProps = ["domain", "project", "selectiveAuthentication", "targetDnsIpAddresses", "targetDomainName", "trustDirection", "trustHandshakeSecret", "trustType"] as const;
        // Properties that must be supplied when creating (not looking up) the resource.
        const requiredProps = ["domain", "targetDnsIpAddresses", "targetDomainName", "trustDirection", "trustHandshakeSecret", "trustType"] as const;
        if (opts.id) {
            // Lookup of an existing resource: copy whatever state was supplied.
            const state = argsOrState as DomainTrustState | undefined;
            for (const prop of allProps) {
                resourceInputs[prop] = state?.[prop];
            }
        } else {
            // Fresh creation: validate required arguments (unless resolving a URN).
            const args = argsOrState as DomainTrustArgs | undefined;
            for (const prop of requiredProps) {
                if ((!args || args[prop] === undefined) && !opts.urn) {
                    throw new Error(`Missing required property '${prop}'`);
                }
            }
            for (const prop of allProps) {
                resourceInputs[prop] = args?.[prop];
            }
        }
        if (!opts.version) {
            opts = pulumi.mergeOptions(opts, { version: utilities.getVersion() });
        }
        super(DomainTrust.__pulumiType, name, resourceInputs, opts);
    }
}
/**
 * Input properties used for looking up and filtering DomainTrust resources.
 * All properties are optional because they describe the last-known state of an
 * already-provisioned resource rather than arguments for creating one.
 */
export interface DomainTrustState {
/**
 * The fully qualified domain name. e.g. mydomain.myorganization.com, with the restrictions,
 * https://cloud.google.com/managed-microsoft-ad/reference/rest/v1/projects.locations.global.domains.
 */
domain?: pulumi.Input<string>;
/**
 * The ID of the project in which the resource belongs.
 * If it is not provided, the provider project is used.
 */
project?: pulumi.Input<string>;
/**
 * Whether the trusted side has forest/domain wide access or selective access to an approved set of resources.
 */
selectiveAuthentication?: pulumi.Input<boolean>;
/**
 * The target DNS server IP addresses which can resolve the remote domain involved in the trust.
 */
targetDnsIpAddresses?: pulumi.Input<pulumi.Input<string>[]>;
/**
 * The fully qualified target domain name which will be in trust with the current domain.
 */
targetDomainName?: pulumi.Input<string>;
/**
 * The trust direction, which decides if the current domain is trusted, trusting, or both.
 * Possible values are `INBOUND`, `OUTBOUND`, and `BIDIRECTIONAL`.
 */
trustDirection?: pulumi.Input<string>;
/**
 * The trust secret used for the handshake with the target domain. This will not be stored.
 * **Note**: This property is sensitive and will not be displayed in the plan.
 */
trustHandshakeSecret?: pulumi.Input<string>;
/**
 * The type of trust represented by the trust resource.
 * Possible values are `FOREST` and `EXTERNAL`.
 */
trustType?: pulumi.Input<string>;
}
/**
 * The set of arguments for constructing a DomainTrust resource.
 * Properties without a `?` are required at creation time; the DomainTrust
 * constructor throws if any of them is missing.
 */
export interface DomainTrustArgs {
/**
 * The fully qualified domain name. e.g. mydomain.myorganization.com, with the restrictions,
 * https://cloud.google.com/managed-microsoft-ad/reference/rest/v1/projects.locations.global.domains.
 */
domain: pulumi.Input<string>;
/**
 * The ID of the project in which the resource belongs.
 * If it is not provided, the provider project is used.
 */
project?: pulumi.Input<string>;
/**
 * Whether the trusted side has forest/domain wide access or selective access to an approved set of resources.
 */
selectiveAuthentication?: pulumi.Input<boolean>;
/**
 * The target DNS server IP addresses which can resolve the remote domain involved in the trust.
 */
targetDnsIpAddresses: pulumi.Input<pulumi.Input<string>[]>;
/**
 * The fully qualified target domain name which will be in trust with the current domain.
 */
targetDomainName: pulumi.Input<string>;
/**
 * The trust direction, which decides if the current domain is trusted, trusting, or both.
 * Possible values are `INBOUND`, `OUTBOUND`, and `BIDIRECTIONAL`.
 */
trustDirection: pulumi.Input<string>;
/**
 * The trust secret used for the handshake with the target domain. This will not be stored.
 * **Note**: This property is sensitive and will not be displayed in the plan.
 */
trustHandshakeSecret: pulumi.Input<string>;
/**
 * The type of trust represented by the trust resource.
 * Possible values are `FOREST` and `EXTERNAL`.
 */
trustType: pulumi.Input<string>;
}
import { Emitter, Event } from 'vs/base/common/event';
import { CharCode } from 'vs/base/common/charCode';
import { IDisposable } from 'vs/base/common/lifecycle';
import { IPosition, Position } from 'vs/editor/common/core/position';
import { IRange, Range } from 'vs/editor/common/core/range';
import { getWordAtText, IWordAtPosition } from 'vs/editor/common/core/wordHelper';
import { StandardTokenType } from 'vs/editor/common/languages';
import { ILanguageService } from 'vs/editor/common/languages/language';
import { ILanguageConfigurationService, ResolvedLanguageConfiguration } from 'vs/editor/common/languages/languageConfigurationRegistry';
import { TextModel } from 'vs/editor/common/model/textModel';
import { TextModelPart } from 'vs/editor/common/model/textModelPart';
import { TextModelTokenization } from 'vs/editor/common/model/textModelTokens';
import { IModelContentChangedEvent, IModelLanguageChangedEvent, IModelLanguageConfigurationChangedEvent, IModelTokensChangedEvent } from 'vs/editor/common/textModelEvents';
import { ContiguousMultilineTokens } from 'vs/editor/common/tokens/contiguousMultilineTokens';
import { ContiguousTokensStore } from 'vs/editor/common/tokens/contiguousTokensStore';
import { LineTokens } from 'vs/editor/common/tokens/lineTokens';
import { SparseMultilineTokens } from 'vs/editor/common/tokens/sparseMultilineTokens';
import { SparseTokensStore } from 'vs/editor/common/tokens/sparseTokensStore';
import { BracketPairsTextModelPart } from 'vs/editor/common/model/bracketPairsTextModelPart/bracketPairsImpl';
import { BackgroundTokenizationState, ITokenizationTextModelPart } from 'vs/editor/common/tokenizationTextModelPart';
// NOTE(review): indentation in this chunk was lost in extraction; code lines are
// kept byte-identical and only comments are added.
/**
 * Text-model part that owns all tokenization state for a TextModel: contiguous
 * (grammar/syntactic) tokens, sparse (semantic) tokens, the background
 * tokenizer driving them, and the current language id. Also exposes
 * token-aware word lookup helpers.
 */
export class TokenizationTextModelPart extends TextModelPart implements ITokenizationTextModelPart {
private readonly _onDidChangeLanguage: Emitter<IModelLanguageChangedEvent> = this._register(new Emitter<IModelLanguageChangedEvent>());
public readonly onDidChangeLanguage: Event<IModelLanguageChangedEvent> = this._onDidChangeLanguage.event;
private readonly _onDidChangeLanguageConfiguration: Emitter<IModelLanguageConfigurationChangedEvent> = this._register(new Emitter<IModelLanguageConfigurationChangedEvent>());
public readonly onDidChangeLanguageConfiguration: Event<IModelLanguageConfigurationChangedEvent> = this._onDidChangeLanguageConfiguration.event;
private readonly _onDidChangeTokens: Emitter<IModelTokensChangedEvent> = this._register(new Emitter<IModelTokensChangedEvent>());
public readonly onDidChangeTokens: Event<IModelTokensChangedEvent> = this._onDidChangeTokens.event;
private readonly _languageRegistryListener: IDisposable;
// Grammar tokens (line-contiguous) and semantic tokens (sparse ranges) are
// stored separately and merged on read in _getLineTokens.
private readonly _tokens: ContiguousTokensStore;
private readonly _semanticTokens: SparseTokensStore;
private readonly _tokenization: TextModelTokenization;
constructor(
private readonly _languageService: ILanguageService,
private readonly _languageConfigurationService: ILanguageConfigurationService,
private readonly _textModel: TextModel,
private readonly bracketPairsTextModelPart: BracketPairsTextModelPart,
private _languageId: string,
) {
super();
this._tokens = new ContiguousTokensStore(
this._languageService.languageIdCodec
);
this._semanticTokens = new SparseTokensStore(
this._languageService.languageIdCodec
);
this._tokenization = new TextModelTokenization(
_textModel,
this,
this._languageService.languageIdCodec
);
// Re-broadcast language-configuration changes that affect our language.
this._languageRegistryListener = this._languageConfigurationService.onDidChange(
e => {
if (e.affects(this._languageId)) {
this._onDidChangeLanguageConfiguration.fire({});
}
}
);
}
// True when any outside code currently observes one of this part's events.
_hasListeners(): boolean {
return (
this._onDidChangeLanguage.hasListeners()
|| this._onDidChangeLanguageConfiguration.hasListeners()
|| this._onDidChangeTokens.hasListeners()
|| this._onBackgroundTokenizationStateChanged.hasListeners()
);
}
// Keeps both token stores consistent with a text edit applied to the model.
public acceptEdit(
range: IRange,
text: string,
eolCount: number,
firstLineLength: number,
lastLineLength: number
): void {
this._tokens.acceptEdit(range, eolCount, firstLineLength);
this._semanticTokens.acceptEdit(
range,
eolCount,
firstLineLength,
lastLineLength,
text.length > 0 ? text.charCodeAt(0) : CharCode.Null
);
}
public handleDidChangeAttached(): void {
this._tokenization.handleDidChangeAttached();
}
// Discards all stored tokens (both grammar and semantic).
public flush(): void {
this._tokens.flush();
this._semanticTokens.flush();
}
public handleDidChangeContent(change: IModelContentChangedEvent): void {
this._tokenization.handleDidChangeContent(change);
}
public override dispose(): void {
this._languageRegistryListener.dispose();
this._tokenization.dispose();
super.dispose();
}
private _backgroundTokenizationState = BackgroundTokenizationState.Uninitialized;
public get backgroundTokenizationState(): BackgroundTokenizationState {
return this._backgroundTokenizationState;
}
private handleTokenizationProgress(completed: boolean) {
if (this._backgroundTokenizationState === BackgroundTokenizationState.Completed) {
// We already did a full tokenization and don't go back to progressing.
return;
}
const newState = completed ? BackgroundTokenizationState.Completed : BackgroundTokenizationState.InProgress;
if (this._backgroundTokenizationState !== newState) {
this._backgroundTokenizationState = newState;
this.bracketPairsTextModelPart.handleDidChangeBackgroundTokenizationState();
this._onBackgroundTokenizationStateChanged.fire();
}
}
private readonly _onBackgroundTokenizationStateChanged = this._register(new Emitter<void>());
public readonly onBackgroundTokenizationStateChanged: Event<void> = this._onBackgroundTokenizationStateChanged.event;
// Replaces the grammar tokens of a single (1-based) line.
public setLineTokens(
lineNumber: number,
tokens: Uint32Array | ArrayBuffer | null
): void {
if (lineNumber < 1 || lineNumber > this._textModel.getLineCount()) {
throw new Error('Illegal value for lineNumber');
}
this._tokens.setTokens(
this._languageId,
lineNumber - 1,
this._textModel.getLineLength(lineNumber),
tokens,
false
);
}
// Applies multiline grammar tokens, collecting the line ranges that actually
// changed so only those are announced via onDidChangeTokens.
public setTokens(
tokens: ContiguousMultilineTokens[],
backgroundTokenizationCompleted: boolean = false
): void {
if (tokens.length !== 0) {
const ranges: { fromLineNumber: number; toLineNumber: number }[] = [];
for (let i = 0, len = tokens.length; i < len; i++) {
const element = tokens[i];
let minChangedLineNumber = 0;
let maxChangedLineNumber = 0;
let hasChange = false;
for (
let lineNumber = element.startLineNumber;
lineNumber <= element.endLineNumber;
lineNumber++
) {
if (hasChange) {
// A change was already detected in this element: stop diffing
// (last arg false) and just overwrite subsequent lines.
this._tokens.setTokens(
this._languageId,
lineNumber - 1,
this._textModel.getLineLength(lineNumber),
element.getLineTokens(lineNumber),
false
);
maxChangedLineNumber = lineNumber;
} else {
// Still diffing (last arg true): only mark a range once a line
// actually differs from the stored tokens.
const lineHasChange = this._tokens.setTokens(
this._languageId,
lineNumber - 1,
this._textModel.getLineLength(lineNumber),
element.getLineTokens(lineNumber),
true
);
if (lineHasChange) {
hasChange = true;
minChangedLineNumber = lineNumber;
maxChangedLineNumber = lineNumber;
}
}
}
if (hasChange) {
ranges.push({
fromLineNumber: minChangedLineNumber,
toLineNumber: maxChangedLineNumber,
});
}
}
if (ranges.length > 0) {
this._emitModelTokensChangedEvent({
tokenizationSupportChanged: false,
semanticTokensApplied: false,
ranges: ranges,
});
}
}
this.handleTokenizationProgress(backgroundTokenizationCompleted);
}
// Replaces the full set of semantic tokens (null clears them).
public setSemanticTokens(
tokens: SparseMultilineTokens[] | null,
isComplete: boolean
): void {
this._semanticTokens.set(tokens, isComplete);
this._emitModelTokensChangedEvent({
tokenizationSupportChanged: false,
semanticTokensApplied: tokens !== null,
ranges: [{ fromLineNumber: 1, toLineNumber: this._textModel.getLineCount() }],
});
}
public hasCompleteSemanticTokens(): boolean {
return this._semanticTokens.isComplete();
}
public hasSomeSemanticTokens(): boolean {
return !this._semanticTokens.isEmpty();
}
// Merges a partial (range-scoped) semantic token delivery; ignored once a
// complete set is already present.
public setPartialSemanticTokens(
range: Range,
tokens: SparseMultilineTokens[]
): void {
if (this.hasCompleteSemanticTokens()) {
return;
}
const changedRange = this._textModel.validateRange(
this._semanticTokens.setPartial(range, tokens)
);
this._emitModelTokensChangedEvent({
tokenizationSupportChanged: false,
semanticTokensApplied: true,
ranges: [
{
fromLineNumber: changedRange.startLineNumber,
toLineNumber: changedRange.endLineNumber,
},
],
});
}
// Requests tokenization of the (clamped) visible line range.
public tokenizeViewport(
startLineNumber: number,
endLineNumber: number
): void {
startLineNumber = Math.max(1, startLineNumber);
endLineNumber = Math.min(this._textModel.getLineCount(), endLineNumber);
this._tokenization.tokenizeViewport(startLineNumber, endLineNumber);
}
public clearTokens(): void {
this._tokens.flush();
this._emitModelTokensChangedEvent({
tokenizationSupportChanged: true,
semanticTokensApplied: false,
ranges: [
{
fromLineNumber: 1,
toLineNumber: this._textModel.getLineCount(),
},
],
});
}
public clearSemanticTokens(): void {
this._semanticTokens.flush();
this._emitModelTokensChangedEvent({
tokenizationSupportChanged: false,
semanticTokensApplied: false,
ranges: [{ fromLineNumber: 1, toLineNumber: this._textModel.getLineCount() }],
});
}
// Central emit point: notifies bracket pairs first, then external listeners,
// and suppresses events while the model is being disposed.
private _emitModelTokensChangedEvent(e: IModelTokensChangedEvent): void {
if (!this._textModel._isDisposing()) {
this.bracketPairsTextModelPart.handleDidChangeTokens(e);
this._onDidChangeTokens.fire(e);
}
}
public resetTokenization(): void {
this._tokenization.reset();
}
public forceTokenization(lineNumber: number): void {
if (lineNumber < 1 || lineNumber > this._textModel.getLineCount()) {
throw new Error('Illegal value for lineNumber');
}
this._tokenization.forceTokenization(lineNumber);
}
public isCheapToTokenize(lineNumber: number): boolean {
return this._tokenization.isCheapToTokenize(lineNumber);
}
public tokenizeIfCheap(lineNumber: number): void {
if (this.isCheapToTokenize(lineNumber)) {
this.forceTokenization(lineNumber);
}
}
public getLineTokens(lineNumber: number): LineTokens {
if (lineNumber < 1 || lineNumber > this._textModel.getLineCount()) {
throw new Error('Illegal value for lineNumber');
}
return this._getLineTokens(lineNumber);
}
// Grammar tokens overlaid with semantic tokens for one line.
private _getLineTokens(lineNumber: number): LineTokens {
const lineText = this._textModel.getLineContent(lineNumber);
const syntacticTokens = this._tokens.getTokens(
this._languageId,
lineNumber - 1,
lineText
);
return this._semanticTokens.addSparseTokens(lineNumber, syntacticTokens);
}
public getTokenTypeIfInsertingCharacter(
lineNumber: number,
column: number,
character: string
): StandardTokenType {
const position = this._textModel.validatePosition(new Position(lineNumber, column));
return this._tokenization.getTokenTypeIfInsertingCharacter(
position,
character
);
}
public tokenizeLineWithEdit(
position: IPosition,
length: number,
newText: string
): LineTokens | null {
const validatedPosition = this._textModel.validatePosition(position);
return this._tokenization.tokenizeLineWithEdit(
validatedPosition,
length,
newText
);
}
private getLanguageConfiguration(
languageId: string
): ResolvedLanguageConfiguration {
return this._languageConfigurationService.getLanguageConfiguration(
languageId
);
}
// Having tokens allows implementing additional helper methods
// Word lookup that respects embedded-language boundaries: the word definition
// of the language at the position is applied only within that language's span.
public getWordAtPosition(_position: IPosition): IWordAtPosition | null {
this.assertNotDisposed();
const position = this._textModel.validatePosition(_position);
const lineContent = this._textModel.getLineContent(position.lineNumber);
const lineTokens = this._getLineTokens(position.lineNumber);
const tokenIndex = lineTokens.findTokenIndexAtOffset(position.column - 1);
// (1). First try checking right biased word
const [rbStartOffset, rbEndOffset] = TokenizationTextModelPart._findLanguageBoundaries(
lineTokens,
tokenIndex
);
const rightBiasedWord = getWordAtText(
position.column,
this.getLanguageConfiguration(
lineTokens.getLanguageId(tokenIndex)
).getWordDefinition(),
lineContent.substring(rbStartOffset, rbEndOffset),
rbStartOffset
);
// Make sure the result touches the original passed in position
if (
rightBiasedWord &&
rightBiasedWord.startColumn <= _position.column &&
_position.column <= rightBiasedWord.endColumn
) {
return rightBiasedWord;
}
// (2). Else, if we were at a language boundary, check the left biased word
if (tokenIndex > 0 && rbStartOffset === position.column - 1) {
// edge case, where `position` sits between two tokens belonging to two different languages
const [lbStartOffset, lbEndOffset] = TokenizationTextModelPart._findLanguageBoundaries(
lineTokens,
tokenIndex - 1
);
const leftBiasedWord = getWordAtText(
position.column,
this.getLanguageConfiguration(
lineTokens.getLanguageId(tokenIndex - 1)
).getWordDefinition(),
lineContent.substring(lbStartOffset, lbEndOffset),
lbStartOffset
);
// Make sure the result touches the original passed in position
if (
leftBiasedWord &&
leftBiasedWord.startColumn <= _position.column &&
_position.column <= leftBiasedWord.endColumn
) {
return leftBiasedWord;
}
}
return null;
}
// Returns the [startOffset, endOffset) span of consecutive tokens around
// tokenIndex that share the same language id.
private static _findLanguageBoundaries(
lineTokens: LineTokens,
tokenIndex: number
): [number, number] {
const languageId = lineTokens.getLanguageId(tokenIndex);
// go left until a different language is hit
let startOffset = 0;
for (
let i = tokenIndex;
i >= 0 && lineTokens.getLanguageId(i) === languageId;
i--
) {
startOffset = lineTokens.getStartOffset(i);
}
// go right until a different language is hit
let endOffset = lineTokens.getLineContent().length;
for (
let i = tokenIndex, tokenCount = lineTokens.getCount();
i < tokenCount && lineTokens.getLanguageId(i) === languageId;
i++
) {
endOffset = lineTokens.getEndOffset(i);
}
return [startOffset, endOffset];
}
// Like getWordAtPosition but truncated at the given column.
public getWordUntilPosition(position: IPosition): IWordAtPosition {
const wordAtPosition = this.getWordAtPosition(position);
if (!wordAtPosition) {
return {
word: '',
startColumn: position.column,
endColumn: position.column,
};
}
return {
word: wordAtPosition.word.substr(
0,
position.column - wordAtPosition.startColumn
),
startColumn: wordAtPosition.startColumn,
endColumn: position.column,
};
}
public getLanguageId(): string {
return this._languageId;
}
public getLanguageIdAtPosition(lineNumber: number, column: number): string {
const position = this._textModel.validatePosition(new Position(lineNumber, column));
const lineTokens = this.getLineTokens(position.lineNumber);
return lineTokens.getLanguageId(lineTokens.findTokenIndexAtOffset(position.column - 1));
}
// Switches the model's language, notifying dependents in a fixed order.
public setLanguageId(languageId: string): void {
if (this._languageId === languageId) {
// There's nothing to do
return;
}
const e: IModelLanguageChangedEvent = {
oldLanguage: this._languageId,
newLanguage: languageId
};
this._languageId = languageId;
this.bracketPairsTextModelPart.handleDidChangeLanguage(e);
this._tokenization.handleDidChangeLanguage(e);
this._onDidChangeLanguage.fire(e);
this._onDidChangeLanguageConfiguration.fire({});
}
}
* @fileoverview Externs creates Closure Compiler #externs definitions from the
* ambient declarations in a TypeScript file.
*
* (Note that we cannot write the "@" form of the externs tag, even in comments,
* because the compiler greps for it in source files(!). So we write #externs
* instead.)
*
* For example, a
* declare interface Foo { bar: string; }
*
* Would generate a
* /.. #externs ./
* /.. @record ./
* var Foo = function() {};
* /.. @type {string} ./
* Foo.prototype.bar;
*
* The generated externs indicate to Closure Compiler that symbols are external
* to the optimization process, i.e. they are provided by outside code. That
* most importantly means they must not be renamed or removed.
*
* A major difficulty here is that TypeScript supports module-scoped external
* symbols; `.d.ts` files can contain `export`s and `import` other files.
* Closure Compiler does not have such a concept, so tsickle must emulate the
* behaviour. It does so by following this scheme:
*
* 1. non-module .d.ts produces global symbols
* 2. module .d.ts produce symbols namespaced to the module, by creating a
* mangled name matching the current file's path. tsickle expects outside
* code (e.g. build system integration or manually written code) to contain a
* goog.module/provide that references the mangled path.
* 3. declarations in `.ts` files produce types that can be separately emitted
* in e.g. an `externs.js`, using `getGeneratedExterns` below.
* 1. non-exported symbols produce global types, because that's what users
* expect and it matches TypeScripts emit, which just references `Foo` for
* a locally declared symbol `Foo` in a module. Arguably these should be
* wrapped in `declare global { ... }`.
* 2. exported symbols are scoped to the `.ts` file by prefixing them with a
* mangled name. Exported types are re-exported from the JavaScript
* `goog.module`, allowing downstream code to reference them. This has the
* same problem regarding ambient values as above, it is unclear where the
* value symbol would be defined, so for the time being this is
* unsupported.
*
* The effect of this is that:
* - symbols in a module (i.e. not globals) are generally scoped to the local
* module using a mangled name, preventing symbol collisions on the Closure
* side.
* - importing code can unconditionally refer to and import any symbol defined
* in a module `X` as `path.to.module.X`, regardless of whether the defining
* location is a `.d.ts` file or a `.ts` file, and regardless whether the
* symbol is ambient (assuming there's an appropriate shim).
* - if there is a shim present, tsickle avoids emitting the Closure namespace
* itself, expecting the shim to provide the namespace and initialize it to a
* symbol that provides the right value at runtime (i.e. the implementation of
* whatever third party library the .d.ts describes).
*/
import * as ts from 'typescript';
import {AnnotatorHost, moduleNameAsIdentifier} from './annotator_host';
import {getEnumType} from './enum_transformer';
import {namespaceForImportUrl, resolveModuleName} from './googmodule';
import * as jsdoc from './jsdoc';
import {escapeForComment, maybeAddHeritageClauses, maybeAddTemplateClause} from './jsdoc_transformer';
import {ModuleTypeTranslator} from './module_type_translator';
import * as path from './path';
import {getEntityNameText, getIdentifierText, hasModifierFlag, isAmbient, isDtsFileName, reportDiagnostic} from './transformer_util';
import {isValidClosurePropertyName} from './type_translator';
/**
 * Names that Closure's standard externs already declare. tsickle's
 * "declare ..." => externs.js conversion must skip these to avoid emitting
 * duplicate, conflicting definitions.
 */
const PREDECLARED_CLOSURE_EXTERNS_LIST: readonly string[] = [
    'exports',
    'global',
    'module',
    // ErrorConstructor is the interface of the Error object itself.
    // tsickle detects that this is part of the TypeScript standard library
    // and assumes it's part of the Closure standard library, but this
    // assumption is wrong for ErrorConstructor. To properly handle this
    // we'd somehow need to map methods defined on the ErrorConstructor
    // interface into properties on Closure's Error object, but for now it's
    // simpler to just treat it as already declared.
    'ErrorConstructor',
    'Symbol',
    'WorkerGlobalScope',
];
/**
 * The header to be used in generated externs. This is not included in the
 * output of generateExterns() because generateExterns() works one file at a
 * time, and typically you create one externs file from the entire compilation
 * unit.
 *
 * Suppressions:
 * - checkTypes: Closure's type system does not match TS'.
 * - const: for clashes of const variable assignments. This is needed to not
 *   conflict with the hand-written closure externs.
 * - duplicate: because externs might duplicate re-opened definitions from other
 *   JS files.
 * - missingOverride: There's no benefit to having closure-compiler warn us that
 *   we're overriding methods. Producing such warnings, if any, should be
 *   the job of the TS type system.
 */
// The `@${''}externs` interpolation splits the literal tag so that this source
// file itself does not contain the "@"-form of the externs annotation, which
// the compiler greps for in source files (see the file overview comment).
const EXTERNS_HEADER = `/**
 * @${''}externs
 * @suppress {checkTypes,const,duplicate,missingOverride}
 */
// NOTE: generated by tsickle, do not edit.
`;
/**
 * Concatenates all generated externs definitions into one string, prefixed
 * with the shared externs file header comment.
 *
 * @param externs Map from source file name to that file's generated externs.
 * @param rootDir Project root. Emitted comments will reference paths relative
 *     to this root. Effectively required, but kept optional until Angular is
 *     fixed.
 */
export function getGeneratedExterns(
    externs: {[fileName: string]: string}, rootDir?: string): string {
  const parts: string[] = [EXTERNS_HEADER];
  for (const fileName of Object.keys(externs)) {
    const srcPath = rootDir ?
        path.relative(rootDir, fileName) :
        'ERROR: getGeneratedExterns called without rootDir';
    parts.push(`// ${jsdoc.createGeneratedFromComment(srcPath)}\n`);
    parts.push(externs[fileName]);
  }
  return parts.join('');
}
/**
 * Returns true if declaration is the immediate child of a `declare global`
 * block. Such a block is represented as a ModuleDeclaration (carrying the
 * GlobalAugmentation flag) containing a ModuleBlock containing the
 * declaration.
 */
function isInGlobalAugmentation(declaration: ts.Declaration): boolean {
  const grandparent = declaration.parent && declaration.parent.parent;
  if (!grandparent) return false;
  return Boolean(grandparent.flags & ts.NodeFlags.GlobalAugmentation);
}
/**
 * generateExterns generates extern definitions for all ambient declarations in the given source
 * file. It returns a string representation of the Closure JavaScript, not including the initial
 * comment with \@fileoverview and #externs (see above for that).
 */
export function generateExterns(
typeChecker: ts.TypeChecker, sourceFile: ts.SourceFile, host: AnnotatorHost,
moduleResolutionHost: ts.ModuleResolutionHost,
options: ts.CompilerOptions): {output: string, diagnostics: ts.Diagnostic[]} {
// Accumulated externs text; appended to via the nested emit() helper below.
let output = '';
const diagnostics: ts.Diagnostic[] = [];
const isDts = isDtsFileName(sourceFile.fileName);
const isExternalModule = ts.isExternalModule(sourceFile);
// Type translator configured for externs emit (affects how symbols are named).
const mtt =
new ModuleTypeTranslator(sourceFile, typeChecker, host, diagnostics, /*isForExterns*/ true);
// .d.ts files declare symbols. The code below translates these into a form understood by Closure
// Compiler, converting the type syntax, but also converting symbol names into a form accessible
// to Closure Compiler.
// Like regular .ts files, .d.ts can be either scripts or modules. Scripts declare symbols in the
// global namespace, which has the same semantics in Closure and TypeScript, so the code below
// emits those with the same name.
// Modules however declare symbols scoped to the module that can be exported. Closure has no
// concept of externs that are non-global, so tsickle needs to mangle the symbol names, both at
// their declaration and at their use site.
// This mangling happens by wrapping all declared symbols in a namespace based on the file name.
// This namespace is then essentially the exports object for the ambient module (externs in
// Closure terms). This namespace is called `moduleNamespace` below:
let moduleNamespace = '';
if (isExternalModule) {
moduleNamespace = moduleNameAsIdentifier(host, sourceFile.fileName);
}
// Symbols are generated starting in rootNamespace. For script .d.ts with global symbols, this is
// the empty string. For most module `.d.ts` files, this is the mangled namespace object. The
// remaining special case are `.d.ts` files containing an `export = something;` statement. In
// these, the effective exports object, i.e. the object containing the symbols that importing code
// receives, is different from the main module scope.
// tsickle handles the `export =` case by generating symbols in a different namespace (escaped
// with a `_`) below, and then assigning whatever is actually exported into the `moduleNamespace`
// below.
let rootNamespace = moduleNamespace;
// There can only be one export =, and if there is one, there cannot be any other exports.
const exportAssignment = sourceFile.statements.find(ts.isExportAssignment);
const hasExportEquals = exportAssignment && exportAssignment.isExportEquals;
if (hasExportEquals) {
// If so, move all generated symbols into a different sub-namespace, so that later on we can
// control what exactly goes on the actual exported namespace.
rootNamespace = rootNamespace + '_';
}
// Walk all top-level statements. In a regular .ts file only `declare`d
// (ambient) statements contribute externs; in a .d.ts everything does.
for (const stmt of sourceFile.statements) {
if (!isDts && !hasModifierFlag(stmt as ts.DeclarationStatement, ts.ModifierFlags.Ambient)) {
continue;
}
visitor(stmt, []);
}
/**
 * Converts a qualified name from a .d.ts file or declaration context (e.g.
 * the name in `export = someName;` or `import foo = someName;`) into the
 * mangled identifier tsickle emits for it.
 *
 * Symbols that live in the global namespace — declared inside
 * `declare global { ... }` or in a non-module file — must not be prefixed
 * with the mangled module namespace; everything else is.
 */
function qualifiedNameToMangledIdentifier(name: ts.Identifier|ts.QualifiedName) {
  const entityName = getEntityNameText(name);
  let symbol = typeChecker.getSymbolAtLocation(name);
  if (!symbol) return rootNamespace + '.' + entityName;
  // Follow import aliases to the symbol they ultimately refer to.
  if (symbol.flags & ts.SymbolFlags.Alias) {
    symbol = typeChecker.getAliasedSymbol(symbol);
  }
  // A previously recorded alias (e.g. from an import) takes precedence.
  const alias = mtt.symbolsToAliasedNames.get(symbol);
  if (alias) return alias;
  const declarations = symbol.declarations || [];
  // A symbol is global when any declaration sits in a `declare global` block,
  // or when its source file is not a module at all. Module-local identifiers
  // are either handled via the alias map above or get mangled below.
  const isGlobalSymbol = declarations.some(
      d => isInGlobalAugmentation(d) || !ts.isExternalModule(d.getSourceFile()));
  return isGlobalSymbol ? entityName : rootNamespace + '.' + entityName;
}
if (output && isExternalModule) {
// If tsickle generated any externs and this is an external module, prepend the namespace
// declaration for it.
output = `/** @const */\nvar ${rootNamespace} = {};\n` + output;
let exportedNamespace = rootNamespace;
if (exportAssignment && hasExportEquals) {
if (ts.isIdentifier(exportAssignment.expression) ||
ts.isQualifiedName(exportAssignment.expression)) {
// E.g. export = someName;
// If someName is "declare global { namespace someName {...} }", tsickle must not qualify
// access to it with module namespace as it is emitted in the global namespace.
exportedNamespace = qualifiedNameToMangledIdentifier(exportAssignment.expression);
} else {
reportDiagnostic(
diagnostics, exportAssignment.expression,
`export = expression must be a qualified name, got ${
ts.SyntaxKind[exportAssignment.expression.kind]}.`);
}
// Assign the actually exported namespace object (which lives somewhere under rootNamespace)
// into the module's namespace.
emit(`/**\n * export = ${exportAssignment.expression.getText()}\n * @const\n */\n`);
emit(`var ${moduleNamespace} = ${exportedNamespace};\n`);
}
if (isDts && host.provideExternalModuleDtsNamespace) {
// In a non-shimmed module, create a global namespace. This exists purely for backwards
// compatibility, in the medium term all code using tsickle should always use `goog.module`s,
// so global names should not be necessary.
for (const nsExport of sourceFile.statements.filter(ts.isNamespaceExportDeclaration)) {
const namespaceName = getIdentifierText(nsExport.name);
emit(`// export as namespace ${namespaceName}\n`);
writeVariableStatement(namespaceName, [], exportedNamespace);
}
}
}
return {output, diagnostics};
// The helpers below are function declarations, so they are hoisted and
// callable from the code above despite appearing after the return.
/** Appends str to the generated externs output. */
function emit(str: string) {
output += str;
}
/**
 * Returns true if decl is the first declaration of its symbol that produces a
 * value. E.g. given
 *   interface Foo { x: number; }
 *   interface Foo { y: number; }
 * only the first occurrence should emit the `@record`.
 *
 * Variable declarations are the exception: in externs they do not assign a
 * value:
 *   /.. \@type {...} ./
 *   var someVariable;
 *   /.. \@type {...} ./
 *   someNamespace.someVariable;
 * So if a later declaration wants to hang additional properties off the
 * symbol, tsickle must still emit an assignment into the object, as it is
 * otherwise absent.
 */
function isFirstValueDeclaration(decl: ts.DeclarationStatement): boolean {
  if (!decl.name) return true;
  const sym = typeChecker.getSymbolAtLocation(decl.name)!;
  const allDecls = sym.declarations;
  if (!allDecls || allDecls.length < 2) return true;
  const earlier = allDecls.slice(0, allDecls.indexOf(decl));
  // Vacuously true when decl is the very first declaration; otherwise all
  // earlier declarations must be mere variable declarations (see above).
  return earlier.every(ts.isVariableDeclaration);
}
/**
 * Writes the actual variable statement of a Closure variable declaration,
 * e.g. `var a;`, `a.b.c;`, or `a.b = value;`. A leading `var` is only needed
 * for un-namespaced (global) names.
 */
function writeVariableStatement(name: string, namespace: ReadonlyArray<string>, value?: string) {
  const qualifiedName = namespace.concat([name]).join('.');
  const keyword = namespace.length === 0 ? `var ` : '';
  const assignment = value ? ` = ${value}` : '';
  emit(`${keyword}${qualifiedName}${assignment};\n`);
}
/**
* Writes a Closure variable declaration, i.e. the variable statement with a leading JSDoc
* comment making it a declaration.
*/
function writeVariableDeclaration(
decl: ts.VariableDeclaration, namespace: ReadonlyArray<string>) {
if (decl.name.kind === ts.SyntaxKind.Identifier) {
const name = getIdentifierText(decl.name as ts.Identifier);
if (PREDECLARED_CLOSURE_EXTERNS_LIST.indexOf(name) >= 0) return;
emit(jsdoc.toString([{tagName: 'type', type: mtt.typeToClosure(decl)}]));
emit('\n');
writeVariableStatement(name, namespace);
} else {
errorUnimplementedKind(decl.name, 'externs for variable');
}
}
/**
 * Emits a JSDoc comment that merges the signatures of the given function
 * declarations (for overloads), and returns the parameter names chosen.
 */
function emitFunctionType(decls: ts.FunctionLikeDeclaration[], extraTags: jsdoc.Tag[] = []) {
  const signature = mtt.getFunctionTypeJSDoc(decls, extraTags);
  emit('\n');
  emit(jsdoc.toString(signature.tags));
  return signature.parameterNames;
}
/**
 * Writes the function statement for an externs function: either a global
 * `function name(...) {}` or a namespaced `a.b.name = function(...) {};`.
 */
function writeFunction(name: ts.Node, params: string[], namespace: ReadonlyArray<string>) {
  const paramsStr = params.join(', ');
  if (namespace.length === 0) {
    if (name.kind !== ts.SyntaxKind.Identifier) {
      reportDiagnostic(diagnostics, name, 'Non-namespaced computed name in externs');
    }
    emit(`function ${name.getText()}(${paramsStr}) {}\n`);
    return;
  }
  // Computed names include the [ ] brackets in their getText() representation,
  // so only plain identifiers need a '.' separator after the namespace.
  const separator = name.kind === ts.SyntaxKind.Identifier ? '.' : '';
  const fqn = namespace.join('.') + separator + name.getText();
  emit(`${fqn} = function(${paramsStr}) {};\n`);
}
/**
 * Writes a Closure enum declaration for a TypeScript enum, e.g.
 * /** @enum {number} *\/ var COUNTRY = {US: 1, CA: 1};
 */
function writeEnum(decl: ts.EnumDeclaration, namespace: ReadonlyArray<string>) {
  const name = getIdentifierText(decl.name);
  const enumType = getEnumType(typeChecker, decl);
  // Closure enum members must carry a value of the matching type, but in
  // externs the concrete value is irrelevant.
  const initializer = enumType === 'string' ? `''` : 1;
  const memberLines: string[] = [];
  for (const member of decl.members) {
    let memberName: string|undefined;
    if (member.name.kind === ts.SyntaxKind.Identifier) {
      memberName = getIdentifierText(member.name as ts.Identifier);
    } else if (member.name.kind === ts.SyntaxKind.StringLiteral) {
      const text = (member.name as ts.StringLiteral).text;
      // String-literal names are only usable when legal as Closure properties.
      if (isValidClosurePropertyName(text)) memberName = text;
    }
    if (memberName) {
      memberLines.push(` ${memberName}: ${initializer},\n`);
    } else {
      // Unsupported member name kinds are kept as a TODO comment in output.
      memberLines.push(` /* TODO: ${ts.SyntaxKind[member.name.kind]}: ${
          escapeForComment(member.name.getText())} */\n`);
    }
  }
  emit(`\n/** @enum {${enumType}} */\n`);
  writeVariableStatement(name, namespace, `{\n${memberLines.join('')}}`);
}
/** Writes a Closure `@typedef` declaration for a TypeScript type alias. */
function writeTypeAlias(decl: ts.TypeAliasDeclaration, namespace: ReadonlyArray<string>) {
  const typeStr = mtt.typeToClosure(decl, undefined);
  const aliasName = getIdentifierText(decl.name);
  emit(`\n/** @typedef {${typeStr}} */\n`);
  writeVariableStatement(aliasName, namespace);
}
/**
 * Writes the externs for a class or interface declaration: the declaration
 * itself (as a function, with @constructor or @record), then its properties
 * and methods (merging method overloads into a single emitted signature).
 */
function writeType(
decl: ts.InterfaceDeclaration|ts.ClassDeclaration, namespace: ReadonlyArray<string>) {
const name = decl.name;
if (!name) {
reportDiagnostic(diagnostics, decl, 'anonymous type in externs');
return;
}
const typeName = namespace.concat([name.getText()]).join('.');
// Skip types Closure already declares in its own externs.
if (PREDECLARED_CLOSURE_EXTERNS_LIST.indexOf(typeName) >= 0) return;
if (isFirstValueDeclaration(decl)) {
// Emit the 'function' that is actually the declaration of the interface
// itself. If it's a class, this function also must include the type
// annotations of the constructor.
let paramNames: string[] = [];
const jsdocTags: jsdoc.Tag[] = [];
let wroteJsDoc = false;
maybeAddHeritageClauses(jsdocTags, mtt, decl);
maybeAddTemplateClause(jsdocTags, decl);
if (decl.kind === ts.SyntaxKind.ClassDeclaration) {
// TODO: it appears you can just write 'class Foo { ...' in externs.
// This code instead tries to translate it to a function.
jsdocTags.push({tagName: 'constructor'}, {tagName: 'struct'});
const ctors = (decl as ts.ClassDeclaration)
.members.filter((m) => m.kind === ts.SyntaxKind.Constructor);
if (ctors.length) {
const firstCtor: ts.ConstructorDeclaration = ctors[0] as ts.ConstructorDeclaration;
// Multiple constructors are overloads; merge them into one signature.
if (ctors.length > 1) {
paramNames = emitFunctionType(ctors as ts.ConstructorDeclaration[], jsdocTags);
} else {
paramNames = emitFunctionType([firstCtor], jsdocTags);
}
// emitFunctionType already wrote the JSDoc (including jsdocTags).
wroteJsDoc = true;
}
} else {
// Otherwise it's an interface; tag it as structurally typed.
jsdocTags.push({tagName: 'record'}, {tagName: 'struct'});
}
if (!wroteJsDoc) emit(jsdoc.toString(jsdocTags));
writeFunction(name, paramNames, namespace);
}
// Process everything except (MethodSignature|MethodDeclaration|Constructor)
// Methods are collected first, keyed by "name$$$static"/"name$$$instance" so
// that overloads of the same name and staticness are grouped together.
const methods = new Map<string, ts.MethodDeclaration[]>();
for (const member of decl.members) {
switch (member.kind) {
case ts.SyntaxKind.PropertySignature:
case ts.SyntaxKind.PropertyDeclaration:
const prop = member as ts.PropertySignature;
if (prop.name.kind === ts.SyntaxKind.Identifier) {
let type = mtt.typeToClosure(prop);
if (prop.questionToken && type === '?') {
// An optional 'any' type translates to '?|undefined' in Closure.
type = '?|undefined';
}
emit(jsdoc.toString([{tagName: 'type', type}]));
if (hasModifierFlag(prop, ts.ModifierFlags.Static)) {
emit(`\n${typeName}.${prop.name.getText()};\n`);
} else {
emit(`\n${typeName}.prototype.${prop.name.getText()};\n`);
}
continue;
}
// TODO: For now property names other than Identifiers are not handled; e.g.
// interface Foo { "123bar": number }
break;
case ts.SyntaxKind.MethodSignature:
case ts.SyntaxKind.MethodDeclaration:
const method = member as ts.MethodDeclaration;
const isStatic = hasModifierFlag(method, ts.ModifierFlags.Static);
const methodSignature = `${method.name.getText()}$$$${isStatic ? 'static' : 'instance'}`;
if (methods.has(methodSignature)) {
methods.get(methodSignature)!.push(method);
} else {
methods.set(methodSignature, [method]);
}
continue;
case ts.SyntaxKind.Constructor:
continue; // Handled above.
default:
// Members can include things like index signatures, for e.g.
// interface Foo { [key: string]: number; }
// For now, just skip it.
break;
}
// If we get here, the member wasn't handled in the switch statement.
// Emit a TODO comment into the externs so the omission is visible.
let memberName = namespace;
if (member.name) {
memberName = memberName.concat([member.name.getText()]);
}
emit(`\n/* TODO: ${ts.SyntaxKind[member.kind]}: ${memberName.join('.')} */\n`);
}
// Handle method declarations/signatures separately, since we need to deal with overloads.
for (const methodVariants of Array.from(methods.values())) {
const firstMethodVariant = methodVariants[0];
let parameterNames: string[];
if (methodVariants.length > 1) {
parameterNames = emitFunctionType(methodVariants);
} else {
parameterNames = emitFunctionType([firstMethodVariant]);
}
const methodNamespace = namespace.concat([name.getText()]);
// If the method is static, don't add the prototype.
if (!hasModifierFlag(firstMethodVariant, ts.ModifierFlags.Static)) {
methodNamespace.push('prototype');
}
writeFunction(firstMethodVariant.name, parameterNames, methodNamespace);
}
}
/**
 * Writes externs for an `export {...}` declaration: renamed exports become
 * aliases onto the namespace; `export *` forms are unsupported and emitted
 * as TODO comments.
 */
function writeExportDeclaration(
    exportDeclaration: ts.ExportDeclaration, namespace: ReadonlyArray<string>) {
  const exportClause = exportDeclaration.exportClause;
  if (!exportClause) {
    // `export * from '...'` has no clause; unsupported.
    emit(`\n// TODO(tsickle): export * declaration in ${
        debugLocationStr(exportDeclaration, namespace)}\n`);
    return;
  }
  if (ts.isNamespaceExport(exportClause)) {
    // TODO(#1135): Support generating externs using this syntax.
    emit(`\n// TODO(tsickle): export * as declaration in ${
        debugLocationStr(exportDeclaration, namespace)}\n`);
    return;
  }
  const namespacePrefix = namespace.join('.');
  for (const exportSpecifier of exportClause.elements) {
    // Properties exported under their original name need no extra emit.
    if (!exportSpecifier.propertyName) continue;
    emit('/** @const */\n');
    writeVariableStatement(
        exportSpecifier.name.text, namespace,
        `${namespacePrefix}.${exportSpecifier.propertyName.text}`);
  }
}
/**
 * Adds aliases for the symbols imported in the given declaration, so that
 * their types get printed as the fully qualified name rather than as a
 * reference to the local import alias.
 *
 * tsickle generates .js files that (at most) contain a `goog.provide`, but
 * are not `goog.module`s. Such files cannot express an aliased import, but
 * Closure Compiler does allow referencing types by fully qualified name, so
 * tsickle resolves the imported module URI and records
 * `path.to.module.Symbol` as the alias to use when referencing the type.
 */
function addImportAliases(decl: ts.ImportDeclaration|ts.ImportEqualsDeclaration) {
  let moduleUri: ts.StringLiteral;
  if (ts.isImportDeclaration(decl)) {
    moduleUri = decl.moduleSpecifier as ts.StringLiteral;
  } else if (ts.isExternalModuleReference(decl.moduleReference)) {
    // import foo = require('./bar');
    moduleUri = decl.moduleReference.expression as ts.StringLiteral;
  } else {
    // import foo = bar.baz.bam; — handled at the call site.
    return;
  }
  if (ts.isImportEqualsDeclaration(decl)) {
    // import foo = require('./bar'); aliases the whole module.
    addImportAlias(decl.name, moduleUri, undefined);
    return;
  }
  // Side effect import ("import 'somepath';") declares no local aliases.
  const importClause = decl.importClause;
  if (!importClause) return;
  if (importClause.name) {
    // Default import: map to .default on the module.
    addImportAlias(importClause.name, moduleUri, 'default');
  }
  const namedBindings = importClause.namedBindings;
  if (!namedBindings) return;
  if (ts.isNamespaceImport(namedBindings)) {
    // import * as name: map directly to the module itself.
    addImportAlias(namedBindings.name, moduleUri, undefined);
  } else if (ts.isNamedImports(namedBindings)) {
    // import {A as B}: record an alias for each binding.
    // NOTE(review): this passes the local name (B) as the property, not the
    // original exported name (A = propertyName) — confirm that is intended.
    for (const namedBinding of namedBindings.elements) {
      addImportAlias(namedBinding.name, moduleUri, namedBinding.name);
    }
  }
}
/**
 * Adds an import alias for the symbol defined at the given node. Creates an alias name based on
 * the given moduleName and (optionally) the name.
 */
function addImportAlias(
node: ts.Node, moduleUri: ts.StringLiteral,
name: ts.Identifier|string|undefined) {
let symbol = typeChecker.getSymbolAtLocation(node);
if (!symbol) {
reportDiagnostic(diagnostics, node, `named import has no symbol`);
return;
}
// Follow the import alias to the underlying symbol, so the recorded alias
// applies wherever the symbol is referenced.
if (symbol.flags & ts.SymbolFlags.Alias) {
symbol = typeChecker.getAliasedSymbol(symbol);
}
const moduleSymbol = typeChecker.getSymbolAtLocation(moduleUri);
if (!moduleSymbol) {
reportDiagnostic(diagnostics, moduleUri, `imported module has no symbol`);
return;
}
// A 'goog:' namespace import is aliased directly to the Closure namespace.
const googNamespace =
namespaceForImportUrl(moduleUri, diagnostics, moduleUri.text, moduleSymbol);
let aliasName: string;
if (googNamespace) {
aliasName = googNamespace;
} else {
// While type_translator does add the mangled prefix for ambient declarations, it only does so
// for non-aliased (i.e. not imported) symbols. That's correct for its use in regular modules,
// which will have a local symbol for the imported ambient symbol. However within an externs
// file, there are no imports, so we need to make sure the alias already contains the correct
// module name, which means the mangled module name in case of imports symbols.
// This only applies to non-Closure ('goog:') imports.
const isAmbientModuleDeclaration =
symbol.declarations && symbol.declarations.some(d => isAmbient(d));
const fullUri =
resolveModuleName(host, sourceFile.fileName, moduleUri.text);
if (isAmbientModuleDeclaration) {
aliasName = moduleNameAsIdentifier(host, fullUri);
} else {
// NOTE(review): fullUri is already the result of resolveModuleName above;
// resolving it a second time here looks redundant — presumably idempotent,
// confirm before simplifying.
aliasName = host.pathToModuleName(
sourceFile.fileName, resolveModuleName(host, sourceFile.fileName, fullUri));
}
// Append the (optional) imported member name onto the module alias.
if (typeof name === 'string') {
aliasName += '.' + name;
} else if (name) {
aliasName += '.' + getIdentifierText(name);
}
}
mtt.symbolsToAliasedNames.set(symbol, aliasName);
}
/**
 * Produces a compiler diagnostic that names the Node's syntax kind. Useful as
 * the fallback branch of code that attempts to handle every possible input
 * Node type, to make unhandled cases visible.
 */
function errorUnimplementedKind(node: ts.Node, where: string) {
  const kindName = ts.SyntaxKind[node.kind];
  reportDiagnostic(diagnostics, node, `${kindName} not implemented in ${where}`);
}
/**
 * Decides the namespace a top level declaration is emitted in: the mangled
 * file namespace, or the global namespace.
 *
 * The cases covered:
 * 1) a declaration in a .d.ts
 *    1a) the .d.ts is an external module --> namespace
 *    1b) the .d.ts is not an external module --> global
 * 2) a declaration in a .ts file (all treated as modules)
 *    2a) exported --> namespace
 *    2b) unexported --> global
 *
 * Symbols in module .d.ts files (1a) are namespaced to avoid collisions,
 * while script .d.ts files genuinely declare globals (1b). Exported ambient
 * declarations in .ts files (2a) are namespaced for the same collision
 * reasons. An unexported local `declare const x: string;` (2b) however is
 * emitted by TypeScript as plain `x` at use sites — like a `declare global`
 * variable — so Closure Compiler needs a matching global extern, without the
 * namespace wrapper, or the build would fail.
 */
function getNamespaceForTopLevelDeclaration(
    declaration: ts.Declaration, namespace: ReadonlyArray<string>): ReadonlyArray<string> {
  // Only true top level symbols are candidates; any other namespacing
  // (global names, nested namespaces) is always kept as-is.
  if (namespace.length !== 0) return namespace;
  const mustMangle = (isDts && isExternalModule) ||
      hasModifierFlag(declaration, ts.ModifierFlags.Export);
  return mustMangle ? [rootNamespace] : [];
}
/**
 * Returns a string representation of the location for warning comments: the
 * namespace if non-empty, otherwise the current source file's bare name. The
 * directory part is stripped so output is stable under bazel, where sandboxes
 * use different paths. Deliberately not a diagnostic, so harmless unhandled
 * cases do not break the build.
 */
function debugLocationStr(node: ts.Node, namespace: ReadonlyArray<string>) {
  const namespaceStr = namespace.join('.');
  if (namespaceStr) return namespaceStr;
  return node.getSourceFile().fileName.replace(/.*[/\\]/, '');
}
/**
 * Recursively visits a statement (and nested module blocks), dispatching on
 * the node kind to the appropriate externs writer. `namespace` is the dotted
 * path the emitted symbols should live under.
 */
function visitor(node: ts.Node, namespace: ReadonlyArray<string>) {
if (node.parent === sourceFile) {
// Top level declarations may need the root (mangled) namespace; see
// getNamespaceForTopLevelDeclaration for the cases.
namespace = getNamespaceForTopLevelDeclaration(node as ts.DeclarationStatement, namespace);
}
switch (node.kind) {
case ts.SyntaxKind.ModuleDeclaration:
const decl = node as ts.ModuleDeclaration;
switch (decl.name.kind) {
case ts.SyntaxKind.Identifier:
if (decl.flags & ts.NodeFlags.GlobalAugmentation) {
// E.g. "declare global { ... }". Reset to the outer namespace.
namespace = [];
} else {
// E.g. "declare namespace foo {"
const name = getIdentifierText(decl.name as ts.Identifier);
if (isFirstValueDeclaration(decl)) {
emit('/** @const */\n');
writeVariableStatement(name, namespace, '{}');
}
namespace = namespace.concat(name);
}
if (decl.body) visitor(decl.body, namespace);
break;
case ts.SyntaxKind.StringLiteral:
// E.g. "declare module 'foo' {" (note the quotes).
// We still want to emit externs for this module, but Closure doesn't provide a
// mechanism for module-scoped externs. Instead, we emit in a mangled namespace.
// The mangled namespace (after resolving files) matches the emit for an original module
// file, so effectively this augments any existing module.
const importName = (decl.name as ts.StringLiteral).text;
const importedModuleName =
resolveModuleName({moduleResolutionHost, options}, sourceFile.fileName, importName);
const mangled = moduleNameAsIdentifier(host, importedModuleName);
emit(`// Derived from: declare module "${importName}"\n`);
namespace = [mangled];
// Declare "mangled$name" if it's not declared already elsewhere.
if (isFirstValueDeclaration(decl)) {
emit('/** @const */\n');
writeVariableStatement(mangled, [], '{}');
}
// Declare the contents inside the "mangled$name".
if (decl.body) visitor(decl.body, [mangled]);
break;
default:
errorUnimplementedKind(decl.name, 'externs generation of namespace');
break;
}
break;
case ts.SyntaxKind.ModuleBlock:
// The `{ ... }` body of a namespace: visit each contained statement.
const block = node as ts.ModuleBlock;
for (const stmt of block.statements) {
visitor(stmt, namespace);
}
break;
case ts.SyntaxKind.ImportEqualsDeclaration:
const importEquals = node as ts.ImportEqualsDeclaration;
const localName = getIdentifierText(importEquals.name);
if (importEquals.moduleReference.kind === ts.SyntaxKind.ExternalModuleReference) {
// import foo = require('...'): record an alias, no emit needed here.
addImportAliases(importEquals);
break;
}
// import foo = bar.baz; emit an aliasing assignment.
const qn = qualifiedNameToMangledIdentifier(importEquals.moduleReference);
// @const so that Closure Compiler understands this is an alias.
emit('/** @const */\n');
writeVariableStatement(localName, namespace, qn);
break;
case ts.SyntaxKind.ClassDeclaration:
case ts.SyntaxKind.InterfaceDeclaration:
writeType(node as ts.InterfaceDeclaration | ts.ClassDeclaration, namespace);
break;
case ts.SyntaxKind.FunctionDeclaration:
const fnDecl = node as ts.FunctionDeclaration;
const name = fnDecl.name;
if (!name) {
reportDiagnostic(diagnostics, fnDecl, 'anonymous function in externs');
break;
}
// Gather up all overloads of this function.
const sym = typeChecker.getSymbolAtLocation(name)!;
const decls = sym.declarations!.filter(ts.isFunctionDeclaration);
// Only emit the first declaration of each overloaded function.
if (fnDecl !== decls[0]) break;
const params = emitFunctionType(decls);
writeFunction(name, params, namespace);
break;
case ts.SyntaxKind.VariableStatement:
for (const decl of (node as ts.VariableStatement).declarationList.declarations) {
writeVariableDeclaration(decl, namespace);
}
break;
case ts.SyntaxKind.EnumDeclaration:
writeEnum(node as ts.EnumDeclaration, namespace);
break;
case ts.SyntaxKind.TypeAliasDeclaration:
writeTypeAlias(node as ts.TypeAliasDeclaration, namespace);
break;
case ts.SyntaxKind.ImportDeclaration:
addImportAliases(node as ts.ImportDeclaration);
break;
case ts.SyntaxKind.NamespaceExportDeclaration:
case ts.SyntaxKind.ExportAssignment:
// Handled on the file level.
break;
case ts.SyntaxKind.ExportDeclaration:
const exportDeclaration = node as ts.ExportDeclaration;
writeExportDeclaration(exportDeclaration, namespace);
break;
default:
// Unhandled statement kinds become a visible TODO in the output.
emit(`\n// TODO(tsickle): ${ts.SyntaxKind[node.kind]} in ${
debugLocationStr(node, namespace)}\n`);
break;
}
}
}
import * as path from 'path'
import readdirp from 'readdirp'
import {
stat, exists, readFile, replaceContents, mkdirp, rm, isEmptyDir,
rename, existsSync, readFileSync, statSync, notFoundAsUndefined,
} from '@salto-io/file'
import { localDirectoryStore } from '../../../src/local-workspace/dir_store'
// Mock '@salto-io/file', keeping the real implementation of everything not
// explicitly overridden (via the requireActual spread).
jest.mock('@salto-io/file', () => ({
...jest.requireActual<{}>('@salto-io/file'),
// NOTE(review): 'readdirp' and 'promise' do not appear to be exports of
// '@salto-io/file' (readdirp is its own module, mocked separately below) —
// these two entries look vestigial; confirm before removing.
readdirp: jest.fn(),
stat: jest.fn(),
statSync: jest.fn(),
exists: jest.fn(),
existsSync: jest.fn(),
readFile: jest.fn(),
readFileSync: jest.fn(),
promise: jest.fn(),
replaceContents: jest.fn(),
mkdirp: jest.fn(),
rm: jest.fn(),
rename: jest.fn(),
isEmptyDir: jest.fn(),
}))
// Re-attach the notFoundAsUndefined variants (lost by replacing the functions
// with jest.fn() above) by wrapping the mocks with the real helper.
isEmptyDir.notFoundAsUndefined = notFoundAsUndefined(isEmptyDir)
rename.notFoundAsUndefined = notFoundAsUndefined(rename)
// Automock the readdirp module itself (used via readdirp.promise).
jest.mock('readdirp')
describe('localDirectoryStore', () => {
const encoding = 'utf8'
beforeEach(() => {
jest.clearAllMocks()
})
const mockStat = stat as unknown as jest.Mock
const mockStatSync = statSync as unknown as jest.Mock
const mockFileExists = exists as jest.Mock
const mockFileSyncExists = existsSync as jest.Mock
const mockReadFile = readFile as unknown as jest.Mock
const mockReadFileSync = readFileSync as unknown as jest.Mock
const mockReaddirp = readdirp.promise as jest.Mock
const mockReplaceContents = replaceContents as jest.Mock
const mockMkdir = mkdirp as jest.Mock
const mockRm = rm as jest.Mock
const mockRename = rename as unknown as jest.Mock
const mockEmptyDir = isEmptyDir as unknown as jest.Mock
describe('list', () => {
it('returns empty list if dir not exists', async () => {
mockFileExists.mockResolvedValue(false)
const result = await localDirectoryStore({ baseDir: '', name: '', encoding }).list()
expect(result).toEqual([])
})
it('skip hidden directories', async () => {
const fileFilter = '*.nacl'
const baseDir = 'hidden'
mockFileExists.mockResolvedValue(true)
mockReaddirp.mockResolvedValue([{ fullPath: 'test1' }, { fullPath: 'test2' }])
const result = await localDirectoryStore({ baseDir, name: '', encoding, fileFilter }).list()
expect(result).toEqual(['test1', 'test2'])
expect(mockReaddirp.mock.calls[0][0]).toEqual(baseDir)
expect(mockReaddirp.mock.calls[0][1].fileFilter).toEqual(fileFilter)
expect(mockReaddirp.mock.calls[0][1].directoryFilter({ basename: '.hidden' })).toBeFalsy()
})
it('list only under accessible path', async () => {
const baseDir = '/baseDir'
mockFileExists.mockResolvedValue(true)
mockReaddirp.mockResolvedValue([{ fullPath: '/baseDir/access/test1' }, { fullPath: '/baseDir/access/test2' }])
const result = await localDirectoryStore({ baseDir, name: '', accessiblePath: 'access', encoding }).list()
expect(result).toEqual(['access/test1', 'access/test2'])
expect(mockReaddirp).toHaveBeenCalledTimes(1)
expect(mockReaddirp).toHaveBeenCalledWith(path.join(baseDir, 'access'), expect.any(Object))
})
})
describe('isEmpty', () => {
it('returns true if dir does not exist', async () => {
mockFileExists.mockResolvedValue(false)
const result = await localDirectoryStore({ baseDir: 'not-found', name: '', encoding }).isEmpty()
expect(result).toEqual(true)
})
it('return true if dir is empty', async () => {
const fileFilter = '*.nacl'
const baseDir = 'base'
mockFileExists.mockResolvedValue(true)
mockReaddirp.mockResolvedValue([])
const result = await localDirectoryStore({ baseDir, name: '', encoding, fileFilter }).isEmpty()
expect(result).toEqual(true)
})
it('return false if dir has files', async () => {
const fileFilter = '*.nacl'
const baseDir = 'base'
mockFileExists.mockResolvedValue(true)
mockReaddirp.mockResolvedValue([{ fullPath: 'test1' }, { fullPath: 'test2' }])
const result = await localDirectoryStore({ baseDir, name: '', encoding, fileFilter }).isEmpty()
expect(result).toEqual(false)
})
})
describe('get', () => {
  // A missing file resolves to undefined and the file is never read.
  it('does not return the file if it does not exist', async () => {
    const baseDir = 'not-exists'
    const naclFileName = 'blabla/notexist.nacl'
    mockFileExists.mockResolvedValue(false)
    const naclFile = await localDirectoryStore({ baseDir, name: '', encoding }).get(naclFileName)
    expect(naclFile).toBeUndefined()
    expect(mockFileExists.mock.calls[0][0]).toMatch(path.join(baseDir, naclFileName))
    expect(mockReadFile).not.toHaveBeenCalled()
  })
  // A store created with an encoding reads files as strings, passing the
  // encoding through to readFile.
  it('returns the file if it exist for string dir store', async () => {
    const baseDir = 'exists'
    const naclFileName = 'blabla/exist.nacl'
    const content = 'content'
    mockFileExists.mockResolvedValue(true)
    mockReadFile.mockResolvedValue(content)
    mockStat.mockResolvedValue({ mtimeMs: 7 })
    const naclFile = await localDirectoryStore({ baseDir, name: '', encoding }).get(naclFileName)
    expect(naclFile?.buffer).toBe(content)
    expect(mockFileExists.mock.calls[0][0]).toMatch(path.join(baseDir, naclFileName))
    expect(mockReadFile.mock.calls[0][0]).toMatch(path.join(baseDir, naclFileName))
    expect(mockReadFile.mock.calls[0][1]).toEqual({ encoding })
  })
  // A store created without an encoding reads files as Buffers
  // (readFile is invoked with `encoding: undefined`).
  it('returns the file if it exist for Buffer dir store', async () => {
    const baseDir = '/base'
    const bufferStore = localDirectoryStore({ baseDir, name: '' })
    const bufferFileName = 'someBufferFile.ext'
    const content = Buffer.from('content')
    mockFileExists.mockReturnValue(true)
    mockReadFile.mockReturnValueOnce(content)
    mockStat.mockReturnValue({ mtimeMs: 7 })
    const bufferFile = await bufferStore.get(bufferFileName)
    expect(bufferFile?.buffer).toBe(content)
    expect(mockFileExists.mock.calls[0][0]).toMatch(path.join(baseDir, bufferFileName))
    expect(mockReadFile.mock.calls[0][0]).toMatch(path.join(baseDir, bufferFileName))
    expect(mockReadFile.mock.calls[0][1]).toEqual({ encoding: undefined })
  })
})
// Mirrors the async `get` suite above, but exercises getSync and the
// synchronous fs mocks.
describe('sync get', () => {
  // A missing file returns undefined and the file is never read.
  it('does not return the file if it does not exist', () => {
    const baseDir = 'not-exists'
    const naclFileName = 'blabla/notexist.nacl'
    mockFileSyncExists.mockReturnValue(false)
    const naclFile = localDirectoryStore({ baseDir, name: '', encoding }).getSync(naclFileName)
    expect(naclFile).toBeUndefined()
    expect(mockFileSyncExists.mock.calls[0][0]).toMatch(path.join(baseDir, naclFileName))
    expect(mockReadFileSync).not.toHaveBeenCalled()
  })
  // String store: the encoding is forwarded to readFileSync.
  it('returns the file if it exist for string dir store', () => {
    const baseDir = 'exists'
    const naclFileName = 'blabla/exist.nacl'
    const content = 'content'
    mockFileSyncExists.mockReturnValue(true)
    mockReadFileSync.mockReturnValue(content)
    mockStatSync.mockReturnValue({ mtimeMs: 7 })
    const naclFile = localDirectoryStore({ baseDir, name: '', encoding }).getSync(naclFileName)
    expect(naclFile?.buffer).toBe(content)
    expect(mockFileSyncExists.mock.calls[0][0]).toMatch(path.join(baseDir, naclFileName))
    expect(mockReadFileSync.mock.calls[0][0]).toMatch(path.join(baseDir, naclFileName))
    expect(mockReadFileSync.mock.calls[0][1]).toEqual({ encoding })
  })
  // Buffer store: readFileSync is invoked with `encoding: undefined`.
  it('returns the file if it exist for Buffer dir store', () => {
    const baseDir = '/base'
    const bufferStore = localDirectoryStore({ baseDir, name: '' })
    const bufferFileName = 'someBufferFile.ext'
    const content = Buffer.from('content')
    mockFileSyncExists.mockReturnValue(true)
    mockReadFileSync.mockReturnValue(content)
    mockStatSync.mockReturnValue({ mtimeMs: 7 })
    const bufferFile = bufferStore.getSync(bufferFileName)
    expect(bufferFile?.buffer).toBe(content)
    expect(mockFileSyncExists.mock.calls[0][0]).toMatch(path.join(baseDir, bufferFileName))
    expect(mockReadFileSync.mock.calls[0][0]).toMatch(path.join(baseDir, bufferFileName))
    expect(mockReadFileSync.mock.calls[0][1]).toEqual({ encoding: undefined })
  })
})
describe('set', () => {
  const filename = 'inner/file'
  // Writes are buffered: nothing touches the filesystem until flush(), and a
  // second flush with no new changes writes nothing.
  it('writes a content with the right filename for string dir store', async () => {
    mockFileExists.mockResolvedValue(false)
    mockReplaceContents.mockResolvedValue(true)
    mockMkdir.mockResolvedValue(true)
    const buffer = 'bla'
    const naclFileStore = localDirectoryStore({ baseDir: '', name: '', encoding })
    await naclFileStore.set({ filename, buffer })
    // set() alone must not write anything yet.
    expect(mockMkdir).not.toHaveBeenCalled()
    expect(mockReplaceContents).not.toHaveBeenCalled()
    await naclFileStore.flush()
    expect(mockMkdir.mock.calls[0][0]).toMatch('inner')
    expect(mockReplaceContents.mock.calls[0][0]).toMatch(filename)
    expect(mockReplaceContents.mock.calls[0][1]).toEqual(buffer)
    expect(mockReplaceContents.mock.calls[0][2]).toEqual(encoding)
    mockReplaceContents.mockClear()
    // Flushing again with no pending changes is a no-op.
    await naclFileStore.flush()
    expect(mockReplaceContents).not.toHaveBeenCalled()
  })
  // Same flow for a Buffer store: the write is made without an encoding.
  it('writes a content with the right filename for Buffer dir store', async () => {
    mockFileExists.mockResolvedValue(false)
    mockReplaceContents.mockResolvedValue(true)
    mockMkdir.mockResolvedValue(true)
    const buffer = Buffer.from('bla')
    const bufferFileStore = localDirectoryStore({ baseDir: '', name: '' })
    await bufferFileStore.set({ filename, buffer })
    expect(mockMkdir).not.toHaveBeenCalled()
    expect(mockReplaceContents).not.toHaveBeenCalled()
    await bufferFileStore.flush()
    expect(mockMkdir.mock.calls[0][0]).toMatch('inner')
    expect(mockReplaceContents.mock.calls[0][0]).toMatch(filename)
    expect(mockReplaceContents.mock.calls[0][1]).toEqual(buffer)
    expect(mockReplaceContents.mock.calls[0][2]).toEqual(undefined)
    mockReplaceContents.mockClear()
    await bufferFileStore.flush()
    expect(mockReplaceContents).not.toHaveBeenCalled()
  })
  // NOTE(review): this passes silently if set() resolves — consider
  // `await expect(...).rejects.toThrow(...)` instead of `.catch`.
  it('fails to get an absolute path', () =>
    localDirectoryStore({ baseDir: 'dir', name: '', encoding }).set({ filename: '/aaaa', buffer: 'aa' })
      .catch(err => expect(err.message).toEqual('Filepath not contained in dir store base dir: /aaaa')))
})
describe('getFiles', () => {
  // Missing files yield `undefined` slots while existing files are read in
  // order — the exists/read mocks are sequenced per call.
  it('return multiple files', async () => {
    mockFileExists.mockResolvedValueOnce(false)
      .mockResolvedValueOnce(true)
      .mockResolvedValueOnce(true)
    mockReadFile.mockResolvedValueOnce('bla1').mockResolvedValueOnce('bla2')
    mockStat.mockResolvedValue({ mtimeMs: 7 })
    const files = await localDirectoryStore({ baseDir: '', name: '', encoding }).getFiles(['', '', ''])
    expect(files[0]).toBeUndefined()
    expect(files[1]?.buffer).toEqual('bla1')
    expect(files[2]?.buffer).toEqual('bla2')
  })
  // NOTE(review): this passes silently if getFiles() resolves — consider
  // `await expect(...).rejects.toThrow(...)` instead of `.catch`.
  it('fails to get an absolute path', () =>
    localDirectoryStore({ baseDir: 'dir', name: '', encoding }).getFiles(['/aaaa'])
      .catch(err => expect(err.message).toEqual('Filepath not contained in dir store base dir: /aaaa')))
})
describe('rm Nacl file', () => {
  const baseDir = '/base'
  const multipleFilesDir = 'multi'
  const oneFileDir = 'single'
  const naclFileDir = path.join(baseDir, multipleFilesDir, oneFileDir)
  const naclFileName = 'rm_this.nacl'
  const naclFilePath = path.join(naclFileDir, naclFileName)
  const naclFileStore = localDirectoryStore({ baseDir, name: '', encoding })
  // After deleting the file, its parent directory is removed as long as the
  // emptiness check keeps reporting true (the Once chain stops it at the
  // second level).
  it('delete the Nacl file and its empty directory', async () => {
    mockEmptyDir.mockResolvedValueOnce(true).mockResolvedValueOnce(false)
    mockFileExists.mockResolvedValue(true)
    await naclFileStore.delete(naclFilePath)
    await naclFileStore.flush()
    expect(mockRm).toHaveBeenCalledTimes(2)
    expect(mockRm).toHaveBeenNthCalledWith(1, naclFilePath)
    expect(mockRm).toHaveBeenNthCalledWith(2, naclFileDir)
  })
  // NOTE(review): this passes silently if delete() resolves — consider
  // `await expect(...).rejects.toThrow(...)` instead of `.catch`.
  it('fails to delete an absolute path', async () =>
    naclFileStore.delete('/aaaa')
      .catch(err => expect(err.message).toEqual('Filepath not contained in dir store base dir: /aaaa')))
})
describe('clear', () => {
  const baseDir = '/base'
  const naclFileStore = localDirectoryStore({ baseDir, name: '', encoding })
  beforeAll(() => {
    // Default the directory walk to "no entries" for all tests in this suite.
    mockReaddirp.mockResolvedValue([])
  })
  afterAll(() => {
    mockReaddirp.mockClear()
  })
  // clear() removes every listed file, then the (now empty) base directory,
  // but must never climb above baseDir.
  it('should delete the all the files', async () => {
    mockEmptyDir.mockResolvedValueOnce(true).mockResolvedValueOnce(false)
    mockFileExists.mockResolvedValue(true)
    mockReaddirp.mockResolvedValueOnce([
      { fullPath: path.join(baseDir, 'test1') },
      { fullPath: path.join(baseDir, 'test2') },
    ]).mockResolvedValueOnce([])
    await naclFileStore.clear()
    expect(mockRm).toHaveBeenCalledTimes(3)
    expect(mockRm).toHaveBeenCalledWith(path.join(baseDir, 'test1'))
    expect(mockRm).toHaveBeenCalledWith(path.join(baseDir, 'test2'))
    expect(mockRm).toHaveBeenCalledWith(baseDir)
    expect(mockRm).not.toHaveBeenCalledWith(path.dirname(baseDir))
  })
  // Empty subdirectories discovered by the second walk are removed too.
  it('should delete empty directories', async () => {
    mockEmptyDir.mockResolvedValueOnce(true).mockResolvedValueOnce(true)
    mockReaddirp.mockResolvedValueOnce([])
      .mockResolvedValueOnce([
        { fullPath: path.join(baseDir, 'emptyDir') },
      ])
    await naclFileStore.clear()
    expect(mockRm).toHaveBeenCalledTimes(2)
    expect(mockRm).toHaveBeenCalledWith(path.join(baseDir, 'emptyDir'))
    expect(mockRm).toHaveBeenCalledWith(baseDir)
  })
  // When the store has a `name`, clearing stops at baseDir/name and leaves
  // baseDir itself untouched.
  it('should not delete parent', async () => {
    const store = localDirectoryStore({ baseDir, name: 'name', encoding })
    const filePath = `${baseDir}/name`
    mockEmptyDir.mockResolvedValueOnce(true).mockResolvedValueOnce(false).mockResolvedValueOnce(
      true
    )
    mockFileExists.mockResolvedValue(true)
    mockReaddirp.mockResolvedValueOnce([
      { fullPath: path.join(filePath, 'test1') },
      { fullPath: path.join(filePath, 'test2') },
    ])
    await store.clear()
    expect(mockRm).toHaveBeenCalledTimes(3)
    expect(mockRm).toHaveBeenCalledWith(path.join(filePath, 'test1'))
    expect(mockRm).toHaveBeenCalledWith(path.join(filePath, 'test2'))
    expect(mockRm).toHaveBeenCalledWith(filePath)
    expect(mockRm).not.toHaveBeenCalledWith(baseDir)
  })
})
describe('rename', () => {
  const baseDir = '/dir'
  const name = 'base'
  const naclFileStore = localDirectoryStore({ baseDir, name, encoding })
  // rename() moves every file from baseDir/<old name> to baseDir/<new name>.
  it('should rename the all files', async () => {
    mockFileExists.mockResolvedValue(true)
    mockReaddirp.mockResolvedValue([{ fullPath: path.join(baseDir, name, 'test1') }])
    await naclFileStore.rename('new')
    expect(mockRename).toHaveBeenCalledTimes(1)
    expect(mockRename).toHaveBeenCalledWith(path.join(baseDir, name, 'test1'), path.join(baseDir, 'new', 'test1'))
  })
})
describe('renameFile', () => {
  const baseDir = '/base'
  const naclFileStore = localDirectoryStore({ baseDir, name: '', encoding })
  // renameFile() resolves both names relative to the store's base directory.
  it('should rename the file', async () => {
    await naclFileStore.renameFile('old', 'new')
    expect(mockRename).toHaveBeenCalledTimes(1)
    expect(mockRename).toHaveBeenCalledWith(path.join(baseDir, 'old'), path.join(baseDir, 'new'))
  })
})
// Path-containment checks: any path that escapes the base dir / accessible
// path must be rejected with a descriptive error.
describe('contained', () => {
  const baseDir = '/base'
  const fileStore = localDirectoryStore({ baseDir, name: '', accessiblePath: 'access', encoding })
  // Fixed: these tests previously used `promise.catch(err => expect(...))`,
  // which passes silently when the promise unexpectedly resolves.
  // `rejects.toThrow` fails the test if no error is raised.
  it('should fail for absolute paths', () =>
    expect(fileStore.get('/absolutely/fabulous'))
      .rejects.toThrow('Filepath not contained in dir store base dir: /absolutely/fabulous'))
  it('should fail for relative paths outside basedir', () =>
    expect(fileStore.get('../../bla'))
      .rejects.toThrow('Filepath not contained in dir store base dir: ../../bla'))
  it('should fail for relative paths inside basedir but outside accessible path', () =>
    expect(fileStore.get('bla'))
      .rejects.toThrow('Filepath not contained in dir store base dir: bla'))
  it('should fail for relative paths outside basedir even for smart assets', () =>
    expect(fileStore.mtimestamp('something/bla/../../../dev/null'))
      .rejects.toThrow('Filepath not contained in dir store base dir: something/bla/../../../dev/null'))
  // `..` appearing inside a path segment name is legal and must be accepted.
  it('should succeed for paths that contain ".." as part of the parts names', () =>
    expect(fileStore.get('access/..relatively../..fabulous../..bla..jsonl')).resolves.not.toThrow())
})
describe('getTotalSize', () => {
  const baseDir = '/base'
  const naclFileStore = localDirectoryStore({ baseDir, name: '', encoding })
  // getTotalSize() stats every listed file and sums the reported sizes.
  it('should getTotalSize the file', async () => {
    mockEmptyDir.mockResolvedValueOnce(true).mockResolvedValueOnce(false)
    mockFileExists.mockResolvedValue(true)
    mockReaddirp.mockResolvedValueOnce([
      { fullPath: path.join(baseDir, 'test1') },
      { fullPath: path.join(baseDir, 'test2') },
    ]).mockResolvedValueOnce([])
    // test1 is 5 bytes, every other file is 4 bytes → total 9.
    mockStat
      .mockImplementation(filePath =>
        (filePath.endsWith('test1') ? ({ size: 5 }) : ({ size: 4 })))
    const totalSize = await naclFileStore.getTotalSize()
    expect(mockStat).toHaveBeenCalledTimes(2)
    expect(totalSize).toEqual(9)
  })
})
describe('getFullPath', () => {
  // getFullPath() joins baseDir, the store name and the relative filename.
  it('should return the full path of a file', () => {
    const baseDir = '/base'
    const name = 'name'
    const naclFileStore = localDirectoryStore({ baseDir, name, encoding })
    const filename = 'filename'
    // Fixed: the expected string previously contained a garbled `$(unknown)`
    // token instead of interpolating the filename under test, so the
    // assertion could never pass.
    expect(naclFileStore.getFullPath(filename)).toBe(`${baseDir}/${name}/${filename}`)
  })
})
})
import "source-map-support/register";
import * as assert from "assert";
import * as Mocha from "mocha";
import * as path from "path";
// @ts-expect-error
import * as unexpected from "unexpected";
import * as vscode from "vscode";
import { Context, SelectionBehavior, Selections } from "../../src/api";
import type { Extension } from "../../src/state/extension";
/**
 * Minimal typing for the `unexpected` assertion library instance used in
 * these tests (the package ships no types — see the `@ts-expect-error` on
 * its import above).
 */
interface Expect<T = any> {
  // Main entry point: `expect(subject, "assertion", ...args)`, chainable
  // through `.and`.
  <T>(subject: T, assertion: string, ...args: readonly any[]): {
    readonly and: Expect.Continuation<T>;
  };

  // Nested-assertion helper, e.g. `expect.it("to satisfy", ...)`.
  readonly it: Expect.Continuation<T>;

  // Registers a custom assertion for a `<type>`-tagged pattern string.
  addAssertion<T>(
    pattern: string,
    handler: (expect: Expect<T>, subject: T, ...args: readonly any[]) => void,
  ): void;

  // Registers a custom subject type (identification + pretty-printing).
  addType<T>(typeDefinition: {
    name: string;
    identify: (value: unknown) => boolean;
    base?: string;
    inspect?: (value: T, depth: number, output: Expect.Output, inspect: Expect.Inspect) => any;
  }): void;
}
// Companion namespace merged with the `Expect` interface above, holding the
// auxiliary types used in its signatures.
namespace Expect {
  // A chained assertion: takes the assertion string and its arguments, and
  // returns an object allowing further chaining via `.and`.
  export interface Continuation<T = any> {
    (assertion: string, ...args: readonly any[]): { readonly and: Continuation<T> };
  }

  // Recursive value inspector passed to custom `inspect` implementations.
  export interface Inspect {
    (value: any, depth: number): any;
  }

  // Output builder used by custom `inspect` implementations.
  export interface Output {
    text(text: string): Output;
    append(_: any): Output;
  }
}
/**
 * Resolves a path starting at the root of the Git repository.
 *
 * The compiled test files live in `dance/out/test/suite/utils`, so the
 * repository root is three levels above this module.
 */
export function resolve(subpath: string) {
  const repositoryRoot = path.join(__dirname, "../../..");

  return path.join(repositoryRoot, subpath);
}
/**
 * Add depth to command-like suites for nicer reporting.
 *
 * Test titles use " > " as a separator; everything before the last segment
 * becomes (or joins) a sub-suite of `toplevel`, and the test keeps only its
 * final segment as its title. The flat test list of `toplevel` is emptied
 * once all tests have been re-parented.
 */
export function groupTestsByParentName(toplevel: Mocha.Suite) {
  for (const test of toplevel.tests) {
    const segments = test.title.split(" > ");
    const shortTitle = segments.pop()!;
    const parentTitle = segments.join(" ");

    // Reuse an existing sub-suite with this title, or create one.
    let parent = toplevel.suites.find((candidate) => candidate.title === parentTitle);

    if (parent === undefined) {
      parent = Mocha.Suite.create(toplevel, parentTitle);
    }

    parent.addTest(test);
    test.title = shortTitle;
  }

  toplevel.tests.splice(0);
}
/**
 * Executes a VS Code command, attempting to better recover errors.
 *
 * VS Code wraps command failures in a generic "Running the contributed
 * command ... failed." error; this helper temporarily hooks the Dance
 * extension's `runPromiseSafely` to capture the underlying error instead.
 * When the single argument carries `$expect: RegExp`, the command is
 * expected to fail with a matching message; otherwise any captured error
 * is rethrown and the command's result is returned.
 */
export async function executeCommand(command: string, ...args: readonly any[]) {
  const extension =
    vscode.extensions.getExtension<{ extension: Extension }>("gregoire.dance")!.exports.extension;

  // Override the extension's promise runner so that the real error is
  // recorded in `error`. The closure references `error`, declared below —
  // this is safe because the closure only runs after the declaration has
  // been evaluated (during the executeCommand call further down).
  extension.runPromiseSafely = async (f) => {
    try {
      return await f();
    } catch (e) {
      error = e;
      throw e;
    }
  };

  let result: unknown,
      error: unknown;

  try {
    result = await vscode.commands.executeCommand(command, ...args);
  } catch (e) {
    // Keep the error captured by the hook when VS Code only gave us its
    // generic wrapper; otherwise record this error.
    if (error === undefined
        || !(e instanceof Error
             && e.message.startsWith("Running the contributed command")
             && e.message.endsWith("failed."))) {
      error = e;
    }
  } finally {
    // Remove the override (presumably re-exposing the prototype method —
    // hence the @ts-expect-error, since the property is not optional).
    // @ts-expect-error
    delete extension.runPromiseSafely;
  }

  // `$expect` in the single-argument object means failure was expected.
  if (command.startsWith("dance") && args.length === 1 && args[0].$expect instanceof RegExp) {
    assert.notStrictEqual(error, undefined, "an error was expected, but no error was raised");

    const pattern = args[0].$expect,
          message = "" + ((error as any)?.message ?? error);

    assert.match(
      message,
      pattern,
      `error ${JSON.stringify(message)} does not match expected pattern ${pattern}`,
    );
  } else if (error !== undefined) {
    throw error;
  }

  return result;
}
// Shared `unexpected` instance, extended below with VS Code-aware types and
// assertions.
export const expect: Expect = unexpected.clone();

// Compact "line:character" rendering used by the inspectors below.
const shortPos = (p: vscode.Position) => `${p.line}:${p.character}`;

// Teach `unexpected` to identify and pretty-print vscode.Position values.
expect.addType<vscode.Position>({
  name: "position",
  identify: (v) => v instanceof vscode.Position,
  base: "object",
  inspect: (value, _, output) => {
    output
      .text("Position(")
      .text(shortPos(value))
      .text(")");
  },
});

// Same for vscode.Range, shown as "Range(start -> end)".
expect.addType<vscode.Range>({
  name: "range",
  identify: (v) => v instanceof vscode.Range,
  base: "object",
  inspect: (value, _, output) => {
    output
      .text("Range(")
      .text(shortPos(value.start))
      .text(" -> ")
      .text(shortPos(value.end))
      .text(")");
  },
});

// Same for vscode.Selection (a Range subclass), shown as anchor -> active.
expect.addType<vscode.Selection>({
  name: "selection",
  identify: (v) => v instanceof vscode.Selection,
  base: "range",
  inspect: (value, _, output) => {
    output
      .text("Selection(")
      .text(shortPos(value.anchor))
      .text(" -> ")
      .text(shortPos(value.active))
      .text(")");
  },
});
// Custom assertions over the types registered above. Patterns follow the
// `unexpected` DSL: "[not]" makes the assertion negatable, "(a|b)" offers
// alternative phrasings, and "<type>" constrains subject/argument types.

// Position equality by explicit line/character coordinates.
expect.addAssertion<vscode.Position>(
  "<position> [not] to (have|be at) coords <number> <number>",
  (expect, subject, line: number, character: number) => {
    expect(subject, "[not] to satisfy", { line, character });
  },
);

// An empty range anchored at the given coordinates.
expect.addAssertion<vscode.Range>(
  "<range> [not] to be empty at coords <number> <number>",
  (expect, subject, line: number, character: number) => {
    expect(subject, "[not] to start at", new vscode.Position(line, character))
      .and("[not] to be empty");
  },
);

expect.addAssertion<vscode.Range>(
  "<range> [not] to be empty",
  (expect, subject) => {
    expect(subject, "[not] to satisfy", { isEmpty: true });
  },
);

// Range start/end checks, by Position or by raw coordinates.
expect.addAssertion<vscode.Range>(
  "<range> [not] to start at <position>",
  (expect, subject, position: vscode.Position) => {
    expect(subject, "[not] to satisfy", { start: position });
  },
);

expect.addAssertion<vscode.Range>(
  "<range> [not] to end at <position>",
  (expect, subject, position: vscode.Position) => {
    expect(subject, "[not] to satisfy", { end: position });
  },
);

expect.addAssertion<vscode.Range>(
  "<range> [not] to start at coords <number> <number>",
  (expect, subject, line: number, character: number) => {
    expect(subject, "[not] to start at", new vscode.Position(line, character));
  },
);

expect.addAssertion<vscode.Range>(
  "<range> [not] to end at coords <number> <number>",
  (expect, subject, line: number, character: number) => {
    expect(subject, "[not] to end at", new vscode.Position(line, character));
  },
);

// Selection-specific checks: direction, anchor and active (cursor) ends.
expect.addAssertion<vscode.Selection>(
  "<selection> [not] to be reversed",
  (expect, subject) => {
    expect(subject, "[not] to satisfy", { isReversed: true });
  },
);

expect.addAssertion<vscode.Selection>(
  "<selection> [not] to (have anchor|be anchored) at <position>",
  (expect, subject, position: vscode.Position) => {
    expect(subject, "[not] to satisfy", { anchor: position });
  },
);

expect.addAssertion<vscode.Selection>(
  "<selection> [not] to (have cursor|be active) at <position>",
  (expect, subject, position: vscode.Position) => {
    expect(subject, "[not] to satisfy", { active: position });
  },
);

expect.addAssertion<vscode.Selection>(
  "<selection> [not] to (have anchor|be anchored) at coords <number> <number>",
  (expect, subject, line: number, character: number) => {
    expect(subject, "[not] to be anchored at", new vscode.Position(line, character));
  },
);

expect.addAssertion<vscode.Selection>(
  "<selection> [not] to (have cursor|be active) at coords <number> <number>",
  (expect, subject, line: number, character: number) => {
    expect(subject, "[not] to be active at", new vscode.Position(line, character));
  },
);
/**
 * Renders `document`'s text with `selection` marked inline: `|` for an
 * empty selection (or the anchor side), `>`/`<` for the active side, and
 * `↵` when the selection ends exactly at a line break.
 */
function stringifySelection(document: vscode.TextDocument, selection: vscode.Selection) {
  const content = document.getText();
  const startOffset = document.offsetAt(selection.start);

  // Empty selections are shown as a single caret.
  if (selection.isEmpty) {
    return `${content.slice(0, startOffset)}|${content.slice(startOffset)}`;
  }

  const startMarker = selection.isReversed ? "|" : ">";
  let endMarker = selection.isReversed ? "<" : "|";
  let endOffset = document.offsetAt(selection.end);

  if (selection.end.character === 0) {
    // Selection ends at a line break: mark it and keep the newline outside.
    endMarker = "↵" + endMarker;
    endOffset--;
  }

  return [
    content.slice(0, startOffset),
    startMarker,
    content.slice(startOffset, endOffset),
    endMarker,
    content.slice(endOffset),
  ].join("");
}
export class ExpectedDocument {
public constructor(
public readonly text: string,
public readonly selections: vscode.Selection[] = [],
) {
const lineCount = text.split("\n").length;
for (const selection of selections) {
expect(selection.end.line, "to be less than", lineCount);
}
}
public static snapshot(editor: vscode.TextEditor) {
return new this(editor.document.getText(), editor.selections);
}
public static apply(editor: vscode.TextEditor, indent: number, text: string) {
return this.parseIndented(indent, text).apply(editor);
}
public static assertEquals(
editor: vscode.TextEditor,
message: string | undefined,
indent: number,
text: string,
) {
return this.parseIndented(indent, text).assertEquals(editor, message);
}
public static parseIndented(indent: number, text: string) {
if (text.length < indent) {
// Empty document.
return new ExpectedDocument("");
}
// Remove first line break.
text = text.slice(1);
// Remove final line break (indent - (two spaces) + line break).
text = text.slice(0, text.length - (indent - 2 + 1));
// Remove indentation.
text = text.replace(new RegExp(`^ {${indent}}`, "gm"), "");
return ExpectedDocument.parse(text);
}
public static parse(text: string) {
text = text.replace(/·/g, " ");
const selections = [] as vscode.Selection[],
lines = [] as string[];
let previousLineStart = 0;
for (let line of text.split("\n")) {
let hasIndicator = false;
line = line.replace(/([|^]+) *(\d+)|(\d+) *([|^]+)/g, (match, c1, n1, n2, c2, offset) => {
const carets = (c1 ?? c2) as string,
selectionIndex = +(n1 ?? n2),
prevSelection = selections[selectionIndex],
empty = carets === "|" && prevSelection === undefined,
start = new vscode.Position(lines.length - 1, offset),
end = offset + carets.length === lines[lines.length - 1].length + 1 && !empty
? new vscode.Position(lines.length, 0) // Select end of line character.
: new vscode.Position(lines.length - 1, offset + (empty ? 0 : carets.length));
if (prevSelection === undefined) {
selections[selectionIndex] = carets[0] === "|"
? new vscode.Selection(end, start)
: new vscode.Selection(start, end);
} else {
selections[selectionIndex] = prevSelection.isEmpty || prevSelection.isReversed
? new vscode.Selection(end, prevSelection.start)
: new vscode.Selection(prevSelection.start, end);
}
hasIndicator = true;
return " ".repeat(match.length);
});
if (hasIndicator && /^ +$/.test(line)) {
continue;
}
if (lines.length > 0) {
previousLineStart += lines[lines.length - 1].length + 1;
// Accounting for the newline character. ^^^
}
lines.push(line);
}
return new this(lines.join("\n"), selections);
}
public async apply(editor: vscode.TextEditor) {
await editor.edit((builder) => {
const start = new vscode.Position(0, 0),
end = editor.document.lineAt(editor.document.lineCount - 1).rangeIncludingLineBreak.end;
builder.replace(new vscode.Range(start, end), this.text);
});
if (this.selections.length > 0) {
editor.selections = this.selections;
}
}
public assertEquals(editor: vscode.TextEditor, message = "") {
const document = editor.document;
assert.strictEqual(
document.getText(),
this.text,
message + (message ? "\n" : "") + `Document text is not as expected.`,
);
const expectedSelections = this.selections.slice() as (vscode.Selection | undefined)[];
if (expectedSelections.length === 0) {
return;
}
expect(editor.selections, "to have items satisfying", expect.it("to satisfy", {
end: expect.it("to satisfy", {
line: expect.it("to be less than", document.lineCount),
}),
}));
// Ensure resulting selections are right.
let mergedSelections = Selections.mergeOverlapping(editor.selections).slice();
if (Context.currentOrUndefined?.selectionBehavior === SelectionBehavior.Character) {
mergedSelections = Selections.toCharacterMode(mergedSelections, document);
}
const actualSelections = mergedSelections.slice() as (vscode.Selection | undefined)[];
// First, we set correct selections to `undefined` to ignore them in the
// checks below.
let hasUnexpectedSelection = false;
for (let i = 0; i < expectedSelections.length && i < actualSelections.length; i++) {
if (expectedSelections[i]!.isEqual(actualSelections[i]!)) {
expectedSelections[i] = actualSelections[i] = undefined;
} else {
hasUnexpectedSelection = true;
}
}
if (!hasUnexpectedSelection && expectedSelections.length === actualSelections.length) {
return;
}
const commonText: string[] = [message === "" ? "Selections are not as expected." : message],
expectedText: string[] = [],
actualText: string[] = [];
// Then, we report selections that are correct, but have the wrong index.
for (let i = 0; i < expectedSelections.length; i++) {
const expectedSelection = expectedSelections[i];
if (expectedSelection === undefined) {
continue;
}
for (let j = 0; j < actualSelections.length; j++) {
const actualSelection = actualSelections[j];
if (actualSelection === undefined) {
continue;
}
if (expectedSelection.isEqual(actualSelection)) {
commonText.push(`Expected selection found at index #${j} to be at index #${i}.`);
expectedSelections[i] = actualSelections[j] = undefined;
break;
}
}
}
// Then, we diff selections that exist in both arrays.
const sortedExpectedSelections = expectedSelections
.map((x, i) => [i, x!] as const)
.filter((x) => x[1] !== undefined)
.sort((a, b) => a[1].start.compareTo(b[1].start));
const sortedActualSelections = actualSelections
.map((x, i) => [i, x!] as const)
.filter((x) => x[1] !== undefined)
.sort((a, b) => a[1].start.compareTo(b[1].start));
for (let i = 0; i < sortedExpectedSelections.length && i < sortedActualSelections.length; i++) {
const [expectedIndex, expectedSelection] = sortedExpectedSelections[i],
[actualIndex, actualSelection] = sortedActualSelections[i];
const error = actualIndex === expectedIndex
? `Selection #${actualIndex} is not as expected:`
: `Actual selection #${actualIndex} differs from expected selection #${expectedIndex}:`;
actualText.push(error);
expectedText.push(error);
actualText.push(stringifySelection(document, actualSelection).replace(/^/gm, " "));
expectedText.push(stringifySelection(document, expectedSelection).replace(/^/gm, " "));
}
// Finally, we report selections that are expected and not found, and those
// that were found but were not expected.
for (let i = sortedActualSelections.length; i < sortedExpectedSelections.length; i++) {
const [index, expectedSelection] = sortedExpectedSelections[i];
expectedText.push(
`Missing selection #${index}:\n${
stringifySelection(document, expectedSelection).replace(/^(?=.)/gm, " ")}`);
actualText.push(`Missing selection #${index}:\n`);
}
for (let i = sortedExpectedSelections.length; i < sortedActualSelections.length; i++) {
const [index, actualSelection] = sortedActualSelections[i];
actualText.push(
`Unexpected selection #${index}:\n${
stringifySelection(document, actualSelection).replace(/^(?=.)/gm, " ")}`);
expectedText.push(`Unexpected selection #${index}:\n`);
}
// Show error:
assert.strictEqual(
actualText.join("\n"),
expectedText.join("\n"),
commonText.join("\n"),
);
// Sometimes the error messages end up being the same; ensure this isn't the
// case below.
assert.fail(commonText.join("\n") + "\n" + actualText.join("\n"));
}
} | the_stack |
import { Component, AfterViewInit } from '@angular/core';
import { NavController, AlertController, Platform } from '@ionic/angular';
import { OsmApiService } from '../../services/osmApi.service';
import { TagsService } from '../../services/tags.service';
import { MapService } from '../../services/map.service';
import { DataService } from '../../services/data.service';
import { ConfigService } from '../../services/config.service';
import { timer } from 'rxjs';
import { TranslateService } from '@ngx-translate/core';
import { cloneDeep, clone } from 'lodash';
import { addAttributesToFeature } from '../../../../scripts/osmToOsmgo/index.js'
import { InitService } from 'src/app/services/init.service';
@Component({
  selector: 'page-push-data-to-osm',
  templateUrl: './pushDataToOsm.html',
  styleUrls: ['./pushDataToOsm.scss']
})
export class PushDataToOsmPage implements AfterViewInit {
  // Running counters of pending changes, shown in the template and
  // decremented as each feature is successfully pushed.
  summary = { 'Total': 0, 'Create': 0, 'Update': 0, 'Delete': 0 };
  // Id of the currently open OSM changeset (empty until one is opened).
  changesetId = '';
  // Comment attached to the changeset, editable by the user.
  commentChangeset = '';
  // True while an upload is in progress (used to disable the UI).
  isPushing = false;
  // Local features with pending changes, mirrored from DataService.
  featuresChanges = [];
  // Password for basic-auth users; filled from config or prompted for.
  basicPassword = null;
  // Last connection error, displayed to the user.
  connectionError;
// Injects the app services and seeds the changeset comment and the list of
// locally changed features.
constructor(
  public dataService: DataService,
  public osmApi: OsmApiService,
  public tagsService: TagsService,
  public mapService: MapService,
  public navCtrl: NavController,
  private alertCtrl: AlertController,
  public configService: ConfigService,
  public platform: Platform,
  private translate: TranslateService,
  public initService: InitService
) {
  this.commentChangeset = this.configService.getChangeSetComment();
  this.featuresChanges = this.dataService.getGeojsonChanged().features;
}
/**
 * Reads the user's password and, when the app data has not been loaded yet
 * (e.g. the user navigated straight to this page), waits for the initial
 * load before refreshing the changeset comment and the pending changes.
 *
 * Fixed: removed a leftover debug `console.log('ooo')`.
 */
ngOnInit(): void {
  if (!this.initService.isLoaded){
    // NOTE(review): this subscription is never unsubscribed — confirm that
    // initLoadData$ completes, otherwise it leaks.
    this.initService.initLoadData$()
      .subscribe( e => {
        this.basicPassword = this.configService.user_info.password;
        this.commentChangeset = this.configService.getChangeSetComment();
        this.featuresChanges = this.dataService.getGeojsonChanged().features;
      })
  }
  this.basicPassword = this.configService.user_info.password;
}
/**
 * Shows a confirmation dialog before discarding all pending changes;
 * confirming calls cancelAllFeatures(), cancelling does nothing.
 */
presentConfirm() {
  const buttons = [
    {
      text: this.translate.instant('SHARED.CANCEL'),
      role: 'cancel',
      handler: () => {
      }
    },
    {
      text: this.translate.instant('SHARED.CONFIRM'),
      handler: () => {
        this.cancelAllFeatures();
      }
    }
  ];

  this.alertCtrl
    .create({
      header: this.translate.instant('SEND_DATA.DELETE_CONFIRM_HEADER'),
      message: this.translate.instant('SEND_DATA.DELETE_CONFIRM_MESSAGE'),
      buttons
    })
    .then(alert => alert.present());
}
/**
 * Shows `error` to the user in a dismissable alert dialog.
 */
displayError(error) {
  const closeButton = {
    text: this.translate.instant('SHARED.CLOSE'),
    role: 'cancel',
    handler: () => {
    }
  };

  this.alertCtrl
    .create({ message: error, buttons: [closeButton] })
    .then(alert => alert.present());
}
/**
 * Recomputes the per-changeType counters from the current list of changed
 * features (also refreshing `this.featuresChanges` as a side effect).
 *
 * Fixed: the change list was previously fetched from the service twice in a
 * row; an unexpected `changeType` value would also have produced `NaN`
 * counters — unknown types now only count towards 'Total'.
 */
getSummary() {
  const summary = { 'Total': 0, 'Create': 0, 'Update': 0, 'Delete': 0 };
  this.featuresChanges = this.dataService.getGeojsonChanged().features;
  for (const featureChanged of this.featuresChanges) {
    const changeType = featureChanged.properties.changeType;
    if (changeType in summary) {
      summary[changeType]++;
    }
    summary['Total']++;
  }
  return summary;
}
/**
 * Send this feature to OSM.
 *
 * Depending on `featureChanged.properties.changeType` ('Create' | 'Update'
 * | 'Delete'), issues the matching OSM API call inside changeset `CS`
 * using the user's `password`. On success the feature is removed from the
 * "changed" collection (and, for Create/Update, re-inserted into the main
 * geojson with refreshed meta data) and the summary counters are
 * decremented. On failure the error is stored on the feature and the
 * returned promise rejects.
 *
 * Fixed: the Delete/usedByWays branch previously subscribed without an
 * error callback, so the promise hung forever on failure and the feature
 * was never flagged with the error.
 */
private pushFeatureToOsm(featureChanged, CS, password) {
  return new Promise((resolve, reject) => {
    if (featureChanged.properties.changeType === 'Create') {
      this.osmApi.apiOsmCreateNode(featureChanged, CS, password)
        .subscribe(id => {
          // Rebuild the feature as it now exists on OSM (new id, version 1).
          let newFeature = {};
          newFeature['type'] = 'Feature';
          newFeature['id'] = 'node/' + id;
          newFeature['properties'] = {};
          newFeature['geometry'] = cloneDeep(featureChanged.geometry);
          newFeature['properties']['type'] = 'node';
          newFeature['properties']['id'] = id;
          newFeature['properties']['tags'] = cloneDeep(featureChanged.properties.tags);
          newFeature['properties']['meta'] = {};
          newFeature['properties']['meta']['version'] = 1;
          newFeature['properties']['meta']['user'] = this.configService.getUserInfo().display_name;
          newFeature['properties']['meta']['uid'] = this.configService.getUserInfo().uid;
          newFeature['properties']['meta']['timestamp'] = new Date().toISOString();
          newFeature['properties']['time'] = new Date().getTime();
          newFeature = this.mapService.getIconStyle(newFeature); // style
          addAttributesToFeature(newFeature)
          this.summary.Total--;
          this.summary.Create--;
          this.dataService.deleteFeatureFromGeojsonChanged(featureChanged);
          this.dataService.addFeatureToGeojson(newFeature); // insert the newly created feature
          this.featuresChanges = this.dataService.getGeojsonChanged().features;
          resolve(newFeature)
        },
        error => {
          // Keep the feature in the "changed" list, flagged with the error.
          featureChanged['error'] = error.error || error.response || 'oups';
          this.dataService.updateFeatureToGeojsonChanged(featureChanged);
          this.featuresChanges = this.dataService.getGeojsonChanged().features;
          reject(error);
        });
    } else if (featureChanged.properties.changeType === 'Update') {
      this.osmApi.apiOsmUpdateOsmElement(featureChanged, CS, password)
        .subscribe(newVersion => {
          // Refresh the feature's meta data with the new version.
          let newFeature = {};
          newFeature = featureChanged;
          newFeature['properties']['meta']['version'] = newVersion;
          newFeature['properties']['meta']['user'] = this.configService.getUserInfo().display_name;
          newFeature['properties']['meta']['uid'] = this.configService.getUserInfo().uid;
          newFeature['properties']['meta']['timestamp'] = new Date().toISOString();
          newFeature['properties']['time'] = new Date().getTime();
          // Keep the `fixme` marker in sync with the uploaded tags.
          if (newFeature['properties']['tags']['fixme']) {
            newFeature['properties']['fixme'] = true;
          } else {
            if (newFeature['properties']['fixme'])
              delete newFeature['properties']['fixme'];
          }
          if (newFeature['properties']['deprecated']){
            delete newFeature['properties']['deprecated']
          }
          delete newFeature['properties']['changeType'];
          delete newFeature['properties']['originalData'];
          newFeature = this.mapService.getIconStyle(newFeature); // style
          addAttributesToFeature(newFeature)
          this.summary.Total--;
          this.summary.Update--;
          this.dataService.deleteFeatureFromGeojsonChanged(featureChanged);
          this.dataService.addFeatureToGeojson(newFeature);
          this.featuresChanges = this.dataService.getGeojsonChanged().features;
          resolve(newFeature)
        },
        error => {
          featureChanged['error'] = error.error || error.response || 'oups';
          this.dataService.updateFeatureToGeojsonChanged(featureChanged);
          this.featuresChanges = this.dataService.getGeojsonChanged().features;
          reject(error)
        });
    } else if (featureChanged.properties.changeType === 'Delete') {
      if (featureChanged.properties.usedByWays){
        // The node is still referenced by ways, so it cannot be deleted:
        // empty its tags instead.
        // NOTE(review): `clone` is shallow, so emptying `tags` here also
        // mutates `featureChanged.properties` — confirm this is intended.
        let emptyFeaturetags = clone(featureChanged);
        emptyFeaturetags['properties']['tags']= {};
        this.osmApi.apiOsmUpdateOsmElement(emptyFeaturetags, CS, password)
          .subscribe(newVersion => {
            this.summary.Total--;
            this.summary.Delete--;
            this.dataService.deleteFeatureFromGeojsonChanged(featureChanged);
            this.featuresChanges = this.dataService.getGeojsonChanged().features;
            resolve(newVersion);
          },
          error => {
            // Fixed: flag the feature and reject instead of hanging.
            featureChanged['error'] = error.error || error.response || 'oups';
            this.dataService.updateFeatureToGeojsonChanged(featureChanged);
            this.featuresChanges = this.dataService.getGeojsonChanged().features;
            reject(error);
          })
      } else {
        this.osmApi.apiOsmDeleteOsmElement(featureChanged, CS, password)
          .subscribe(id => {
            this.summary.Total--;
            this.summary.Delete--;
            this.dataService.deleteFeatureFromGeojsonChanged(featureChanged);
            this.featuresChanges = this.dataService.getGeojsonChanged().features;
            resolve(id);
          },
          error => {
            featureChanged['error'] = error.error || error.response || 'oups';
            this.dataService.updateFeatureToGeojsonChanged(featureChanged);
            this.featuresChanges = this.dataService.getGeojsonChanged().features;
            reject(error)
          });
      }
    }
  })
}
async presentAlertPassword(user_info) {
const alert = await this.alertCtrl.create({
header: user_info.display_name,
inputs: [
{
name: 'password',
type: 'password',
placeholder: this.translate.instant('SEND_DATA.PASSWORD_OSM')
}
],
buttons: [
{
text: 'Cancel',
role: 'cancel',
cssClass: 'secondary',
handler: (blah) => {
console.log('Confirm Cancel: blah');
}
}, {
text: 'Ok',
handler: (e) => {
console.log(e.password);
this.basicPassword = e.password;
this.pushDataToOsm(this.commentChangeset, this.basicPassword );
}
}
]
});
await alert.present();
}
userIsConnected(password){
return new Promise((resolve, reject) => {
this.osmApi.getUserDetail$(this.configService.user_info.user, password, this.configService.user_info.authType === 'basic' ? true : false, null, true)
.subscribe( u => {
resolve( true)
},
err => {
reject(err.error)
if (this.configService.user_info.authType === 'basic' && !this.configService.user_info.password){
this.basicPassword = null;
this.isPushing = false;
}
// console.log('HTTP Error', err.error)
}
)
})
}
async pushDataToOsm(commentChangeset, password) {
if (this.isPushing) {
console.log('Already being sent')
return;
}
this.configService.setChangeSetComment(commentChangeset);
if (this.configService.user_info.authType == 'basic' && !this.basicPassword){
await this.presentAlertPassword(this.configService.user_info)
return
}
this.isPushing = true;
let userIsConnected;
try {
userIsConnected = await this.userIsConnected(password);
} catch (error) {
this.connectionError = error;
if (this.configService.user_info.authType === 'basic' && !this.configService.user_info.password){
this.basicPassword = null;
this.isPushing = false;
}
this.isPushing = false;
return;
}
this.connectionError = undefined;
this.osmApi.getValidChangset(commentChangeset, password)
.pipe()
.subscribe(async CS => {
const cloneGeojsonChanged = this.dataService.getGeojsonChanged()
this.changesetId = CS;
for (let feature of cloneGeojsonChanged.features) {
try {
await this.pushFeatureToOsm(feature, this.changesetId, password)
} catch (error) {
console.log(error)
}
};
this.isPushing = false;
this.summary = this.getSummary();
this.mapService.eventMarkerReDraw.emit(this.dataService.getGeojson());
this.mapService.eventMarkerChangedReDraw.emit(this.dataService.getGeojsonChanged());
this.featuresChanges = this.dataService.getGeojsonChanged().features;
if (this.dataService.getGeojsonChanged().features.length === 0) { // Y'a plus d'éléments à pusher
this.navCtrl.pop();
}
});
}
async cancelAllFeatures() { // rollBack
const featuresChanged = this.dataService.getGeojsonChanged().features;
for (let feature of featuresChanged) {
this.dataService.cancelFeatureChange(feature);
}
await this.dataService.resetGeojsonChanged();
this.summary = this.getSummary();
this.featuresChanges = this.dataService.getGeojsonChanged().features;
timer(100).subscribe(t => {
this.mapService.eventMarkerReDraw.emit(this.dataService.getGeojson());
this.mapService.eventMarkerChangedReDraw.emit(this.dataService.getGeojsonChanged());
this.navCtrl.pop();
});
}
centerToElement(pointCoordinates) {
if (this.mapService.map.getZoom() < 18.5) {
this.mapService.map.setZoom(18.5);
}
this.mapService.map.setCenter(pointCoordinates);
this.navCtrl.pop();
}
    ngAfterViewInit() {
        // Compute the initial change summary once the view has been initialized.
        this.summary = this.getSummary();
    }
} | the_stack |
import qs from 'qs';
import { isObject, has, merge, isArray, map, get } from 'lodash';
import { tokenize, dataMapping } from './tpl-builtin';
import { evalExpression } from './tpl';
import {
isObjectShallowModified,
hasFile,
object2formData,
qsstringify,
cloneObject,
createObject,
} from './helper';
import { Api, ApiObject, Payload, FetcherType, ApiType } from './types';
// Matches the optional http-method prefix of a string api definition,
// e.g. "get:/api/x" or "raw:post:/api/x"; capture group 1 is the method.
const rSchema = /(?:^|raw\:)(get|post|put|delete|patch|options|head):/i;
// A cached api entry: the api object plus its stored result and the
// timestamp used for expiry (compared against `cache`, in milliseconds).
interface ApiCacheConfig extends ApiObject {
  result: any;
  requestTime: number;
}
// Module-level cache shared by getApiCache/setApiCache/clearApiCache.
const apiCaches: Array<ApiCacheConfig> = [];
/**
 * Normalizes an api definition into an `ApiObject`.
 *
 * A string form like `"get:/api/user"` (optionally prefixed with `raw:`)
 * is split into its method and url parts; an object form is returned as-is.
 *
 * @param api           api definition, string shorthand or object
 * @param defaultMethod method used when the string carries no explicit one
 */
export function normalizeApi(api: Api, defaultMethod?: string): ApiObject {
  if (typeof api === 'string') {
    // Use exec() instead of the deprecated RegExp.$1 static, which relies
    // on global mutable state and breaks if another regex runs in between.
    const matched = rSchema.exec(api);
    const method = matched ? matched[1] : '';
    method && (api = api.replace(method + ':', ''));
    api = {
      method: (method || defaultMethod) as any,
      url: api,
    };
  }
  return api;
}
/**
 * Builds a concrete ApiObject from an api definition and a data scope:
 * normalizes the definition, resolves `${...}` tokens in the url, maps
 * query/body data, and compiles string adaptors into functions.
 *
 * @param api     api definition (string shorthand or object)
 * @param data    data scope used for token and data mapping
 * @param options `autoAppend` attaches data to GET urls, `ignoreData`
 *                skips query/body mapping; everything else is copied
 *                into `api.config`
 */
export function buildApi(
  api: Api,
  data?: object,
  options: {
    autoAppend?: boolean;
    ignoreData?: boolean;
    [propName: string]: any;
  } = {},
): ApiObject {
  api = normalizeApi(api, options.method);
  const { autoAppend, ignoreData, ...rest } = options;
  api.config = {
    ...rest,
  };
  api.method = (api.method || (options as any).method || 'get').toLowerCase();
  if (!data) {
    return api;
  } else if (
    // Binary-ish payloads are attached as-is; no mapping applies.
    data instanceof FormData ||
    data instanceof Blob ||
    data instanceof ArrayBuffer
  ) {
    api.data = data;
    return api;
  }
  // Keep the raw url around: a '$' in it signals that tokens were present.
  const raw = api.url || '';
  const idx = api.url.indexOf('?');
  if (~idx) {
    // Url already carries a query string: data-map the query params and
    // tokenize the path part separately, preserving any '#hash' suffix.
    const hashIdx = api.url.indexOf('#');
    const params = qs.parse(
      api.url.substring(idx + 1, ~hashIdx ? hashIdx : undefined),
    );
    api.url =
      tokenize(api.url.substring(0, idx + 1), data, '| urlEncode') +
      qsstringify((api.query = dataMapping(params, data))) +
      (~hashIdx ? api.url.substring(hashIdx) : '');
  } else {
    api.url = tokenize(api.url, data, '| urlEncode');
  }
  if (ignoreData) {
    return api;
  }
  if (api.data) {
    api.body = api.data = dataMapping(api.data, data);
  } else if (api.method === 'post' || api.method === 'put') {
    // POST/PUT with no explicit data: send a clone of the whole scope.
    api.body = api.data = cloneObject(data);
  }
  // GET requests: append the data to the url.
  if (api.method === 'get') {
    if (!raw.includes('$') && !api.data && autoAppend) {
      api.query = api.data = data;
    } else if (
      api.attachDataToQuery === false &&
      api.data &&
      !raw.includes('$') &&
      autoAppend
    ) {
      const idx = api.url.indexOf('?');
      if (~idx) {
        // Merge the scope data into the existing query string.
        const params = (api.query = {
          ...qs.parse(api.url.substring(idx + 1)),
          ...data,
        });
        api.url = api.url.substring(0, idx) + '?' + qsstringify(params);
      } else {
        api.query = data;
        api.url += '?' + qsstringify(data);
      }
    }
    if (api.data && api.attachDataToQuery !== false) {
      const idx = api.url.indexOf('?');
      if (~idx) {
        const params = (api.query = {
          ...qs.parse(api.url.substring(idx + 1)),
          ...api.data,
        });
        api.url = api.url.substring(0, idx) + '?' + qsstringify(params);
      } else {
        api.query = api.data;
        api.url += '?' + qsstringify(api.data);
      }
      // The data has been moved into the query string.
      delete api.data;
    }
  }
  if (api.headers) {
    api.headers = dataMapping(api.headers, data);
  }
  // String adaptors are compiled into functions once, here.
  if (api.requestAdaptor && typeof api.requestAdaptor === 'string') {
    api.requestAdaptor = str2function(api.requestAdaptor, 'api') as any;
  }
  if (api.adaptor && typeof api.adaptor === 'string') {
    api.adaptor = str2function(
      api.adaptor,
      'payload',
      'response',
      'api',
    ) as any;
  }
  return api;
}
/**
 * Compiles a code string into a callable taking the given argument names.
 * Returns `null` (after logging a warning) when the source fails to parse.
 */
function str2function(
  contents: string,
  ...args: Array<string>
): Function | null {
  try {
    return new Function(...args, contents);
  } catch (e) {
    console.warn(e);
    return null;
  }
}
/**
 * Normalizes a raw server response into the internal `Payload` shape.
 *
 * A response without a `status` field is treated as successful; otherwise
 * `status == 0` means ok. Loose equality is kept on purpose so numeric
 * and string status codes both match.
 *
 * @throws Error when the response is empty/falsy
 */
function responseAdaptor(ret: any, api: ApiObject) {
  let hasStatusField = true;
  if (!ret) {
    throw new Error('Response is empty!');
  } else if (!has(ret, 'status')) {
    hasStatusField = false;
  }
  const result = ret.data || ret.result;
  const payload: Payload = {
    ok: hasStatusField === false || ret.status == 0,
    status: hasStatusField === false ? 0 : ret.status,
    msg: ret.msg || ret.message,
    msgTimeout: ret.msgTimeout,
    data: Array.isArray(result) ? { items: result } : result, // compat: wrap arrays that are returned directly
    isNotState: api.isNotState,
    isPageState: api.isPageState,
  };
  // 422: surface validation errors separately.
  if (payload.status == 422) {
    payload.errors = ret.errors;
  }
  // Optional response-data mapping, evaluated against the payload data.
  if (payload.ok && api.responseData) {
    payload.data = dataMapping(
      api.responseData,
      createObject({ api }, payload.data || {}),
    );
  }
  return payload;
}
/**
 * Fallback fetcher based on the standard `fetch` API: extracts `url`
 * from the api object and passes everything else through as options.
 */
export const defaultFetcher = async (api: any) => {
  const { url, ...init } = api;
  const response = await fetch(url, init);
  return response.json();
};
/**
 * Wraps a fetcher function so that every call:
 *  1. builds the api (token/data mapping) via {@link buildApi},
 *  2. runs the optional request adaptor,
 *  3. serializes the body according to `dataType`,
 *  4. resolves from the api cache when `api.cache` is set,
 *  5. pipes the result through the response adaptor.
 */
export function wrapFetcher(
  fn: FetcherType = defaultFetcher,
): (api: Api, data: object, options?: object) => Promise<Payload | void> {
  return async function (api, data, options) {
    api = buildApi(api, data, options) as ApiObject;
    // The request adaptor may replace the whole api object.
    api.requestAdaptor && (api = api.requestAdaptor(api));
    if (api.data && (hasFile(api.data) || api.dataType === 'form-data')) {
      // Multipart: files present or form-data explicitly requested.
      api.data = object2formData(api.data, api.qsOptions);
    } else if (
      api.data &&
      typeof api.data !== 'string' &&
      api.dataType === 'form'
    ) {
      api.data = qsstringify(api.data, api.qsOptions) as any;
      api.headers = api.headers || (api.headers = {});
      api.headers['Content-Type'] = 'application/x-www-form-urlencoded';
    } else if (
      api.data &&
      typeof api.data !== 'string' &&
      api.dataType === 'json'
    ) {
      api.data = JSON.stringify(api.data) as any;
      api.headers = api.headers || (api.headers = {});
      api.headers['Content-Type'] = 'application/json';
    }
    if (typeof api.cache === 'number' && api.cache > 0) {
      // BUG FIX: look the cache up BEFORE issuing the request. Previously
      // the request was always fired and a cache hit merely discarded the
      // fresh result, so caching never avoided any network traffic.
      const apiCache = getApiCache(api);
      if (apiCache) {
        return wrapAdaptor((apiCache as ApiCacheConfig).result, api);
      }
      return wrapAdaptor(setApiCache(api, await fn(api)), api);
    }
    return wrapAdaptor(await fn(api), api);
  };
}
/**
 * Applies the api's (optional) adaptor to the raw result, then normalizes
 * the outcome into the standard `Payload` shape.
 */
export async function wrapAdaptor(result: any, api: ApiObject) {
  // Keep the bare call (not `api.adaptor(...)`) so `this` stays unbound,
  // exactly as before.
  const adaptor = api.adaptor;
  const adapted = adaptor ? adaptor(result, api) : result;
  return responseAdaptor(adapted, api);
}
/**
 * Decides whether `nextApi` differs enough from `prevApi` (given the two
 * data scopes) that it should be re-fetched.
 */
export function isApiOutdated(
  prevApi: Api | undefined,
  nextApi: Api | undefined,
  prevData: any,
  nextData: any,
  isFirst?: boolean,
): nextApi is Api {
  // For string apis, get() falls back to the string itself.
  const nextUrl: string = get(nextApi, 'url', nextApi);
  const prevUrl: string = get(prevApi, 'url', prevApi);
  if (nextUrl !== prevUrl && isValidApi(nextUrl)) return true;
  // autoRefresh === false suppresses refreshes after the first fetch.
  if (!isFirst && get(nextApi, 'autoRefresh') === false) {
    return false;
  }
  // A '$' means the url contains tokens: build both apis against their
  // respective data scopes and compare the resolved urls.
  if (typeof nextUrl === 'string' && nextUrl.includes('$')) {
    prevApi = buildApi(prevApi as Api, prevData as object, {
      ignoreData: true,
    });
    nextApi = buildApi(nextApi as Api, nextData as object, {
      ignoreData: true,
    });
    return !!(
      prevApi.url !== nextApi.url &&
      isValidApi(nextApi.url) &&
      (!nextApi.sendOn || evalExpression(nextApi.sendOn, nextData))
    );
  }
  return false;
}
/**
 * Tells whether the given url looks like a valid api address: an optional
 * http(s)/ws(s)/taf scheme plus host, followed by path segments and an
 * optional query string.
 *
 * @returns `true` for a syntactically valid, non-empty api url
 */
export function isValidApi(api: string): boolean {
  // Coerce to a real boolean: the previous `api && regex.test(api)` form
  // leaked `''`/`undefined` to callers expecting a boolean.
  return !!(
    api &&
    /^(?:(https?|wss?|taf):\/\/[^\/]+)?(\/?[^\s\/\?]*){1,}(\?.*)?$/.test(api)
  );
}
/**
 * Type guard telling whether the api should actually be fetched, taking
 * `initFetch`, `initFetchOn` and the api's own `sendOn` into account.
 */
export function isEffectiveApi(
  api?: Api,
  data?: any,
  initFetch?: boolean,
  initFetchOn?: string,
): api is Api {
  // Early outs: nothing configured, or fetching explicitly disabled.
  if (!api || initFetch === false) {
    return false;
  }
  if (initFetchOn && data && !evalExpression(initFetchOn, data)) {
    return false;
  }
  if (typeof api === 'string') {
    return api.length > 0;
  }
  if (isObject(api) && (api as ApiObject).url) {
    const sendOn = (api as ApiObject).sendOn;
    if (sendOn && data && !evalExpression(sendOn as string, data)) {
      return false;
    }
    return true;
  }
  return false;
}
/**
 * Two apis are "the same" when method, url and (shallowly) their data
 * all match — used for cache lookups.
 */
export function isSameApi(
  apiA: ApiObject | ApiCacheConfig,
  apiB: ApiObject | ApiCacheConfig,
): boolean {
  if (apiA.method !== apiB.method || apiA.url !== apiB.url) {
    return false;
  }
  return !isObjectShallowModified(apiA.data, apiB.data, false);
}
/**
 * Looks the api up in the cache, evicting expired entries as it walks,
 * and stops at the first matching entry (later expired entries are left
 * for a future lookup — same behavior as before).
 */
export function getApiCache(api: ApiObject): ApiCacheConfig | undefined {
  const now = Date.now();
  let i = 0;
  while (i < apiCaches.length) {
    const entry = apiCaches[i];
    if (now - entry.requestTime > (entry.cache as number)) {
      // Expired: drop it and re-check the element that shifted into slot i.
      apiCaches.splice(i, 1);
      continue;
    }
    if (isSameApi(api, entry)) {
      return entry;
    }
    i++;
  }
  return undefined;
}
export function setApiCache(api: ApiObject, result: any) {
apiCaches.push({
...api,
result,
requestTime: Date.now(),
});
return result;
}
/**
 * Converts a successful payload (`status === 0`, not flagged `isNotState`)
 * into a state patch: `pageState` when `isPageState` is set, `state`
 * otherwise. Returns an empty object in every other case.
 */
export const handleResponseState = (response: Payload) => {
  const { isPageState, isNotState, data, status } = response;
  const state = {};
  if (!isNotState && status === 0) {
    merge(state, isPageState ? { pageState: data } : { state: data });
  }
  return state;
};
/**
 * Fetches `nextApi` (or each entry of an api array) when it is outdated
 * relative to `prevApi`, and converts the first available payload into
 * a state patch via {@link handleResponseState}.
 */
export const fetchData = async (
  fetcher: FetcherType | undefined,
  prevApi: ApiType | undefined,
  nextApi: ApiType | undefined,
  prevData: any,
  nextData: any,
  isFirst?: boolean,
  options?: object,
) => {
  if (fetcher && nextApi) {
    if (isArray(nextApi)) {
      // Fire all outdated apis in parallel; allSettled keeps a single
      // failure from masking the other results.
      const stateArr = await Promise.allSettled(
        map(nextApi, (api, i) => {
          if (
            isApiOutdated(get(prevApi, i), api, prevData, nextData, isFirst)
          ) {
            return wrapFetcher(fetcher)(api, nextData, options);
          }
        }),
      );
      // Only the first fulfilled, non-empty payload wins.
      for (const result of stateArr) {
        if (get(result, 'value')) {
          return handleResponseState(get(result, 'value'));
        }
      }
    } else {
      if (isApiOutdated(prevApi as Api, nextApi, prevData, nextData, isFirst)) {
        const result = await wrapFetcher(fetcher)(nextApi, nextData, options);
        if (result) {
          return handleResponseState(result);
        }
      }
    }
  }
};
/**
 * Empties the api cache in place. The array reference is shared across
 * the module, so it must be truncated rather than reassigned.
 */
export function clearApiCache() {
  apiCaches.length = 0;
}
// window.apiCaches = apiCaches; | the_stack |
import { IAcknowledgeStartupWarningsAction } from "../actions/defs";
/**
 * Defines our localized message bundle. Any property name here
 * can be used as a key in any {@link tr} call
 *
 * Any translated string bundles must conform to the shape as defined by
 * this interface
 *
 * @export
 * @interface ILocalizedMessages
 */
export interface ILocalizedMessages {
    // --- Generic severity / misc labels ---
    NONE: string;
    ERROR: string;
    WARNING: string;
    PBMG: string;
    // --- Viewer startup warnings ---
    INIT_WARNINGS_FOUND: string;
    INIT_WARNING_BING_API_KEY_REQD: string;
    INIT_WARNING_BING_UNKNOWN_LAYER: string;
    INIT_WARNING_UNSUPPORTED_GOOGLE_MAPS: string;
    INIT_WARNING_NO_CONTEXT_MENU: string;
    // --- Layer labels ---
    LAYER_TRANSPARENCY: string;
    LAYER_ID_BASE: string;
    LAYER_ID_MG_BASE: string;
    LAYER_ID_SUBJECT: string;
    LAYER_ID_MG_SEL_OVERLAY: string;
    // --- Widget / command lookup errors ---
    UNKNOWN_WIDGET: string;
    UNKNOWN_COMMAND_REFERENCE: string;
    // --- Initialization progress and errors ---
    INIT: string;
    INIT_DESC: string;
    INIT_ERROR_TITLE: string;
    INIT_ERROR_UNKNOWN_RESOURCE_TYPE: string;
    INIT_ERROR_MISSING_RESOURCE_PARAM: string;
    INIT_ERROR_UNSUPPORTED_COORD_SYS: string;
    INIT_ERROR_UNREGISTERED_EPSG_CODE: string;
    INIT_ERROR_EXPIRED_SESSION: string;
    INIT_ERROR_RESOURCE_NOT_FOUND: string;
    INIT_ERROR_NO_CONNECTION: string;
    // --- Sidebar template labels ---
    TPL_SIDEBAR_OPEN_TASKPANE: string;
    TPL_SIDEBAR_OPEN_LEGEND: string;
    TPL_SIDEBAR_OPEN_SELECTION_PANEL: string;
    TPL_TITLE_TASKPANE: string;
    TPL_TITLE_LEGEND: string;
    TPL_TITLE_SELECTION_PANEL: string;
    // --- Navigation tooltips ---
    TT_GO_HOME: string;
    TT_GO_BACK: string;
    TT_GO_FORWARD: string;
    // --- Session expiry ---
    SESSION_EXPIRED: string;
    SESSION_EXPIRED_DETAILED: string;
    SESSION_EXPIRED_AVAILABLE_ACTIONS: string;
    SESSION_EXPIRED_RELOAD_VIEWER: string;
    // --- Layout / component registration errors ---
    ERR_UNREGISTERED_LAYOUT: string;
    ERR_UNREGISTERED_COMPONENT: string;
    ERR_NO_COMPONENT_ID: string;
    // --- Misc UI (FMT_* keys are format strings) ---
    LOADING_MSG: string;
    MENU_TASKS: string;
    NO_SELECTED_FEATURES: string;
    FMT_SCALE_DISPLAY: string;
    FMT_SELECTION_COUNT: string;
    // --- Digitization prompts ---
    DIGITIZE_POINT_PROMPT: string;
    DIGITIZE_LINE_PROMPT: string;
    DIGITIZE_LINESTRING_PROMPT: string;
    DIGITIZE_CIRCLE_PROMPT: string;
    DIGITIZE_RECT_PROMPT: string;
    DIGITIZE_POLYGON_PROMPT: string;
    // --- Measure tool ---
    MEASURE: string;
    MEASURE_SEGMENT: string;
    MEASURE_LENGTH: string;
    MEASURE_SEGMENT_PART: string;
    MEASURE_TOTAL_AREA: string;
    MEASURE_TOTAL_LENGTH: string;
    MEASURING: string;
    MEASURING_MESSAGE: string;
    MEASUREMENT: string;
    MEASUREMENT_TYPE: string;
    MEASUREMENT_TYPE_LENGTH: string;
    MEASUREMENT_TYPE_AREA: string;
    MEASUREMENT_CLEAR: string;
    MEASUREMENT_CONTINUE_POLYGON: string;
    MEASUREMENT_CONTINUE_LINE: string;
    MEASUREMENT_START_DRAWING: string;
    MEASUREMENT_START: string;
    MEASUREMENT_END: string;
    // --- Navigator control ---
    NAVIGATOR_PAN_EAST: string;
    NAVIGATOR_PAN_WEST: string;
    NAVIGATOR_PAN_SOUTH: string;
    NAVIGATOR_PAN_NORTH: string;
    NAVIGATOR_ZOOM_OUT: string;
    NAVIGATOR_ZOOM_IN: string;
    FMT_NAVIGATOR_ZOOM_TO_SCALE: string;
    // --- Base layers and selection panel ---
    EXTERNAL_BASE_LAYERS: string;
    SELECTION_PROPERTY: string;
    SELECTION_VALUE: string;
    SELECTION_PREV_FEATURE: string;
    SELECTION_NEXT_FEATURE: string;
    SELECTION_ZOOMTO_FEATURE: string;
    // --- Viewer options / about / help ---
    VIEWER_OPTIONS: string;
    ABOUT: string;
    HELP: string;
    // --- QuickPlot component ---
    QUICKPLOT_HEADER: string;
    QUICKPLOT_TITLE: string;
    QUICKPLOT_SUBTITLE: string;
    QUICKPLOT_PAPER_SIZE: string;
    QUICKPLOT_ORIENTATION: string;
    QUICKPLOT_ORIENTATION_P: string;
    QUICKPLOT_ORIENTATION_L: string;
    QUICKPLOT_SHOWELEMENTS: string;
    QUICKPLOT_SHOWLEGEND: string;
    QUICKPLOT_SHOWNORTHARROW: string;
    QUICKPLOT_SHOWCOORDINTES: string;
    QUICKPLOT_SHOWSCALEBAR: string;
    QUICKPLOT_SHOWDISCLAIMER: string;
    QUICKPLOT_ADVANCED_OPTIONS: string;
    QUICKPLOT_SCALING: string;
    QUICKPLOT_DPI: string;
    QUICKPLOT_BOX_INFO: string;
    QUICKPLOT_BOX_ROTATION: string;
    QUICKPLOT_GENERATE: string;
    QUICKPLOT_COMMERCIAL_LAYER_WARNING: string;
    // --- Feature tooltips and geolocation ---
    FEATURE_TOOLTIPS: string;
    MANUAL_FEATURE_TOOLTIPS: string;
    GEOLOCATION_SUCCESS: string;
    GEOLOCATION_WARN_OUTSIDE_MAP: string;
    GEOLOCATION_ERROR: string;
    // --- Task pane ---
    TASK_PANE_RESIZING: string;
    TASK_PANE_LOADING: string;
    TASK_PANE_LOADING_DESC: string;
    // --- Coordinate tracker and map size ---
    COORDTRACKER: string;
    COORDTRACKER_NO_PROJECTIONS: string;
    MAP_SIZE_DISPLAY_UNITS: string;
    OK: string;
    // --- External layer management (add/manage layers UI) ---
    ADD_MANAGE_LAYERS: string;
    ADD_LAYER: string;
    ADD_LAYER_TILED: string;
    MANAGE_LAYERS: string;
    LAYER_TYPE: string;
    SELECT_LAYER_TYPE: string;
    ADD_WMS_LAYER_URL: string;
    ADD_WMS_LAYER_LOADING: string;
    ADD_WMS_LAYER_LOADING_DESC: string;
    ADD_WMS_LAYER_NO_LAYERS: string;
    WMS_VERSION: string;
    ADD_WFS_LAYER_URL: string;
    ADD_WFS_LAYER_LOADING: string;
    ADD_WFS_LAYER_LOADING_DESC: string;
    ADD_WFS_LAYER_NO_LAYERS: string;
    WFS_NO_LAYER_DESCRIPTION: string;
    WFS_VERSION: string;
    OWS_SERVICE_NAME: string;
    OWS_SERVICE_TITLE: string;
    OWS_SERVICE_ABSTRACT: string;
    WMS_LAYERS: string;
    WFS_LAYERS: string;
    OWS_LAYER_NAME: string;
    OWS_LAYER_TITLE: string;
    OWS_LAYER_ABSTRACT: string;
    OWS_LAYER_CRS: string;
    OWS_ADD_LAYER_PROMPT: string;
    ADDED_LAYER: string;
    REMOVED_LAYER: string;
    ADD_WFS_LAYER: string;
    SHARE_LINK_COPY_CLIPBOARD: string;
    SHARE_LINK_COPIED: string;
    WMS_NO_LAYER_DESCRIPTION: string;
    // --- Measurement units (full names) ---
    UNIT_UNKNOWN: string;
    UNIT_INCHES: string;
    UNIT_FEET: string;
    UNIT_YARDS: string;
    UNIT_MILES: string;
    UNIT_NAUT_MILES: string;
    UNIT_MILLIMETERS: string;
    UNIT_CENTIMETERS: string;
    UNIT_METERS: string;
    UNIT_KILOMETERS: string;
    UNIT_DEGREES: string;
    UNIT_DEC_DEGREES: string;
    UNIT_DMS: string;
    UNIT_PIXELS: string;
    // --- Measurement units (abbreviations) ---
    UNIT_ABBR_UNKNOWN: string;
    UNIT_ABBR_INCHES: string;
    UNIT_ABBR_FEET: string;
    UNIT_ABBR_YARDS: string;
    UNIT_ABBR_MILES: string;
    UNIT_ABBR_NAUT_MILES: string;
    UNIT_ABBR_MILLIMETERS: string;
    UNIT_ABBR_CENTIMETERS: string;
    UNIT_ABBR_METERS: string;
    UNIT_ABBR_KILOMETERS: string;
    UNIT_ABBR_DEGREES: string;
    UNIT_ABBR_DEC_DEGREES: string;
    UNIT_ABBR_DMS: string;
    UNIT_ABBR_PIXELS: string;
    // --- Unit format strings ---
    UNIT_FMT_M: string;
    UNIT_FMT_KM: string;
    UNIT_FMT_SQM: string;
    UNIT_FMT_SQKM: string;
    // --- OpenLayers control tooltips ---
    OL_ATTRIBUTION_TIP: string;
    OL_OVERVIEWMAP_TIP: string;
    OL_RESET_ROTATION_TIP: string;
    // --- Misc labels ---
    FEATURE_TOOLTIP_URL_HELP_TEXT: string;
    SHARE_LINK_INCLUDE_SESSION: string;
    WINDOW_RESIZING: string;
    WINDOW_MOVING: string;
    OTHER_THEME_RULE_COUNT: string;
    LEGEND_FILTER_LAYERS: string;
    // --- Add-layer (file / url) workflow ---
    ADD_LAYER_KIND_PROMPT: string;
    LAYER_KIND_FILE: string;
    LAYER_KIND_URL: string;
    ADD_FILE: string;
    ADD_FILE_INSTRUCTIONS: string;
    FMT_UPLOADED_FILE: string;
    ADD_LOCAL_FILE_LAYER_FAILURE_NOT_TEXT: string;
    ADD_LOCAL_FILE_LAYER_FAILURE: string;
    WMS_UNSUPPORTED_VERSION: string;
    NO_EXTERNAL_LAYERS: string;
    NO_EXTERNAL_LAYERS_DESC: string;
    LAYER_OPACITY: string;
    LAYER_HEATMAP_BLUR: string;
    LAYER_HEATMAP_RADIUS: string;
    LAYER_NAME_EXISTS: string;
    // --- Layer manager tooltips ---
    LAYER_MANAGER_TT_MOVE_UP: string;
    LAYER_MANAGER_TT_MOVE_DOWN: string;
    LAYER_MANAGER_TT_ZOOM_EXTENTS: string;
    LAYER_MANAGER_TT_REMOVE: string;
    LAYER_MANAGER_TT_EDIT_STYLE: string;
    LAYER_MANAGER_TT_MORE_OPTIONS: string;
    CANCEL: string;
    UNKNOWN_FILE_TYPE: string;
    WMS_SERVICE_INFO: string;
    WFS_SERVICE_INFO: string;
    WMS_AVAILABLE_LAYERS: string;
    WFS_AVAILABLE_LAYERS: string;
    ADD_LAYER_WITH_WMS_STYLE: string;
    ADD_LAYER_WITH_WMS_STYLE_TILED: string;
    WMS_LEGEND: string;
    // --- Vector style editor (VSED_* keys) ---
    VSED_NO_STYLES_TITLE: string;
    VSED_NO_STYLES_DESC: string;
    VECTOR_LAYER_STYLE: string;
    VSED_PT_FILL_COLOR: string;
    VSED_PT_FILL_COLOR_ALPHA: string;
    VSED_PT_RADIUS: string;
    VSED_PT_OUTLINE_COLOR: string;
    VSED_PT_OUTLINE_COLOR_ALPHA: string;
    VSED_PT_OUTLINE_WIDTH: string;
    VSED_LN_OUTLINE_COLOR: string;
    VSED_LN_OUTLINE_COLOR_ALPHA: string;
    VSED_LN_OUTLINE_THICKNESS: string;
    VSED_PL_FILL_COLOR: string;
    VSED_PL_FILL_COLOR_ALPHA: string;
    VSED_PL_OUTLINE_COLOR: string;
    VSED_PL_OUTLINE_COLOR_ALPHA: string;
    VSED_PL_OUTLINE_THICKNESS: string;
    VSED_TAB_POINT: string;
    VSED_TAB_LINE: string;
    VSED_TAB_POLY: string;
    VSED_PT_TYPE: string;
    VSED_PT_TYPE_CIRCLE: string;
    VSED_PT_TYPE_ICON: string;
    VSED_PT_ICON_ANCHOR: string;
    VSED_PT_ICON_ANCHOR_H: string;
    VSED_PT_ICON_ANCHOR_V: string;
    VSED_PT_ICON_SRC: string;
    VSED_PT_ICON_ROTATE_WITH_VIEW: string;
    VSED_PT_ICON_ROTATION: string;
    //VSED_PT_ICON_OPACITY: string;
    VSED_PT_ICON_SCALE: string;
    // --- Add-layer workflow (continued) ---
    ADD_LAYER_PROJECTION: string;
    ADDING_LAYER_ERROR: string;
    LOADING_LAYER: string;
    ACTION_CLOSE: string;
    MORE_LAYER_OPTIONS: string;
    CHOOSE_FILE: string;
    BROWSE: string;
    SEL_FEATURE_PROPERTIES: string;
    SEL_FEATURE_PROPERTIES_NONE: string;
    SEL_CLUSTER_PROPERTIES: string;
    ADD_LOCAL_FILE_LAYER_FAILURE_NO_FORMATS: string;
    // --- Clustering and labeling ---
    ENABLE_CLUSTERING: string;
    POINT_CLUSTER_DISTANCE: string;
    EXPR_NOT_SET: string;
    ENABLE_LABELS: string;
    LABEL_TEXT: string;
    LABEL_SIZE: string;
    LABEL_COLOR: string;
    LABEL_OUTLINE_COLOR: string;
    LABEL_OUTLINE_THICKNESS: string;
    LABEL_BOLD: string;
    LABEL_ITALIC: string;
    LABEL_LINE_PLACEMENT: string;
    ADD_FILE_PROCESSING: string;
    // --- Themed / clustered / heatmap vector layer creation ---
    GENERATE_THEMABLE_LAYER: string;
    THEME_ON_PROPERTY: string;
    COLORBREWER_THEME: string;
    LABEL_USING_PROPERTY: string;
    CLUSTER_CLICK_ACTION: string;
    CLUSTER_CLICK_ACTION_SHOW_POPUP: string;
    CLUSTER_CLICK_ACTION_ZOOM_EXTENTS: string;
    EXTERNAL_LAYERS: string;
    CREATE_VECTOR_LAYER: string;
    CREATE_VECTOR_THEMED: string;
    CREATE_VECTOR_CLUSTERED: string;
    CREATE_VECTOR_HEATMAP: string;
    ENABLE_SELECT_DRAGPAN: string;
    // --- Arbitrary coordinate-system warnings ---
    INIT_WARNING_ARBITRARY_COORDSYS_INCOMPATIBLE_LAYER: string;
    INIT_WARNING_ARBITRARY_COORDSYS_UNSUPPORTED_WIDGET: string;
}
import * as Long from 'long';
import {
BigDecimal,
LocalDate,
LocalDateTime,
LocalTime,
OffsetDateTime
} from '../core';
/**
 * Portable field type.
 */
export enum FieldType {
    PORTABLE = 0,
    BYTE = 1,
    BOOLEAN = 2,
    CHAR = 3,
    SHORT = 4,
    INT = 5,
    LONG = 6,
    FLOAT = 7,
    DOUBLE = 8,
    UTF = 9, // Defined for backwards compatibility.
    STRING = 9,
    PORTABLE_ARRAY = 10,
    BYTE_ARRAY = 11,
    BOOLEAN_ARRAY = 12,
    CHAR_ARRAY = 13,
    SHORT_ARRAY = 14,
    INT_ARRAY = 15,
    LONG_ARRAY = 16,
    FLOAT_ARRAY = 17,
    DOUBLE_ARRAY = 18,
    UTF_ARRAY = 19, // Defined for backwards compatibility.
    STRING_ARRAY = 19,
    DECIMAL = 20,
    DECIMAL_ARRAY = 21,
    TIME = 22,
    TIME_ARRAY = 23,
    DATE = 24,
    DATE_ARRAY = 25,
    TIMESTAMP = 26,
    TIMESTAMP_ARRAY = 27,
    // Explicit value for consistency with every other member; the previous
    // implicit auto-increment also produced 28, so this is behavior-neutral.
    TIMESTAMP_WITH_TIMEZONE = 28,
    TIMESTAMP_WITH_TIMEZONE_ARRAY = 29
}
/**
 * Writer helper for {@link Portable} objects.
 */
export interface PortableWriter {
    /**
     * Writes a number as 32-bit signed integer.
     *
     * @param fieldName name of the field
     * @param value int value to be written. The value must be a valid signed 32-bit integer.
     *              Behavior is undefined when value is anything other than a signed 32-bit integer.
     */
    writeInt(fieldName: string, value: number): void;
    /**
     * Writes a long.
     *
     * @param fieldName name of the field
     * @param value long value to be written
     */
    writeLong(fieldName: string, value: Long): void;
    /**
     * Writes a string as UTF-8 encoded bytes.
     *
     * @param fieldName name of the field
     * @param value UTF string value to be written
     * @deprecated since version 4.2 for the sake of better naming. Please use {@link writeString} instead.
     */
    writeUTF(fieldName: string, value: string | null): void;
    /**
     * Writes a string as UTF-8 encoded bytes.
     *
     * @param fieldName name of the field
     * @param value UTF string value to be written
     */
    writeString(fieldName: string, value: string | null): void;
    /**
     * Writes a primitive boolean.
     *
     * @param fieldName name of the field
     * @param value boolean value to be written
     */
    writeBoolean(fieldName: string, value: boolean): void;
    /**
     * Writes a number as 8-bit unsigned integer.
     *
     * @param fieldName name of the field
     * @param value byte value to be written. Must be a valid unsigned 8-bit integer.
     *              Behavior is undefined when value is anything other than a unsigned 8-bit integer.
     */
    writeByte(fieldName: string, value: number): void;
    /**
     * Writes a single character string using `char.charCodeAt(0)`.
     *
     * A two-byte unsigned integer representing the UTF-16
     * code unit value of the single character string will be written.
     *
     * @param fieldName name of the field
     * @param value char value to be written
     */
    writeChar(fieldName: string, value: string): void;
    /**
     * Writes a number as double.
     *
     * @param fieldName name of the field
     * @param value double value to be written. The value must be a JavaScript number.
     *              Behavior is undefined when value is anything other than a JavaScript number.
     */
    writeDouble(fieldName: string, value: number): void;
    /**
     * Writes a number as float.
     *
     * @param fieldName name of the field
     * @param value float value to be written. The value must be a JavaScript number.
     *              Behavior is undefined when value is anything other than a JavaScript number.
     */
    writeFloat(fieldName: string, value: number): void;
    /**
     * Writes a number as 16-bit signed integer.
     *
     * @param fieldName name of the field
     * @param value short value to be written. The value must be a valid signed 16-bit integer.
     *              Behavior is undefined when value is anything other than an signed 16-bit integer.
     */
    writeShort(fieldName: string, value: number): void;
    /**
     * Writes a Portable.
     * Use {@link writeNullPortable} to write a `null` Portable
     *
     * @param fieldName name of the field
     * @param portable Portable to be written
     */
    writePortable(fieldName: string, portable: Portable | null): void;
    /**
     * To write a `null` Portable value, you need to provide class and factory IDs of the related class.
     *
     * @param fieldName name of the field
     * @param factoryId factory ID of related Portable class
     * @param classId class ID of related Portable class
     */
    writeNullPortable(fieldName: string, factoryId: number, classId: number): void;
    /**
     * Writes a decimal.
     *
     * @param fieldName name of the field
     * @param value BigDecimal value to be written
     */
    writeDecimal(fieldName: string, value: BigDecimal | null): void;
    /**
     * Write a time.
     *
     * @param fieldName name of the field
     * @param value LocalTime value to be written
     */
    writeTime(fieldName: string, value: LocalTime | null): void;
    /**
     * Writes a date.
     *
     * @param fieldName name of the field
     * @param value LocalDate value to be written
     */
    writeDate(fieldName: string, value: LocalDate | null): void;
    /**
     * Writes a timestamp.
     *
     * @param fieldName name of the field
     * @param value LocalDateTime value to be written
     */
    writeTimestamp(fieldName: string, value: LocalDateTime | null): void;
    /**
     * Writes a timestamp with timezone.
     *
     * @param fieldName name of the field
     * @param value OffsetDateTime value to be written
     */
    writeTimestampWithTimezone(fieldName: string, value: OffsetDateTime | null): void;
    /**
     * Writes a Buffer as byte array.
     *
     * @param fieldName name of the field
     * @param bytes Buffer to be written
     */
    writeByteArray(fieldName: string, bytes: Buffer | null): void;
    /**
     * Writes an array of primitive booleans.
     *
     * @param fieldName name of the field
     * @param booleans boolean array to be written
     */
    writeBooleanArray(fieldName: string, booleans: boolean[] | null): void;
    /**
     * Writes an array of single character strings using `char.charCodeAt(0)`.
     *
     * For each single character string, a two-byte unsigned integer
     * representing the UTF-16 code unit value will be written.
     *
     * @param fieldName name of the field
     * @param chars char array to be written
     */
    writeCharArray(fieldName: string, chars: string[] | null): void;
    /**
     * Writes an array of numbers as 32-bit signed integer array.
     *
     * @param fieldName name of the field
     * @param ints int array to be written. Each value must be a valid signed 32-bit integer.
     *             Behavior is undefined when any value is anything other than a signed 32-bit integer.
     */
    writeIntArray(fieldName: string, ints: number[] | null): void;
    /**
     * Writes an array of longs.
     *
     * @param fieldName name of the field
     * @param longs long array to be written
     */
    writeLongArray(fieldName: string, longs: Long[] | null): void;
    /**
     * Writes an array of numbers as doubles.
     *
     * @param fieldName name of the field
     * @param values double array to be written. Each value must be a JavaScript number.
     *               Behavior is undefined when any value is anything other than a JavaScript number.
     */
    writeDoubleArray(fieldName: string, values: number[] | null): void;
    /**
     * Writes an array of numbers as floats.
     *
     * @param fieldName name of the field
     * @param values float array to be written. Each value must be a JavaScript number.
     *               Behavior is undefined when any value is anything other than a JavaScript number.
     */
    writeFloatArray(fieldName: string, values: number[] | null): void;
    /**
     * Writes an array of numbers as 16-bit signed integers.
     *
     * @param fieldName name of the field
     * @param values short array to be written. Each value must be a valid signed 16-bit integer.
     *               Behavior is undefined when any value is anything other than a signed 16-bit integer.
     */
    writeShortArray(fieldName: string, values: number[] | null): void;
    /**
     * Writes an array of strings. Each string is written as UTF-8 encoded bytes.
     *
     * @param fieldName name of the field
     * @param values string array to be written
     * @deprecated since version 4.2 for the sake of better naming. Please use {@link writeStringArray} instead.
     */
    writeUTFArray(fieldName: string, values: string[] | null): void;
    /**
     * Writes an array of strings. Each string is written as UTF-8 encoded bytes.
     *
     * @param fieldName name of the field
     * @param values string array to be written
     */
    writeStringArray(fieldName: string, values: string[] | null): void;
    /**
     * Writes an array Portables.
     *
     * @param fieldName name of the field
     * @param values Portable array to be written
     */
    writePortableArray(fieldName: string, values: Portable[] | null): void;
    /**
     * Writes an array of `BigDecimal`s.
     *
     * @param fieldName name of the field
     * @param values BigDecimal array to be written
     * @see {@link writeDecimal}
     */
    writeDecimalArray(fieldName: string, values: BigDecimal[] | null): void;
    /**
     * Writes an array of `LocalTime`s.
     *
     * @param fieldName name of the field
     * @param values LocalTime array to be written
     * @see {@link writeTime}
     */
    writeTimeArray(fieldName: string, values: LocalTime[] | null): void;
    /**
     * Writes an array of `LocalDate`s.
     *
     * @param fieldName name of the field
     * @param values LocalDate array to be written
     * @see {@link writeDate}
     */
    writeDateArray(fieldName: string, values: LocalDate[] | null): void;
    /**
     * Writes an array of `LocalDateTime`s.
     *
     * @param fieldName name of the field
     * @param values LocalDateTime array to be written
     * @see {@link writeTimestamp}
     */
    writeTimestampArray(fieldName: string, values: LocalDateTime[] | null): void;
    /**
     * Writes an array of `OffsetDateTime`s.
     *
     * @param fieldName name of the field
     * @param values OffsetDateTime array to be written
     * @see {@link writeTimestampWithTimezone}
     */
    writeTimestampWithTimezoneArray(fieldName: string, values: OffsetDateTime[] | null): void;
}
/**
 * Reader helper for {@link Portable} objects.
 */
export interface PortableReader {
/**
 * @return global version of Portable classes
 */
getVersion(): number;
/**
 * @param fieldName name of the field (does not support nested paths)
 * @return true if the field exists in this class.
 */
hasField(fieldName: string): boolean;
/**
 * @return set of field names on this Portable class
 */
getFieldNames(): string[];
/**
 * @param fieldName name of the field
 * @return field type of given fieldName
 * @throws RangeError if the field does not exist.
 */
getFieldType(fieldName: string): FieldType;
/**
 * Reads a 32-bit signed integer.
 *
 * @param fieldName name of the field
 * @return the int value read
 */
readInt(fieldName: string): number;
/**
 * Reads a long.
 *
 * @param fieldName name of the field
 * @return the long value read
 */
readLong(fieldName: string): Long;
/**
 * Reads a string from UTF-8 encoded bytes.
 *
 * @param fieldName name of the field
 * @return the UTF string value read
 * @deprecated since version 4.2 for the sake of better naming. Please use {@link readString} instead.
 */
readUTF(fieldName: string): string | null;
/**
 * Reads a string from UTF-8 encoded bytes.
 *
 * @param fieldName name of the field
 * @return the string value read
 */
readString(fieldName: string): string | null;
/**
 * Reads a primitive boolean.
 *
 * @param fieldName name of the field
 * @return the boolean value read
 */
readBoolean(fieldName: string): boolean;
/**
 * Reads a 8-bit unsigned integer.
 *
 * @param fieldName name of the field
 * @return the byte value read
 */
readByte(fieldName: string): number;
/**
 * Reads a single character string using `String.fromCharCode` from two bytes of UTF-16 code units.
 *
 * @param fieldName name of the field
 * @return the char value read
 */
readChar(fieldName: string): string;
/**
 * Reads a double.
 *
 * @param fieldName name of the field
 * @return the double value read
 */
readDouble(fieldName: string): number;
/**
 * Reads a float.
 *
 * @param fieldName name of the field
 * @return the float value read
 */
readFloat(fieldName: string): number;
/**
 * Reads a 16-bit signed integer.
 *
 * @param fieldName name of the field
 * @return the short value read
 */
readShort(fieldName: string): number;
/**
 * Reads a Portable.
 *
 * @param fieldName name of the field
 * @return the Portable value read
 */
readPortable(fieldName: string): Portable | null;
/**
 * Reads a decimal.
 *
 * @param fieldName name of the field
 * @return the BigDecimal value read
 */
readDecimal(fieldName: string): BigDecimal | null;
/**
 * Reads a time.
 *
 * @param fieldName name of the field
 * @return the LocalTime value read
 */
readTime(fieldName: string): LocalTime | null;
/**
 * Reads a date.
 *
 * @param fieldName name of the field
 * @return the LocalDate value read
 */
readDate(fieldName: string): LocalDate | null;
/**
 * Reads a timestamp.
 *
 * @param fieldName name of the field
 * @return the LocalDateTime value read
 */
readTimestamp(fieldName: string): LocalDateTime | null;
/**
 * Reads a timestamp with timezone.
 *
 * @param fieldName name of the field
 * @return the OffsetDateTime value read
 */
readTimestampWithTimezone(fieldName: string): OffsetDateTime | null;
/**
 * Reads an array of bytes.
 *
 * @param fieldName name of the field
 * @return the byte array value read
 */
readByteArray(fieldName: string): Buffer | null;
/**
 * Reads an array of primitive booleans.
 *
 * @param fieldName name of the field
 * @return the boolean array value read
 */
readBooleanArray(fieldName: string): boolean[] | null;
/**
 * Reads an array of single character strings.
 * Each of them are read using `String.fromCharCode` from a two bytes UTF-16 code units.
 *
 * @param fieldName name of the field
 * @return the char array value read
 */
readCharArray(fieldName: string): string[] | null;
/**
 * Reads an array of 32-bit signed integers.
 *
 * @param fieldName name of the field
 * @return the int array value read
 */
readIntArray(fieldName: string): number[] | null;
/**
 * Reads an array of longs.
 *
 * @param fieldName name of the field
 * @return the long array value read
 */
readLongArray(fieldName: string): Long[] | null;
/**
 * Reads an array of doubles.
 *
 * @param fieldName name of the field
 * @return the double array value read
 */
readDoubleArray(fieldName: string): number[] | null;
/**
 * Reads an array of floats.
 *
 * @param fieldName name of the field
 * @return the float array value read
 */
readFloatArray(fieldName: string): number[] | null;
/**
 * Reads an array of 16-bit signed integers.
 *
 * @param fieldName name of the field
 * @return the short array value read
 */
readShortArray(fieldName: string): number[] | null;
/**
 * Reads an array of strings. Strings are read using UTF-8 encoding.
 *
 * @param fieldName name of the field
 * @return the string array value read
 * @deprecated since version 4.2 for the sake of better naming. Please use {@link readStringArray} instead
 */
readUTFArray(fieldName: string): string[] | null;
/**
 * Reads an array of strings. Strings are read using UTF-8 encoding.
 *
 * @param fieldName name of the field
 * @return the string array value read
 */
readStringArray(fieldName: string): string[] | null;
/**
 * Reads an array of Portables.
 *
 * @param fieldName name of the field
 * @return the Portable array read
 */
readPortableArray(fieldName: string): Portable[] | null;
/**
 * Reads an array of `BigDecimal`s.
 *
 * @param fieldName name of the field
 * @return the BigDecimal array read
 * @see {@link readDecimal}
 */
readDecimalArray(fieldName: string): BigDecimal[] | null;
/**
 * Reads an array of `LocalTime`s.
 *
 * @param fieldName name of the field
 * @return the LocalTime array read
 * @see {@link readTime}
 */
readTimeArray(fieldName: string): LocalTime[] | null;
/**
 * Reads an array of `LocalDate`s.
 *
 * @param fieldName name of the field
 * @return the LocalDate array read
 * @see {@link readDate}
 */
readDateArray(fieldName: string): LocalDate[] | null;
/**
 * Reads an array of `LocalDateTime`s.
 *
 * @param fieldName name of the field
 * @return the LocalDateTime array read
 * @see {@link readTimestamp}
 */
readTimestampArray(fieldName: string): LocalDateTime[] | null;
/**
 * Reads an array of `OffsetDateTime`s.
 *
 * @param fieldName name of the field
 * @return the OffsetDateTime array read
 * @see {@link readTimestampWithTimezone}
 */
readTimestampWithTimezoneArray(fieldName: string): OffsetDateTime[] | null;
}
/**
 * Interface for objects with Portable serialization support.
 * Implementations must be registered with a matching {@link PortableFactory}.
 */
export interface Portable {
/**
 * Factory id of the Portable object.
 */
factoryId: number;
/**
 * Class id of the Portable object.
 */
classId: number;
/**
 * Reads fields of the Portable object from the binary representation.
 *
 * @param reader read helper
 */
readPortable(reader: PortableReader): void;
/**
 * Writes fields of the Portable object into the binary representation.
 *
 * @param writer write helper
 */
writePortable(writer: PortableWriter): void;
}
/**
 * Interface for Portable serialization with multiversion support.
 * Implement this instead of plain {@link Portable} when multiple versions
 * of the same Portable class must coexist.
 */
export interface VersionedPortable extends Portable {
/**
 * Version of the Portable object.
 */
version: number;
}
/**
* Factory function for {@link Portable}. Should return
* an instance of the right {@link Portable} object, given
* the matching `classId`.
*
* @param classId class id
* @returns object for further initialization
*/
export type PortableFactory = (classId: number) => Portable; | the_stack |
import { Injectable } from '@angular/core';
/**
 * A date (or rule) accepted by flatpickr's `disable`/`enable` options:
 * a date string, a `Date`, an inclusive `{from, to}` range, or a predicate
 * invoked per calendar day that returns `true` when the rule matches.
 */
export type DisableEnableDate =
| string
| Date
| { from: Date | string; to: Date | string }
| ((date: Date) => boolean);
// tslint:disable no-inferrable-types
export interface FlatpickrDefaultsInterface {
/**
 * Exactly the same as date format, but for the altInput field.
 */
altFormat?: string;
/**
 * Show the user a readable date (as per altFormat), but return something totally different to the server.
 */
altInput?: boolean;
/**
 * This class will be added to the input element created by the altInput option.
 * Note that `altInput` already inherits classes from the original input.
 */
altInputClass?: string;
/**
 * Allows the user to enter a date directly into the input field. By default, direct entry is disabled.
 */
allowInput?: boolean;
/**
 * Instead of `body`, appends the calendar to the specified node instead.
 */
appendTo?: HTMLElement;
/**
 * Defines how the date will be formatted in the aria-label for calendar days, using the same tokens as dateFormat. If you change this, you should choose a value that will make sense if a screen reader reads it out loud.
 */
ariaDateFormat?: string;
/**
 * Whether clicking on the input should open the picker.
 * You could disable this if you wish to open the calendar manually `with.open()`.
 */
clickOpens?: boolean;
/**
 * A string of characters which are used to define how the date will be displayed in the input box.
 * The supported characters are defined in the table below.
 */
dateFormat?: string;
/**
 * Initial value of the hour element.
 */
defaultHour?: number;
/**
 * Initial value of the minute element.
 */
defaultMinute?: number;
/**
 * Initial value of the seconds element.
 */
defaultSeconds?: number;
/**
 * See <a href="https://chmln.github.io/flatpickr/examples/#disabling-specific-dates">disabling dates</a>.
 */
disable?: DisableEnableDate[];
/**
 * Set disableMobile to true to always use the non-native picker.
 * By default, Flatpickr utilizes native datetime widgets unless certain options (e.g. disable) are used.
 */
disableMobile?: boolean;
/**
 * See <a href="https://chmln.github.io/flatpickr/examples/#disabling-all-dates-except-select-few">enabling dates</a>.
 */
enable?: DisableEnableDate[];
/**
 * Enables time picker.
 */
enableTime?: boolean;
/**
 * Enables seconds in the time picker.
 */
enableSeconds?: boolean;
/**
 * Allows using a custom date formatting function instead of the built-in handling for date formats using dateFormat, altFormat, etc.
 */
formatDate?: (value: any) => string;
/**
 * Adjusts the step for the hour input (incl. scrolling).
 */
hourIncrement?: number;
/**
 * Displays the calendar inline.
 */
inline?: boolean;
/**
 * The maximum date that a user can pick to (inclusive).
 */
maxDate?: string | Date;
/**
 * The minimum date that a user can start picking from (inclusive).
 */
minDate?: string | Date;
/**
 * Adjusts the step for the minute input (incl. scrolling).
 */
minuteIncrement?: number;
/**
 * Select a single date, multiple dates or a date range.
 */
mode?: 'single' | 'multiple' | 'range';
/**
 * HTML for the arrow icon, used to switch months.
 */
nextArrow?: string;
/**
 * Hides the day selection in calendar. Use it along with `enableTime` to create a time picker.
 */
noCalendar?: boolean;
/**
 * Provide a date for 'today', which will be used instead of "new Date()"
 */
now?: Date | string | number;
/**
 * Function that expects a date string and must return a Date object.
 */
parseDate?: (str: string) => Date;
/**
 * HTML for the left arrow icon.
 */
prevArrow?: string;
/**
 * Show the month using the shorthand version (ie, Sep instead of September).
 */
shorthandCurrentMonth?: boolean;
/**
 * Position the calendar inside the wrapper and next to the input element. (Leave `false` unless you know what you're doing).
 */
static?: boolean;
/**
 * Displays time picker in 24 hour mode without AM/PM selection when enabled.
 */
time24hr?: boolean;
/**
 * When true, dates will be parsed, formatted, and displayed in UTC.
 * It's recommended that date strings contain the timezone, but not necessary.
 */
utc?: boolean;
/**
 * Enables display of week numbers in calendar.
 */
weekNumbers?: boolean;
/**
 * You may override the function that extracts the week numbers from a Date by supplying a getWeek function.
 * It takes in a date as a parameter and should return a corresponding string that you want to appear left of every week.
 */
getWeek?: (date: Date) => string;
/**
 * Custom elements and input groups.
 */
wrap?: boolean;
/**
 * Array of plugin instances to use.
 */
plugins?: any[];
/**
 * The locale object or string to use for the locale.
 */
locale?: object | string;
/**
 * Auto convert the ngModel value from a string to a date / array of dates / from - to date object depending on the `mode`
 */
convertModelValue?: boolean;
/**
 * The number of months shown.
 */
showMonths?: number;
/**
 * How the month should be displayed in the header of the calendar.
 */
monthSelectorType?: 'static' | 'dropdown';
}
@Injectable()
export class FlatpickrDefaults implements FlatpickrDefaultsInterface {
/**
* Exactly the same as date format, but for the altInput field.
*/
altFormat: string = 'F j, Y';
/**
* Show the user a readable date (as per altFormat), but return something totally different to the server.
*/
altInput: boolean = false;
/**
* This class will be added to the input element created by the altInput option.
* Note that `altInput` already inherits classes from the original input.
*/
altInputClass: string = '';
/**
* Allows the user to enter a date directly input the input field. By default, direct entry is disabled.
*/
allowInput: boolean = false;
/**
* Instead of `body`, appends the calendar to the specified node instead.
*/
appendTo: HTMLElement = undefined;
/**
* Defines how the date will be formatted in the aria-label for calendar days, using the same tokens as dateFormat. If you change this, you should choose a value that will make sense if a screen reader reads it out loud.
*/
ariaDateFormat?: string = 'F j, Y';
/**
* Whether clicking on the input should open the picker.
* You could disable this if you wish to open the calendar manually `with.open()`.
*/
clickOpens: boolean = true;
/**
* A string of characters which are used to define how the date will be displayed in the input box.
* The supported characters are defined in the table below.
*/
dateFormat: string = 'Y-m-d';
/**
* Initial value of the hour element.
*/
defaultHour?: number = 12;
/**
* Initial value of the minute element.
*/
defaultMinute?: number = 0;
/**
* Initial value of the seconds element.
*/
defaultSeconds?: number = 0;
/**
* See <a href="https://chmln.github.io/flatpickr/examples/#disabling-specific-dates">disabling dates</a>.
*/
disable: DisableEnableDate[] = [];
/**
* Set disableMobile to true to always use the non-native picker.
* By default, Flatpickr utilizes native datetime widgets unless certain options (e.g. disable) are used.
*/
disableMobile: boolean = false;
/**
* See <a href="https://chmln.github.io/flatpickr/examples/#disabling-all-dates-except-select-few">enabling dates</a>.
*/
enable: DisableEnableDate[];
/**
* Enables time picker.
*/
enableTime: boolean = false;
/**
* Enables seconds in the time picker.
*/
enableSeconds: boolean = false;
/**
* Allows using a custom date formatting function instead of the built-in handling for date formats using dateFormat, altFormat, etc.
*/
formatDate?: (value: any) => string = undefined;
/**
* Adjusts the step for the hour input (incl. scrolling).
*/
hourIncrement: number = 1;
/**
* Displays the calendar inline.
*/
inline: boolean = false;
/**
* The maximum date that a user can pick to (inclusive).
*/
maxDate: string | Date = undefined;
/**
* The minimum date that a user can start picking from (inclusive).
*/
minDate: string | Date = undefined;
/**
* Adjusts the step for the minute input (incl. scrolling).
*/
minuteIncrement: number = 5;
/**
* Select a single date, multiple dates or a date range.
*/
mode: 'single' | 'multiple' | 'range' = 'single';
/**
* HTML for the arrow icon, used to switch months.
*/
nextArrow: string = '>';
/**
* Hides the day selection in calendar. Use it along with `enableTime` to create a time picker.
*/
noCalendar: boolean = false;
/**
* Default now to the current date
*/
now: Date | string | number = new Date();
/**
* Function that expects a date string and must return a Date object.
*/
parseDate: (str: string) => Date;
/**
* HTML for the left arrow icon.
*/
prevArrow: string = '<';
/**
* Show the month using the shorthand version (ie, Sep instead of September).
*/
shorthandCurrentMonth: boolean = false;
/**
* Position the calendar inside the wrapper and next to the input element. (Leave `false` unless you know what you're doing).
*/
static: boolean = false;
/**
* Displays time picker in 24 hour mode without AM/PM selection when enabled.
*/
time24hr: boolean = false;
/**
* When true, dates will parsed, formatted, and displayed in UTC.
* It's recommended that date strings contain the timezone, but not necessary.
*/
utc: boolean = false;
/**
* Enables display of week numbers in calendar.
*/
weekNumbers: boolean = false;
/**
* You may override the function that extracts the week numbers from a Date by supplying a getWeek function.
* It takes in a date as a parameter and should return a corresponding string that you want to appear left of every week.
*/
getWeek: (date: Date) => string;
/**
* Custom elements and input groups.
*/
wrap: boolean = false;
/**
* Array of plugin instances to use.
*/
plugins: any[] = [];
/**
* The locale object or string to use for the locale.
*/
locale: object | string = 'default';
/**
* Auto convert the ngModel value from a string to a date / array of dates / from - to date object depending on the `mode`
*/
convertModelValue: boolean = false;
/**
* The number of months shown.
*/
showMonths: number = 1;
/**
* How the month should be displayed in the header of the calendar.
*/
monthSelectorType: 'static' | 'dropdown' = 'static';
} | the_stack |
import React from 'react';
import {assert} from 'chai';
import td from 'testdouble';
import {shallow, mount} from 'enzyme';
import Dialog, {
ChildTypes,
DialogTitle,
DialogContent,
DialogFooter,
DialogButton,
} from '../../../packages/dialog';
import {isScrollable, areTopsMisaligned} from '@material/dialog/util';
import {cssClasses, LAYOUT_EVENTS} from '../../../packages/dialog/constants';
import {coerceForTesting} from '../helpers/types';
import {FocusTrap} from 'focus-trap';
// Reusable fixture: a Dialog with content plus dismiss/accept footer buttons.
const DialogStub = (
<Dialog>
<DialogContent>
<p>meowkay</p>
</DialogContent>
<DialogFooter>
<DialogButton action='dismiss'>Dismiss</DialogButton>
<DialogButton action='accept'>Accept</DialogButton>
</DialogFooter>
</Dialog>
);
suite('Dialog');
// Reaches into the component's foundation to expose its adapter for direct adapter tests.
const getAdapter = (instance: Dialog) => {
// @ts-ignore adapter_ property is protected, we need to override it for testing purposes
return instance.foundation.adapter_;
};
// --- rendering and foundation lifecycle ---
test('renders a dialog with default tag', () => {
const wrapper = shallow<Dialog>(<Dialog />);
assert.equal(wrapper.type(), 'div');
});
test('renders a dialog with custom tag', () => {
const wrapper = shallow<Dialog>(<Dialog tag='dialog' />);
assert.equal(wrapper.type(), 'dialog');
});
test('creates foundation', () => {
const wrapper = shallow<Dialog>(<Dialog />);
assert.exists(wrapper.instance().foundation);
});
test('#componentWillUnmount destroys foundation', () => {
const wrapper = shallow<Dialog>(<Dialog />);
const foundation = wrapper.instance().foundation;
foundation.destroy = td.func<() => void>();
wrapper.unmount();
td.verify(foundation.destroy());
});
test('renders a dialog with foundation.autoStackButtons set to true', () => {
const wrapper = shallow<Dialog>(<Dialog />);
assert.isTrue(wrapper.instance().foundation.getAutoStackButtons());
});
// Fixed test-name typo: "foundaiton" -> "foundation".
test('#componentDidMount sets #foundation.autoStackButtons to false if prop false', () => {
const wrapper = shallow<Dialog>(<Dialog autoStackButtons={false} />);
assert.isFalse(wrapper.instance().foundation.getAutoStackButtons());
});
// Default escape-key action comes from the foundation ('close').
test('renders a dialog with foundation.setEscapeKeyAction set to foundation default', () => {
const wrapper = shallow<Dialog>(<Dialog />);
assert.strictEqual(
wrapper.instance().foundation.getEscapeKeyAction(),
'close'
);
});
// Fixed test-name typo: "foundaiton" -> "foundation".
test('#componentDidMount calls #foundation.setEscapeKeyAction if prop present', () => {
const escapeKeyAction: string = 'meow';
const wrapper = shallow<Dialog>(<Dialog escapeKeyAction={escapeKeyAction} />);
assert.strictEqual(
wrapper.instance().foundation.getEscapeKeyAction(),
escapeKeyAction
);
});
// Default scrim-click action comes from the foundation ('close').
test('renders a dialog with foundation.setScrimClickAction set to foundation default', () => {
const wrapper = shallow<Dialog>(<Dialog />);
assert.strictEqual(
wrapper.instance().foundation.getScrimClickAction(),
'close'
);
});
// Fixed test-name typo: "foundaiton" -> "foundation".
test('#componentDidMount calls #foundation.setScrimClickAction if prop present', () => {
const scrimClickAction: string = 'meow';
const wrapper = shallow<Dialog>(
<Dialog scrimClickAction={scrimClickAction} />
);
assert.strictEqual(
wrapper.instance().foundation.getScrimClickAction(),
scrimClickAction
);
});
// --- prop updates forwarded to the foundation, plus basic attributes/classes ---
test('when props.open updates to true, #foundation.open is called ', () => {
const wrapper = shallow<Dialog>(<Dialog />);
wrapper.instance().foundation.open = td.func<() => void>();
wrapper.setProps({open: true});
td.verify(wrapper.instance().foundation.open(), {times: 1});
});
test('when props.open updates to false from true, #foundation.close is called ', () => {
const wrapper = shallow<Dialog>(<Dialog open />);
wrapper.instance().foundation.close = td.func<(action: string) => null>();
wrapper.setProps({open: false});
td.verify(wrapper.instance().foundation.close(), {times: 1});
});
test(
'when props.autoStackButtons updates to true, ' +
' #foundation.setAutoStackButtons is called ',
() => {
const wrapper = shallow<Dialog>(<Dialog autoStackButtons={false} />);
assert.isFalse(wrapper.instance().foundation.getAutoStackButtons());
wrapper.instance().foundation.setAutoStackButtons = td.func<
(autoStack: boolean) => null
>();
wrapper.setProps({autoStackButtons: true});
td.verify(wrapper.instance().foundation.setAutoStackButtons(true), {
times: 1,
});
}
);
test(
'when props.autoStackButtons updates to false, ' +
' #foundation.setAutoStackButtons is called ',
() => {
const wrapper = shallow<Dialog>(<Dialog />);
assert.isTrue(wrapper.instance().foundation.getAutoStackButtons());
wrapper.instance().foundation.setAutoStackButtons = td.func<
(autoStack: boolean) => null
>();
wrapper.setProps({autoStackButtons: false});
td.verify(wrapper.instance().foundation.setAutoStackButtons(false), {
times: 1,
});
}
);
test('when props.escapeKeyAction updates #foundation.setEscapeKeyAction is called', () => {
const wrapper = shallow<Dialog>(<Dialog />);
const escapeKeyAction: string = 'meow';
wrapper.instance().foundation.setEscapeKeyAction = td.func<
(action: string) => null
>();
wrapper.setProps({escapeKeyAction});
td.verify(wrapper.instance().foundation.setEscapeKeyAction(escapeKeyAction), {
times: 1,
});
});
test('when props.scrimClickAction updates #foundation.setScrimClickAction is called', () => {
const wrapper = shallow<Dialog>(<Dialog />);
const scrimClickAction: string = 'meow';
wrapper.instance().foundation.setScrimClickAction = td.func<
(action: string) => null
>();
wrapper.setProps({scrimClickAction});
td.verify(
wrapper.instance().foundation.setScrimClickAction(scrimClickAction),
{times: 1}
);
});
test('component has default @id', () => {
const wrapper = shallow<Dialog>(<Dialog />);
assert.equal('mdc-dialog', wrapper.prop('id'));
});
test('component will set a custom @id', () => {
const customId = 'my-custom-dialog';
const wrapper = shallow<Dialog>(<Dialog id={customId} />);
assert.equal(customId, wrapper.prop('id'));
});
test('component has aria-modal set to true', () => {
const wrapper = shallow<Dialog>(<Dialog />);
assert.isTrue(wrapper.prop('aria-modal'));
});
test('classNames adds classes', () => {
const wrapper = shallow<Dialog>(<Dialog className='test-class-name' />);
assert.isTrue(wrapper.hasClass('test-class-name'));
assert.isTrue(wrapper.hasClass(cssClasses.BASE));
});
// --- adapter: class-list manipulation on the component and on document.body ---
test('#adapter.addClass should update state with new className', () => {
const wrapper = mount<Dialog>(<Dialog />);
getAdapter(wrapper.instance()).addClass('test-class');
assert.isTrue(wrapper.state().classList.has('test-class'));
});
test('#adapter.removeClass should update state with new className', () => {
const wrapper = mount<Dialog>(<Dialog />);
wrapper.setState({classList: new Set(['test-class'])});
getAdapter(wrapper.instance()).removeClass('test-class');
assert.isFalse(wrapper.state().classList.has('test-class'));
});
test('#adapter.hasClass returns true if class is contained in classes', () => {
const wrapper = mount<Dialog>(<Dialog />);
wrapper.setState({classList: new Set(['test-class'])});
assert.isTrue(getAdapter(wrapper.instance()).hasClass('test-class'));
});
test('#adapter.addBodyClass adds a class to the body', () => {
const wrapper = shallow<Dialog>(<Dialog />);
getAdapter(wrapper.instance()).addBodyClass('test-class');
const body = document.querySelector('body')!;
assert.isTrue(body.classList.contains('test-class'));
});
// Fixed test name: this test verifies removal, not addition, of a body class.
test('#adapter.removeBodyClass removes a class from the body', () => {
const wrapper = shallow<Dialog>(<Dialog />);
const body = document.querySelector('body')!;
body.classList.add('test-class');
assert.isTrue(body.classList.contains('test-class'));
wrapper.instance().adapter.removeBodyClass('test-class');
assert.isFalse(body.classList.contains('test-class'));
});
// --- adapter: focus trapping, scroll/stacking queries, and action resolution ---
test('#adapter.eventTargetMatchesSelector matches a selector passed as argument', () => {
const wrapper = shallow<Dialog>(<Dialog />);
const target = document.createElement('div');
target.classList.add('test-class');
const eventTargetMatchesSelector = wrapper
.instance()
.adapter.eventTargetMatches(target, '.test-class');
assert.isTrue(eventTargetMatchesSelector);
});
test('#adapter.trapFocus calls focusTrap.activate', () => {
const wrapper = mount<Dialog>(DialogStub);
const activate = td.func();
wrapper.instance().focusTrap = coerceForTesting<FocusTrap>({activate});
wrapper.instance().adapter.trapFocus();
td.verify(activate(), {times: 1});
});
test('#adapter.releaseFocus calls focusTrap.deactivate ', () => {
const wrapper = shallow<Dialog>(DialogStub);
const deactivate = td.func();
wrapper.instance().focusTrap = coerceForTesting<FocusTrap>({deactivate});
wrapper.instance().adapter.releaseFocus();
td.verify(deactivate(), {times: 1});
});
test('#adapter.isContentScrollable returns false when there is no content', () => {
const wrapper = mount<Dialog>(<Dialog />);
assert.isFalse(wrapper.instance().adapter.isContentScrollable());
});
test('#adapter.isContentScrollable returns the value of util.isScrollable', () => {
const wrapper = mount<Dialog>(
<Dialog open>
<DialogContent>
<p>meowkay</p>
</DialogContent>
</Dialog>
);
const content = wrapper.instance().content;
assert.strictEqual(
wrapper.instance().adapter.isContentScrollable(),
isScrollable(content)
);
});
test('#adapter.areButtonsStacked returns result of util.areTopsMisaligned', () => {
const wrapper = mount<Dialog>(DialogStub);
const buttons = wrapper.instance().buttons;
assert.strictEqual(
wrapper.instance().adapter.areButtonsStacked(),
areTopsMisaligned(buttons)
);
});
test('#adapter.getActionFromEvent returns attribute value on event target', () => {
const wrapper = mount<Dialog>(DialogStub);
const buttons = wrapper.instance().buttons!;
const action = wrapper
.instance()
.adapter.getActionFromEvent(coerceForTesting<Event>({target: buttons[1]}));
assert.equal(action, 'accept');
});
test('#adapter.getActionFromEvent returns attribute value on parent of event target', () => {
const wrapper = mount<Dialog>(
<Dialog>
<DialogContent>
<ul className='mdc-list mdc-list--avatar-list'>
<li className='mdc-list-item' data-mdc-dialog-action='pet'>
<i className='mdc-list-item__graphic material-icons'>pets</i>
<span>Cat</span>
</li>
</ul>
</DialogContent>
</Dialog>
);
const spanEl = wrapper.instance().content!.getElementsByTagName('span')[0];
const action = wrapper
.instance()
.adapter.getActionFromEvent(coerceForTesting<Event>({target: spanEl}));
assert.equal(action, 'pet');
});
test('#adapter.getActionFromEvent returns null when attribute is not present', () => {
const wrapper = mount<Dialog>(
<Dialog>
<DialogContent>
<ul className='mdc-list mdc-list--avatar-list'>
<li className='mdc-list-item'>
<i className='mdc-list-item__graphic material-icons'>pets</i>
<span>Cat</span>
</li>
</ul>
</DialogContent>
</Dialog>
);
const spanEl = wrapper.instance().content!.getElementsByTagName('span')[0];
const action = wrapper
.instance()
.adapter.getActionFromEvent(coerceForTesting<Event>({target: spanEl}));
assert.isNull(action);
});
test(`#adapter.clickDefaultButton invokes click() on button matching ${cssClasses.DEFAULT_BUTTON}`, () => {
const wrapper = mount<Dialog>(
<Dialog>
<DialogContent>
<p>meowkay</p>
</DialogContent>
<DialogFooter>
<DialogButton action='dismiss'>Dismiss</DialogButton>
<DialogButton action='accept' isDefault>
Accept
</DialogButton>
</DialogFooter>
</Dialog>
);
const defaultButton = wrapper.instance().defaultButton!;
defaultButton.click = coerceForTesting<() => void>(td.func('click'));
wrapper.instance().adapter.clickDefaultButton();
td.verify(defaultButton.click(), {times: 1});
});
test(`#adapter.clickDefaultButton does nothing if no button matches ${cssClasses.DEFAULT_BUTTON}`, () => {
const wrapper = mount<Dialog>(DialogStub);
const buttons = wrapper.instance().buttons!;
buttons.map(
(button) => (button.click = coerceForTesting<() => void>(td.func('click')))
);
wrapper.instance().adapter.clickDefaultButton();
buttons.map((button) => td.verify(button.click(), {times: 0}));
});
// --- adapter: notifications, plus document/window handler registration ---
test('#adapter.reverseButtons reverses the order of children under the actions element', () => {
const acceptButton = <DialogButton action='accept'>Accept</DialogButton>;
const dismissButton = <DialogButton action='dismiss'>Dismiss</DialogButton>;
const wrapper = mount<Dialog>(
<Dialog>
<DialogContent>
<p>meowkay</p>
</DialogContent>
<DialogFooter>
{acceptButton}
{dismissButton}
</DialogFooter>
</Dialog>
);
const buttons = wrapper.instance().buttons;
wrapper.instance().adapter.reverseButtons();
assert.sameOrderedMembers(buttons.reverse(), wrapper.instance().buttons);
});
test('#adapter.notifyOpening calls props.onOpening', () => {
const onOpening = coerceForTesting<() => void>(td.func());
const wrapper = shallow<Dialog>(<Dialog onOpening={onOpening} />);
wrapper.instance().adapter.notifyOpening();
td.verify(onOpening(), {times: 1});
});
test('#adapter.notifyOpened calls props.onOpen', () => {
const onOpen = coerceForTesting<() => void>(td.func());
const wrapper = shallow<Dialog>(<Dialog onOpen={onOpen} />);
wrapper.instance().adapter.notifyOpened();
td.verify(onOpen(), {times: 1});
});
test('#adapter.notifyClosing calls props.onClosing', () => {
const onClosing = coerceForTesting<(action: string) => void>(td.func());
const wrapper = shallow<Dialog>(<Dialog onClosing={onClosing} />);
wrapper.instance().adapter.notifyClosing('close');
td.verify(onClosing('close'), {times: 1});
});
test('#adapter.notifyClosed calls props.onClose', () => {
const onClose = coerceForTesting<(action: string) => void>(td.func());
const wrapper = shallow<Dialog>(<Dialog onClose={onClose} />);
wrapper.instance().adapter.notifyClosed('close');
td.verify(onClose('close'), {times: 1});
});
test(
'#handleOpening adds keydown handler on document that triggers ' +
'#foundation.handleDocumentKeyDown',
() => {
const wrapper = shallow<Dialog>(<Dialog open={false} />);
wrapper.instance().foundation.handleDocumentKeydown = td.func<
(evt: KeyboardEvent) => null
>();
const e = new KeyboardEvent('keydown');
document.dispatchEvent(e);
td.verify(wrapper.instance().foundation.handleDocumentKeydown(e), {
times: 0,
});
wrapper.instance().handleOpening();
document.dispatchEvent(e);
td.verify(wrapper.instance().foundation.handleDocumentKeydown(e), {
times: 1,
});
}
);
test('#handleOpening adds handler for LAYOUT_EVENTS to window', () => {
const wrapper = shallow<Dialog>(<Dialog open={false} />);
wrapper.instance().handleLayout = coerceForTesting<() => void>(td.func());
wrapper.instance().handleOpening();
LAYOUT_EVENTS.forEach((eventType: string) => {
const evt = new Event(eventType);
window.dispatchEvent(evt);
// @ts-ignore expected 0 arguments but got 1 -- evt will always be passed
td.verify(wrapper.instance().handleLayout(evt), {times: 1});
});
});
test(
'#handleClosing removes keydown handler on document that triggers ' +
'#foundation.handleDocumentKeyDown',
() => {
const wrapper = shallow<Dialog>(<Dialog open={false} />);
wrapper.instance().foundation.handleDocumentKeydown = coerceForTesting<
(evt: KeyboardEvent) => void
>(td.func());
wrapper.instance().handleOpening();
const e = new KeyboardEvent('keydown');
document.dispatchEvent(e);
td.verify(wrapper.instance().foundation.handleDocumentKeydown(e), {
times: 1,
});
wrapper.instance().foundation.handleDocumentKeydown = coerceForTesting<
(evt: KeyboardEvent) => void
>(td.func());
wrapper.instance().handleClosing('close');
document.dispatchEvent(e);
td.verify(wrapper.instance().foundation.handleDocumentKeydown(e), {
times: 0,
});
}
);
test('#handleClosing removes handler for LAYOUT_EVENTS to window', () => {
const wrapper = shallow<Dialog>(<Dialog open={false} />);
wrapper.instance().handleLayout = coerceForTesting<(evt?: Event) => void>(
td.func()
);
wrapper.instance().handleOpening();
LAYOUT_EVENTS.forEach((eventType) => {
const evt = new KeyboardEvent(eventType);
window.dispatchEvent(evt);
// @ts-ignore expected 0 arguments but got 1
td.verify(wrapper.instance().handleLayout(evt), {times: 1});
});
wrapper.instance().handleClosing('close');
LAYOUT_EVENTS.forEach((eventType) => {
const evt = new Event(eventType);
window.dispatchEvent(evt);
// @ts-ignore expected 0 arguments but got 1
td.verify(wrapper.instance().handleLayout(evt), {times: 0});
});
});
// renderContainer renders nothing at all when the dialog has no children.
test('#renderContainer returns undefined if no children', () => {
  const wrapper = shallow<Dialog>(<Dialog />);
  const container = wrapper.instance().renderContainer(undefined);
  assert.isUndefined(container);
});
// With children present, renderContainer wraps them in the container element
// and delegates each child (with its index) to renderChild.
test('#renderContainer renders container if children present', () => {
  const wrapper = shallow<Dialog>(
    <Dialog>
      <DialogTitle>Test</DialogTitle>
      <DialogContent>
        <p>Meowkay</p>
      </DialogContent>
    </Dialog>
  );
  // Stub renderChild so the delegation itself can be verified per child.
  wrapper.instance().renderChild = coerceForTesting<
    (child: ChildTypes, i?: number) => ChildTypes
  >(td.func());
  const children: ChildTypes[] = wrapper.instance().props
    .children as ChildTypes[];
  const container = wrapper.instance().renderContainer(children);
  assert.isDefined(container);
  assert.equal(container!.props.className, cssClasses.CONTAINER);
  children.forEach((child: ChildTypes, i: number) =>
    td.verify(wrapper.instance().renderChild(child, i), {times: 1})
  );
});
// renderChild routes a DialogTitle (with no explicit id) through setId.
test('#renderChild will call setId if DialogTitle', () => {
  const title = <DialogTitle>Test</DialogTitle>;
  const wrapper = shallow<Dialog>(<Dialog>{title}</Dialog>);
  wrapper.instance().setId = coerceForTesting<
    (name: ChildTypes, componentId?: string) => string
  >(td.func());
  wrapper.instance().renderChild(title, 0);
  td.verify(wrapper.instance().setId(title, undefined), {times: 1});
});
// renderChild passes an existing id on DialogContent through to setId.
test('#renderChild will call setId if DialogContent', () => {
  const content = (
    <DialogContent id='your-pet-cat'>
      <p>Meow</p>
    </DialogContent>
  );
  const wrapper = shallow<Dialog>(<Dialog>{content}</Dialog>);
  wrapper.instance().setId = coerceForTesting<
    (name: ChildTypes, componentId?: string) => string
  >(td.func());
  wrapper.instance().renderChild(content, 1);
  td.verify(wrapper.instance().setId(content, 'your-pet-cat'), {times: 1});
});
// Other children (e.g. DialogFooter) must not be routed through setId.
test('#renderChild will not call setId if !DialogTitle || !DialogContent', () => {
  const footer = <DialogFooter>Test</DialogFooter>;
  const wrapper = shallow<Dialog>(<Dialog>{footer}</Dialog>);
  wrapper.instance().setId = coerceForTesting<
    (name: ChildTypes, componentId?: string) => string
  >(td.func());
  wrapper.instance().renderChild(footer, 2);
  td.verify(wrapper.instance().setId(footer), {times: 0});
});
// #setId should generate an id on the DialogTitle and point the dialog's
// aria-labelledby attribute at it when no explicit id was supplied.
test('#setId will set labelledby and a id on DialogTitle if not present', () => {
  const wrapper = mount<Dialog>(
    <Dialog>
      <DialogTitle>Test</DialogTitle>
    </Dialog>
  );
  const surfaceEl = wrapper.instance().dialogElement.current;
  const labelledByAttr = surfaceEl!.getAttribute('aria-labelledby');
  const titleEl = surfaceEl!.getElementsByClassName(cssClasses.TITLE)[0];
  assert.equal(labelledByAttr, titleEl.id);
});
// When the DialogTitle carries its own id, aria-labelledby must reuse it.
test('#setId will set labelledby and from a custom DialogTitle', () => {
  const customId = 'custom-id';
  const wrapper = mount<Dialog>(
    <Dialog>
      <DialogTitle id={customId}>Test</DialogTitle>
    </Dialog>
  );
  const surfaceEl = wrapper.instance().dialogElement.current;
  const labelledByAttr = surfaceEl!.getAttribute('aria-labelledby');
  const titleEl = surfaceEl!.getElementsByClassName(cssClasses.TITLE)[0];
  assert.equal(labelledByAttr, customId);
  assert.equal(labelledByAttr, titleEl.id);
});
// Fix: test titles misspelled "foundation" as "foundaiton".
// A keydown on the dialog surface must forward the *native* event (not the
// React synthetic wrapper) to foundation.handleInteraction.
test('#events.onKeyDown triggers #foundation.handleInteraction', () => {
  const wrapper = shallow<Dialog>(
    <Dialog>
      <DialogContent>
        <p>meow</p>
      </DialogContent>
    </Dialog>
  );
  // Stub the foundation handler so the forwarded argument can be verified.
  wrapper.instance().foundation.handleInteraction = td.func<
    (e: KeyboardEvent) => null
  >();
  const e = coerceForTesting<React.KeyboardEvent>({
    nativeEvent: {},
  });
  wrapper.simulate('keydown', e);
  td.verify(wrapper.instance().foundation.handleInteraction(e.nativeEvent), {
    times: 1,
  });
});
// Same contract for click: the native event is forwarded to handleInteraction.
test('#events.onClick triggers #foundation.handleInteraction', () => {
  const wrapper = shallow<Dialog>(
    <Dialog>
      <DialogContent>
        <p>meow</p>
      </DialogContent>
    </Dialog>
  );
  wrapper.instance().foundation.handleInteraction = td.func<
    (e: KeyboardEvent) => null
  >();
  const e = coerceForTesting<React.KeyboardEvent>({
    nativeEvent: {},
  });
  wrapper.simulate('click', e);
  td.verify(wrapper.instance().foundation.handleInteraction(e.nativeEvent), {
    times: 1,
  });
});
// Pressing Escape closes an open dialog via the foundation by default.
test('Dialog closes when esc key is pressed', () => {
  const dialog = mount<Dialog>(<Dialog open={true} />);
  assert.isTrue(dialog.instance().foundation.isOpen());
  const escEvent = new KeyboardEvent('keydown', {key: 'Escape'});
  document.dispatchEvent(escEvent);
  assert.isFalse(dialog.instance().foundation.isOpen());
});
// An empty escapeKeyAction disables the Escape-to-close behavior.
test('Dialog does not close when esc key is pressed if escapeKeyAction set to empty string', () => {
  const dialog = mount<Dialog>(<Dialog open={true} escapeKeyAction='' />);
  assert.isTrue(dialog.instance().foundation.isOpen());
  const escEvent = new KeyboardEvent('keydown', {key: 'Escape'});
  document.dispatchEvent(escEvent);
  assert.isTrue(dialog.instance().foundation.isOpen());
});
// Clicking the scrim dismisses the dialog by default.
test('Dialog closes when scrim is clicked', () => {
  const dialog = mount<Dialog>(<Dialog open={true} />);
  assert.isTrue(dialog.instance().foundation.isOpen());
  dialog.find(`.${cssClasses.SCRIM}`).simulate('click');
  assert.isFalse(dialog.instance().foundation.isOpen());
});
// An empty scrimClickAction disables scrim-click dismissal.
test('Dialog does not close when scrim is clicked if scrimClickAction set to empty string', () => {
  const dialog = mount<Dialog>(<Dialog open scrimClickAction={''} />);
  assert.isTrue(dialog.instance().foundation.isOpen());
  dialog.find(`.${cssClasses.SCRIM}`).simulate('click');
  assert.isTrue(dialog.instance().foundation.isOpen());
});
import * as _ from "lodash";
import * as Studio from "../studio";
import { Field, SharedModel, UIModeType } from "../studio";
import { XBridge } from "../studio/bridge/XBridge";
import { FieldValueKind } from "../studio/models/FieldValueKind";
import { Relation } from "../studio/models/Relation";
import { ModelUtils } from "../utils";
import { EnumConfigType } from "./defs";
/** Renderer categories used for file-like relations. */
type FilesType = "file" | "files" | "fileGroup";
/** Every renderer a view item can be displayed with. */
type RendererType = "relation" | "number" | "tag" | FilesType | FieldValueKind;
/** Describes one column/field as consumed by the generated UI (lists, views and forms). */
export type ViewItemModel = {
  id: string;
  title: string;
  key: string;
  dataIndex?: string;
  sorter: string | boolean;
  // True when the value is an array (rendered as multiple entries).
  isMany: boolean;
  required?: boolean;
  order?: number;
  description?: string;
  /**
   * If it's an array or a string it renders it directly
   */
  dataIndexStr?: string;
  enumValues?: Array<{ label: string; value: string }>;
  rendererType: RendererType;
  // The remaining optional members are only populated for relation items.
  relational?: boolean;
  remoteField?: string;
  remoteCollectionClass?: string;
  routeName?: string;
  subfields?: ViewItemModel[];
  /**
   * The default value the form will initially have in (create) mode only
   */
  defaultValue?: any;
  // Optional custom form component override (props are JSON-serialized).
  form?: {
    component: string;
    props?: string;
  };
};
/** The CRUD page types that can be individually toggled per collection. */
export type CRUDFeatureType = "create" | "edit" | "view" | "list" | "delete";
export class UICRUDModel {
studioCollection: Studio.Collection;
bundleName: string;
features:
| true
| {
[key in CRUDFeatureType]?: boolean;
} = true;
hasFeature(feature: CRUDFeatureType) {
if (this.features === true) {
return true;
}
return this.features[feature];
}
get sheetName() {
if (this.studioCollection.ui === false) {
throw new Error("Shouldn't generate CRUD for it");
}
return this.studioCollection.ui.label;
}
// The icon from UI Generated.
get icon() {
if (this.studioCollection.ui === false) {
throw new Error("Shouldn't add a menu for it");
}
return this.studioCollection.ui.icon || "SettingFilled";
}
get collectionName() {
return this.studioCollection.id;
}
get collectionClass() {
return this.collectionName + "Collection";
}
get entityName() {
return this.studioCollection.entityName;
}
/**
* Based on the UI mode it generates the GraphQL request body to be sent to server when a specific page is reached
* @param mode
* @returns
*/
generateRequestBodyAsString(mode: UIModeType) {
let body = {
_id: 1,
};
this.recursiveBodyExpand(mode, body, this.studioCollection.fields);
this.studioCollection.getRelationshipsByUIMode(mode).forEach((r) => {
body[r.id] = {
_id: 1,
[r.cleaned.representedBy.id]: 1,
};
if (r.isDirect) {
body[r.cleaned.field.id] = 1;
}
if (r.isFileSimpleRelated()) {
Object.assign(body[r.id], {
downloadUrl: 1,
name: 1,
});
}
if (r.isFileGroupRelated()) {
Object.assign(body[r.id], {
name: 1,
files: {
downloadUrl: 1,
name: 1,
},
});
}
});
return JSON.stringify(body, null, 2);
}
/**
* The idea here is that when we request a body we have to request it's nested fields
* @param mode
* @param body
* @param fields
* TODO: move to own function separated from here
*/
recursiveBodyExpand(mode: UIModeType, body: object, fields: Field[]) {
fields.forEach((field) => {
const model = field.model as SharedModel;
if (model && !model.isEnum()) {
body[field.id] = {};
this.recursiveBodyExpand(
mode,
body[field.id],
field.cleaned.model.fields
);
// If the subfields haven't been found
if (Object.keys(body).length === 0) {
delete body[field.id];
}
} else if (field.subfields.length) {
body[field.id] = {};
this.recursiveBodyExpand(mode, body[field.id], field.subfields);
// If the subfields haven't been found
if (Object.keys(body).length === 0) {
delete body[field.id];
}
} else {
if (field.ui && field.ui[mode]) {
body[field.id] = 1;
}
}
});
}
collectionRoutePath() {
return _.kebabCase(this.collectionName);
}
/**
* Collection is Posts, name is: new => POSTS_NEW
* @param name
*/
generateRouteName(name: string) {
return this.generateRouteNameForCollection(this.studioCollection.id, name);
}
generateRouteNameForCollection(collectionName, name: string) {
return _.toUpper(_.snakeCase(collectionName) + "_" + name);
}
generateI18NName() {
return _.toLower(_.snakeCase(this.collectionName));
}
/**
* The list for i18n fields for forms, lists, and everything
*/
generateI18NFieldsAsJSON(): string {
const i18nSignatures = [
...this.studioCollection
.getFlattenedFields()
.map((field) => field.getI18NSignature()),
...this.studioCollection.relations.map((relation) =>
relation.getI18NSignature()
),
];
const obj = {};
i18nSignatures.forEach((i18nSignature) => {
obj[i18nSignature.key] = i18nSignature.label;
if (i18nSignature.description) {
obj[i18nSignature.key + "_description"] = i18nSignature.description;
}
});
return JSON.stringify(obj);
}
collectionHasMode(mode: UIModeType) {
return this.studioCollection.ui && this.studioCollection.ui[mode];
}
fieldHasMode(field: Studio.Field, mode: UIModeType) {
return field.ui && field.ui[mode];
}
generateComponentName(name: string) {
return this.collectionName + _.upperFirst(name);
}
antColumnsString() {
return JSON.stringify(this.antColumns());
}
/**
* Helper method for handlebars
* @param type
* @param othertype
*/
typeIs(type, othertype) {
return type === othertype;
}
/**
* @param value
* @returns
*/
isUndefined(value) {
return value === undefined;
}
typeIsFormPrimitive(type: RendererType) {
const primitives = [
Studio.Field.Types.STRING,
Studio.Field.Types.BOOLEAN,
Studio.Field.Types.DATE,
Studio.Field.Types.OBJECT_ID,
Studio.Field.Types.INTEGER,
Studio.Field.Types.FLOAT,
"number",
];
return primitives.includes(type);
}
collectionClassNamesOfInterest(): string[] {
const names = this.studioCollection.relations
.filter((r) => !r.isFileRelated())
.map((relation) => {
// AppFiles are handled separately not imported directly.
return relation.cleaned.to.id + "Collection";
});
names.push(this.collectionClass);
function onlyUnique(value, index, self) {
return self.indexOf(value) === index;
}
return names.filter(onlyUnique);
}
cssClass(name: string) {
return `page-${_.kebabCase(this.collectionName)}-${name}`;
}
antColumns(mode: UIModeType = "list"): ViewItemModel[] {
const result = [];
this.studioCollection.fields.map((field) => {
// Here it can be relational and fields should be in.
this.fillFields(result, field, mode);
});
this.studioCollection.relations.forEach((r) => {
if (this.isForm(mode)) {
if (!r.isDirect) {
// cancel execution
return;
}
}
this.fillRelations(result, r, mode);
});
return result;
}
fillRelations(
store: ViewItemModel[],
_relation: Studio.Relation,
mode: UIModeType
) {
const relation = _relation.cleaned;
if (relation.ui === false) {
return;
}
if (!relation.ui[mode]) {
return;
}
// Sanity-check
// You cannot add or modify relationships from an indirect one because the update is done on the other collection
if (this.isForm(mode)) {
if (!relation.isDirect) {
throw new Error(
`(${this.studioCollection.id}:${relation.id}) Relations which are inversed cannot be inside the forms.`
);
}
}
store.push({
id: this.isForm(mode) ? relation.field.id : relation.id,
title: this.getI18NKey(_relation),
description: this.getI18NKey(_relation, true),
required: relation.field && relation.field.isRequired,
order: relation.ui.order,
dataIndexStr: this.isForm(mode)
? `[ "${relation.field.id}" ]`
: `[ "${relation.id}" ]`,
key: relation.id,
isMany: relation.isMany,
sorter: true,
remoteField: relation.representedBy.id,
routeName: this.generateRouteNameForCollection(relation.to.id, "view"),
relational: true,
remoteCollectionClass: relation.to.id + "Collection",
rendererType: this.getRendererTypeForRelation(_relation),
});
}
fillFields(
store: ViewItemModel[],
field: Studio.Field,
mode: UIModeType,
dataIndexParent?: string
): void {
// This refers to how ant prefers rendering items
let dataIndexStr = `[ ${dataIndexParent ? `"${dataIndexParent}", ` : ""} "${
field.id
}" ]`;
if (this.fieldHasMode(field, mode) === false) {
return;
}
if (field.ui === false) {
return;
}
const base: ViewItemModel = {
id: `${field.id}`,
dataIndexStr,
required: field.isRequired,
order: field.ui && field.ui.order,
title: this.getI18NKey(field),
description: this.getI18NKey(field, true),
key: field.id,
isMany: field.isArray,
sorter: true,
rendererType: this.getRendererType(field),
enumValues: this.getEnumValuesLabels(
field.enumValues as EnumConfigType[]
),
defaultValue: ModelUtils.getDefaultValue(
XBridge.fieldToGenericField(field)
),
};
if (field.ui.form) {
base.form = {
component: field.ui.form.component,
props: JSON.stringify(field.ui.form.props),
};
}
let subfields = field.model ? field.cleaned.model.fields : field.subfields;
if (!subfields) {
subfields = [];
}
// TODO: maybe run the check against it being an object
if (subfields.length > 0) {
if (!this.isForm(mode)) {
// LIST/VIEW MODE
if (field.isArray) {
store.push(base);
} else {
// We spread it and add the other fields, since it's an object
subfields.forEach((subfield) => {
if (subfield.ui !== false && subfield.ui[mode]) {
store.push(
Object.assign({}, base, {
id: `${field.id}.${subfield.id}`,
isMany: subfield.isArray,
title: this.getI18NKey(subfield),
description: this.getI18NKey(subfield, true),
required: subfield.isRequired,
order: subfield.ui && subfield.ui.order,
dataIndexStr: `["${field.id}", "${subfield.id}"]`,
rendererType: this.getRendererType(subfield),
enumValues: this.getEnumValuesLabels(
subfield.enumValues as EnumConfigType[]
),
})
);
}
});
}
} else {
// FORM MODE CREATE/EDIT
store.push(
Object.assign({}, base, {
subfields: subfields.map((subfield: Field) => {
return Object.assign({}, base, {
id: subfield.id,
isMany: subfield.isArray,
required: subfield.isRequired,
order: subfield.ui && subfield.ui.order,
title: this.getI18NKey(subfield),
description: this.getI18NKey(subfield, true),
dataIndexStr: `["${field.id}", "${subfield.id}"]`,
rendererType: this.getRendererType(subfield),
enumValues: this.getEnumValuesLabels(
subfield.enumValues as EnumConfigType[]
),
});
}),
})
);
}
} else {
// check sanity
if (this.isForm(mode)) {
if (field.isRelationStorageField || field.isReducer) {
throw new Error(
`(${this.studioCollection.id}:${field.id}) You cannot add a relational storage field or a reducer to the ui components: "edit" nor "create", for relations use the relation's ui options for their presence. Reducers are derived from existing data so they cannot belong in forms.`
);
}
}
store.push(base);
}
}
getI18NKey(element: Field | Relation, isDescription = false): string | null {
let label = `management.${this.generateI18NName()}.fields.`;
label += element.getI18NSignature().key;
if (!element.description && isDescription) {
return null;
}
if (isDescription) {
return (label += "_description");
}
return label;
}
protected getEnumValuesLabels(values: EnumConfigType[]) {
return values.map((enumElement) => {
return {
label: enumElement.label,
value: enumElement.value,
};
});
}
protected isForm(mode: UIModeType) {
return ["edit", "create", "listFilters"].includes(mode);
}
protected getRendererType(field: Field): RendererType {
if (["integer", "float"].includes(field.type)) {
return "number";
}
return field.type;
}
protected getRendererTypeForRelation(_relation: Relation): RendererType {
const relation = _relation.cleaned;
if (relation.to.id === "AppFiles" && !relation.isMany) {
return "file";
}
if (relation.to.id === "AppFiles" && relation.isMany) {
return "files";
}
if (relation.to.id === "AppFileGroups") {
return "fileGroup";
}
return "relation";
}
} | the_stack |
* @module iModels
*/
import { join } from "path";
import { IModelJsNative } from "@bentley/imodeljs-native";
import {
AccessToken, BeEvent, BentleyStatus, ChangeSetStatus, DbResult, Guid, GuidString, Id64, Id64Arg, Id64Array, Id64Set, Id64String, IModelStatus,
JsonUtils, Logger, OpenMode,
} from "@itwin/core-bentley";
import {
AxisAlignedBox3d, BRepGeometryCreate, BriefcaseId, BriefcaseIdValue, CategorySelectorProps, ChangesetIdWithIndex, ChangesetIndexAndId, Code,
CodeSpec, CreateEmptySnapshotIModelProps, CreateEmptyStandaloneIModelProps, CreateSnapshotIModelProps, DbQueryRequest, DisplayStyleProps,
DomainOptions, EcefLocation, ECSqlReader, ElementAspectProps, ElementGeometryRequest, ElementGeometryUpdate, ElementGraphicsRequestProps,
ElementLoadProps, ElementProps, EntityMetaData, EntityProps, EntityQueryParams, FilePropertyProps, FontMap, FontProps, GeoCoordinatesRequestProps,
GeoCoordinatesResponseProps, GeometryContainmentRequestProps, GeometryContainmentResponseProps, IModel, IModelCoordinatesRequestProps,
IModelCoordinatesResponseProps, IModelError, IModelNotFoundResponse, IModelTileTreeProps, LocalFileName, MassPropertiesRequestProps,
MassPropertiesResponseProps, ModelLoadProps, ModelProps, ModelSelectorProps, OpenBriefcaseProps, ProfileOptions, PropertyCallback, QueryBinder,
QueryOptions, QueryOptionsBuilder, QueryRowFormat, RpcActivity, SchemaState, SheetProps, SnapRequestProps, SnapResponseProps, SnapshotOpenOptions,
SpatialViewDefinitionProps, StandaloneOpenOptions, TextureData, TextureLoadProps, ThumbnailProps, UpgradeOptions, ViewDefinitionProps,
ViewQueryParams, ViewStateLoadProps, ViewStateProps,
} from "@itwin/core-common";
import { Range3d } from "@itwin/core-geometry";
import { BackendLoggerCategory } from "./BackendLoggerCategory";
import { BriefcaseManager, PullChangesArgs, PushChangesArgs } from "./BriefcaseManager";
import { CheckpointManager, CheckpointProps, V2CheckpointManager } from "./CheckpointManager";
import { ClassRegistry, MetaDataRegistry } from "./ClassRegistry";
import { CodeSpecs } from "./CodeSpecs";
import { ConcurrentQuery } from "./ConcurrentQuery";
import { ECSqlStatement } from "./ECSqlStatement";
import { Element, SectionDrawing, Subject } from "./Element";
import { ElementAspect, ElementMultiAspect, ElementUniqueAspect } from "./ElementAspect";
import { generateElementGraphics } from "./ElementGraphics";
import { Entity, EntityClassType } from "./Entity";
import { ExportGraphicsOptions, ExportPartGraphicsOptions } from "./ExportGraphics";
import { IModelHost } from "./IModelHost";
import { IModelJsFs } from "./IModelJsFs";
import { IpcHost } from "./IpcHost";
import { Model } from "./Model";
import { Relationships } from "./Relationship";
import { ServerBasedLocks } from "./ServerBasedLocks";
import { SqliteStatement, StatementCache } from "./SqliteStatement";
import { TxnManager } from "./TxnManager";
import { DrawingViewDefinition, SheetViewDefinition, ViewDefinition } from "./ViewDefinition";
/** Logger category used for diagnostics emitted from this file. */
const loggerCategory: string = BackendLoggerCategory.IModelDb;
/** Options for [[IModelDb.Models.updateModel]]
 * @note To mark *only* the geometry as changed, use [[IModelDb.Models.updateGeometryGuid]] instead.
 * @public
 */
export interface UpdateModelOptions extends ModelProps {
  /** If defined, update the last modify time of the Model */
  updateLastMod?: boolean;
  /** If defined, update the GeometryGuid of the Model */
  geometryChanged?: boolean;
}
/** Options supplied to [[IModelDb.computeProjectExtents]].
 * Both flags are opt-in; omitting them keeps the result minimal.
 * @public
 */
export interface ComputeProjectExtentsOptions {
  /** If true, the result will include `extentsWithOutliers`. */
  reportExtentsWithOutliers?: boolean;
  /** If true, the result will include `outliers`. */
  reportOutliers?: boolean;
}
/** The result of [[IModelDb.computeProjectExtents]].
 * The optional members are present only when requested via [[ComputeProjectExtentsOptions]].
 * @public
 */
export interface ComputedProjectExtents {
  /** The computed extents, excluding any outlier elements. */
  extents: Range3d;
  /** If requested by caller, the computed extents, *including* any outlier elements. */
  extentsWithOutliers?: Range3d;
  /** If requested by caller, the Ids of outlier elements excluded from the computed extents. */
  outliers?: Id64Array;
}
/**
 * Interface for acquiring element locks to coordinate simultaneous edits from multiple briefcases.
 * @beta
 */
export interface LockControl {
  /**
   * true if this LockControl uses a server-based concurrency approach.
   */
  readonly isServerBased: boolean;
  /**
   * Close the local lock control database
   * @internal
   */
  close(): void;
  /**
   * Notification that a new element was just created. Called by [[Element.onInserted]]
   * @internal
   */
  elementWasCreated(id: Id64String): void;
  /**
   * Throw if locks are required and the exclusive lock is not held on the supplied element.
   * Note: there is no need to check the shared locks on parents/models since an element cannot hold the exclusive lock without first obtaining them.
   * Called by [[Element.onUpdate]], [[Element.onDelete]], etc.
   * @internal
   */
  checkExclusiveLock(id: Id64String, type: string, operation: string): void;
  /**
   * Throw if locks are required and a shared lock is not held on the supplied element.
   * Called by [[Element.onInsert]] to ensure shared lock is held on model and parent.
   * @internal
   */
  checkSharedLock(id: Id64String, type: string, operation: string): void;
  /**
   * Determine whether the supplied element currently holds the exclusive lock
   * @returns true only when this briefcase holds the exclusive lock on `id`
   */
  holdsExclusiveLock(id: Id64String): boolean;
  /**
   * Determine whether the supplied element currently holds a shared lock
   * @returns true only when this briefcase holds a shared lock on `id`
   */
  holdsSharedLock(id: Id64String): boolean;
  /**
   * Acquire the exclusive lock on one or more elements from the lock server, if locks are required and not already held.
   * If any required lock is not available, this method throws an exception and *none* of the requested locks are acquired.
   * > Note: acquiring the exclusive lock on an element requires also obtaining a shared lock on all its owner elements. This method will
   * attempt to acquire all necessary locks for the set of input ids.
   */
  acquireExclusiveLock(ids: Id64Arg): Promise<void>;
  /**
   * Acquire a shared lock on one or more elements from the lock server, if locks are required and not already held.
   * If any required lock is not available, this method throws an exception and *none* of the requested locks are acquired.
   * > Note: acquiring the shared lock on an element requires also obtaining a shared lock on all its owner elements. This method will
   * attempt to acquire all necessary locks for the set of input ids.
   */
  acquireSharedLock(ids: Id64Arg): Promise<void>;
  /**
   * Release all locks currently held by this Briefcase from the lock server.
   */
  releaseAllLocks(): Promise<void>;
}
/** A null-implementation of [[LockControl]] used when locking is disabled: every
 * acquire succeeds immediately, no lock is ever considered held, and conflicts
 * between briefcases are instead resolved by change-merging.
 */
class NoLocks implements LockControl {
  public get isServerBased() {
    return false;
  }
  public holdsExclusiveLock(): boolean {
    return false;
  }
  public holdsSharedLock(): boolean {
    return false;
  }
  public checkExclusiveLock(): void { }
  public checkSharedLock(): void { }
  public elementWasCreated(): void { }
  public async acquireExclusiveLock(): Promise<void> { }
  public async acquireSharedLock(): Promise<void> { }
  public async releaseAllLocks(): Promise<void> { }
  public clearAllLocks(): void { }
  public close(): void { }
}
/** @internal */
// NOTE(review): string keys apparently used to flag briefcase-local behavior
// ("StandaloneEdit", "NoLocking"); the read/write sites are outside this chunk.
export enum BriefcaseLocalValue {
  StandaloneEdit = "StandaloneEdit",
  NoLocking = "NoLocking"
}
/** Opens a BriefcaseDb, runs `fn` against it, and guarantees the db is closed afterwards. */
const withBriefcaseDb = async (briefcase: OpenBriefcaseArgs, fn: (_db: BriefcaseDb) => Promise<any>) => {
  const openedDb = await BriefcaseDb.open(briefcase);
  try {
    const result = await fn(openedDb);
    return result;
  } finally {
    openedDb.close();
  }
};
/** An iModel database file. The database file can either be a briefcase or a snapshot.
* @see [Accessing iModels]($docs/learning/backend/AccessingIModels.md)
* @see [About IModelDb]($docs/learning/backend/IModelDb.md)
* @public
*/
export abstract class IModelDb extends IModel {
  // Guards the one-time listener wiring done in initializeIModelDb().
  private _initialized = false;
  /** Keep track of open imodels to support `tryFind` for RPC purposes */
  private static readonly _openDbs = new Map<string, IModelDb>();
  public static readonly defaultLimit = 1000; // default limit for batching queries
  public static readonly maxLimit = 10000; // maximum limit for batching queries
  // Accessors for the major kinds of content in this iModel.
  public readonly models = new IModelDb.Models(this);
  public readonly elements = new IModelDb.Elements(this);
  public readonly views = new IModelDb.Views(this);
  public readonly tiles = new IModelDb.Tiles(this);
  // Backing fields for caches and lazily-created helpers.
  private _relationships?: Relationships;
  private readonly _statementCache = new StatementCache<ECSqlStatement>();
  private readonly _sqliteStatementCache = new StatementCache<SqliteStatement>();
  private _codeSpecs?: CodeSpecs;
  private _classMetaDataRegistry?: MetaDataRegistry;
  protected _fontMap?: FontMap;
  protected _concurrentQueryStats = { resetTimerHandle: (null as any), logTimerHandle: (null as any), lastActivityTime: Date.now(), dispose: () => { } };
  private readonly _snaps = new Map<string, IModelJsNative.SnapRequest>();
  private static _shutdownListener: VoidFunction | undefined; // so we only register listener once
  /** @internal */
  protected _locks?: LockControl = new NoLocks();
  /**
   * Get the lock control for this iModel.
   * @beta
   */
  public get locks() { return this._locks!; }
  /** Acquire the exclusive schema lock on this iModel.
   * > Note: To acquire the schema lock, all other briefcases must first release *all* their locks. No other briefcases
   * will be able to acquire *any* locks while the schema lock is held.
   */
  public async acquireSchemaLock(): Promise<void> {
    // The schema lock is modeled as the exclusive lock on the repository model.
    return this.locks.acquireExclusiveLock(IModel.repositoryModelId);
  }
  /** determine whether the schema lock is currently held for this iModel. */
  public get holdsSchemaLock() {
    return this.locks.holdsExclusiveLock(IModel.repositoryModelId);
  }
  /** Event called after a changeset is applied to this IModelDb. */
  public readonly onChangesetApplied = new BeEvent<() => void>();
  /** @internal */
  public notifyChangesetApplied() {
    // Refresh the cached changeset info from the native layer before notifying listeners.
    this.changeset = this.nativeDb.getCurrentChangeset();
    this.onChangesetApplied.raiseEvent();
  }
  /** The FontMap for this iModel, read lazily from the native db and cached. */
  public get fontMap(): FontMap { return this._fontMap ?? (this._fontMap = new FontMap(this.nativeDb.readFontMap())); }
  /** Embed a font in this iModel. Drops the cached fontMap so it is re-read on next access. */
  public embedFont(prop: FontProps): FontProps { this._fontMap = undefined; return this.nativeDb.embedFont(prop); }
  /** Check if this iModel has been opened read-only or not. */
  public get isReadonly(): boolean { return this.openMode === OpenMode.Readonly; }
  /** The Guid that identifies this iModel. */
  public override get iModelId(): GuidString { return super.iModelId!; } // GuidString | undefined for the IModel superclass, but required for all IModelDb subclasses
  private _nativeDb?: IModelJsNative.DgnDb;
  /** @internal*/
  public get nativeDb(): IModelJsNative.DgnDb { return this._nativeDb!; }
  /** Get the full path fileName of this iModelDb
   * @note this member is only valid while the iModel is opened.
   */
  public get pathName(): LocalFileName { return this.nativeDb.getFilePath(); }
  /** Wraps an already-opened native DgnDb, registers this instance in the open-db map,
   * and (once per process) installs a shutdown hook that force-closes any still-open dbs.
   * @internal
   */
  protected constructor(args: { nativeDb: IModelJsNative.DgnDb, key: string, changeset?: ChangesetIdWithIndex }) {
    super({ ...args, iTwinId: args.nativeDb.getITwinId(), iModelId: args.nativeDb.getIModelId() });
    this._nativeDb = args.nativeDb;
    // Give the native layer a back-pointer to this JS object.
    this.nativeDb.setIModelDb(this);
    this.initializeIModelDb();
    IModelDb._openDbs.set(this._fileKey, this);
    if (undefined === IModelDb._shutdownListener) { // the first time we create an IModelDb, add a listener to close any orphan files at shutdown.
      IModelDb._shutdownListener = IModelHost.onBeforeShutdown.addListener(() => {
        IModelDb._openDbs.forEach((db) => { // N.B.: db.close() removes from _openedDbs
          try {
            // Discard any pending changes before closing; errors are intentionally ignored at shutdown.
            db.abandonChanges();
            db.close();
          } catch { }
        });
      });
    }
  }
  /** Close this IModel, if it is currently open. Safe to call repeatedly. */
  public close(): void {
    if (!this.isOpen)
      return; // don't continue if already closed
    // Notify listeners / clear caches first, then deregister from the open-db map.
    this.beforeClose();
    IModelDb._openDbs.delete(this._fileKey);
    // Release locks before freeing the native db; dropping _nativeDb makes isOpen false.
    this.locks.close();
    this._locks = undefined;
    this.nativeDb.closeIModel();
    this._nativeDb = undefined; // the underlying nativeDb has been freed by closeIModel
  }
  /** No-op in the base class; subclasses may override to refresh server connections.
   * @internal
   */
  public async reattachDaemon(_accessToken: AccessToken): Promise<void> { }
  /** Event called when the iModel is about to be closed. */
  public readonly onBeforeClose = new BeEvent<() => void>();
  /**
   * Called by derived classes before closing the connection
   * @internal
   */
  protected beforeClose() {
    // Notify listeners first, then drop caches and stop concurrent-query bookkeeping.
    this.onBeforeClose.raiseEvent();
    this.clearCaches();
    this._concurrentQueryStats.dispose();
  }
  /** Re-reads the iModel's properties from the native db, and on the very first call
   * wires property-change events through to the frontend (briefcases under IpcHost only).
   * @internal
   */
  protected initializeIModelDb() {
    const props = this.nativeDb.getIModelProps();
    super.initialize(props.rootSubject.name, props);
    // The listener wiring below must only happen once per instance.
    if (this._initialized)
      return;
    this._initialized = true;
    // Only briefcases with a valid IpcHost forward change notifications.
    const db = this.isBriefcaseDb() ? this : undefined;
    if (!db || !IpcHost.isValid)
      return;
    db.onNameChanged.addListener(() => IpcHost.notifyTxns(db, "notifyIModelNameChanged", db.name));
    db.onRootSubjectChanged.addListener(() => IpcHost.notifyTxns(db, "notifyRootSubjectChanged", db.rootSubject));
    db.onProjectExtentsChanged.addListener(() => IpcHost.notifyTxns(db, "notifyProjectExtentsChanged", db.projectExtents.toJSON()));
    db.onGlobalOriginChanged.addListener(() => IpcHost.notifyTxns(db, "notifyGlobalOriginChanged", db.globalOrigin.toJSON()));
    db.onEcefLocationChanged.addListener(() => IpcHost.notifyTxns(db, "notifyEcefLocationChanged", db.ecefLocation?.toJSON()));
    db.onGeographicCoordinateSystemChanged.addListener(() => IpcHost.notifyTxns(db, "notifyGeographicCoordinateSystemChanged", db.geographicCoordinateSystem?.toJSON()));
  }
  /** Returns true if this is a BriefcaseDb
   * @see [[BriefcaseDb.open]]
   */
  public get isBriefcase(): boolean { return false; }
  /** Type guard for instanceof [[BriefcaseDb]] */
  public isBriefcaseDb(): this is BriefcaseDb { return this.isBriefcase; }
  /** Returns true if this is a SnapshotDb
   * @see [[SnapshotDb.open]]
   */
  public get isSnapshot(): boolean { return false; }
  /** Type guard for instanceof [[SnapshotDb]] */
  public isSnapshotDb(): this is SnapshotDb { return this.isSnapshot; }
  /** Returns true if this is a *standalone* iModel
   * @see [[StandaloneDb.open]]
   * @internal
   */
  public get isStandalone(): boolean { return false; }
  /** Type guard for instanceof [[StandaloneDb]]
   * @internal
   */
  public isStandaloneDb(): this is StandaloneDb { return this.isStandalone; }
  /** Return `true` if the underlying nativeDb is open and valid.
   * @internal
   */
  public get isOpen(): boolean { return undefined !== this.nativeDb; }
  /** Get the briefcase Id of this iModel.
   * Returns [[BriefcaseIdValue.Illegal]] when the iModel is not open.
   */
  public getBriefcaseId(): BriefcaseId { return this.isOpen ? this.nativeDb.getBriefcaseId() : BriefcaseIdValue.Illegal; }
/**
 * Use a prepared ECSQL statement, potentially from the statement cache. If the requested statement doesn't exist
 * in the statement cache, a new statement is prepared. After the callback completes, the statement is reset and saved
 * in the statement cache so it can be reused in the future. Use this method for ECSQL statements that will be
 * reused often and are expensive to prepare. The statement cache holds the most recently used statements, discarding
 * the oldest statements as it fills. For statements you don't intend to reuse, instead use [[withStatement]].
 * @param ecsql The ECSQL statement to execute
 * @param callback the callback to invoke on the prepared statement
 * @param logErrors Determines if an error will be logged if the statement fails to prepare
 * @returns the value returned by `callback`.
 * @see [[withStatement]]
 * @public
 */
public withPreparedStatement<T>(ecsql: string, callback: (stmt: ECSqlStatement) => T, logErrors = true): T {
const stmt = this._statementCache.findAndRemove(ecsql) ?? this.prepareStatement(ecsql, logErrors);
// Returns the statement to the cache, or disposes it if the cache is full. Must run exactly once.
const release = () => this._statementCache.addOrDispose(stmt);
try {
const val = callback(stmt);
if (val instanceof Promise) {
// Async callback: defer the release until the promise settles (fulfilled or rejected).
val.then(release, release);
} else {
release();
}
return val;
} catch (err: any) {
release();
throw err;
}
}
/**
 * Prepare an ECSQL statement, invoke a callback on it, then dispose the statement.
 * Use this method for ECSQL statements that are either not expected to be reused or are cheap to prepare.
 * For statements that will be reused often, instead use [[withPreparedStatement]].
 * @param ecsql The ECSQL statement to execute
 * @param callback the callback to invoke on the prepared statement
 * @param logErrors Determines if an error will be logged if the statement fails to prepare
 * @returns the value returned by `callback`.
 * @see [[withPreparedStatement]]
 * @public
 */
public withStatement<T>(ecsql: string, callback: (stmt: ECSqlStatement) => T, logErrors = true): T {
  const statement = this.prepareStatement(ecsql, logErrors);
  const dispose = () => statement.dispose();
  try {
    const result = callback(statement);
    if (result instanceof Promise)
      result.then(dispose, dispose); // async callback: dispose only after the promise settles
    else
      dispose();
    return result;
  } catch (err: any) {
    dispose();
    throw err;
  }
}
/** Execute a query and read its results, along with metadata, as a stream.
 * @param ecsql The ECSQL statement to execute.
 * @param params The values to bind to the parameters (if the ECSQL has any).
 * @param config Flags that control how the query is executed.
 * @returns an *ECSqlReader* that iterates over the result set and exposes metadata.
 * @beta
 * */
public createQueryReader(ecsql: string, params?: QueryBinder, config?: QueryOptions): ECSqlReader {
  if (!this._nativeDb || !this._nativeDb.isOpen())
    throw new IModelError(DbResult.BE_SQLITE_ERROR, "db not open");
  // The reader pulls pages through this executor, which forwards each request to the native db.
  const executor = {
    execute: async (request: DbQueryRequest) => ConcurrentQuery.executeQueryRequest(this._nativeDb!, request),
  };
  return new ECSqlReader(executor, ecsql, params, config);
}
/** Execute a query and stream its results.
 * The result of the query is an async iterator over the rows. The iterator fetches the next page automatically once the rows in the current page have been read.
 * [ECSQL row]($docs/learning/ECSQLRowFormat).
 *
 * See also:
 * - [ECSQL Overview]($docs/learning/backend/ExecutingECSQL)
 * - [Code Examples]($docs/learning/backend/ECSQLCodeExamples)
 *
 * @param ecsql The ECSQL statement to execute
 * @param params The values to bind to the parameters (if the ECSQL has any).
 * @param rowFormat Specify the format in which rows are returned. It defaults to array format; to be compatible with previous versions use *QueryRowFormat.UseJsPropertyNames*
 * @param options Flags that control how the query is executed.
 * @returns Returns the query result as an *AsyncIterableIterator<any>* which lazily loads results as needed. The row format is determined by the *rowFormat* parameter.
 * See [ECSQL row format]($docs/learning/ECSQLRowFormat) for details about the format of the returned rows.
 * @throws [IModelError]($common) If there was any error while submitting, preparing or stepping into the query
 */
public async * query(ecsql: string, params?: QueryBinder, rowFormat = QueryRowFormat.UseArrayIndexes, options?: QueryOptions): AsyncIterableIterator<any> {
const builder = new QueryOptionsBuilder(options);
if (rowFormat === QueryRowFormat.UseJsPropertyNames) {
builder.setConvertClassIdsToNames(true);
}
const reader = this.createQueryReader(ecsql, params, builder.getOptions());
while (await reader.step())
yield reader.formatCurrentRow(rowFormat);
}
/** Compute the number of rows that would be returned by the ECSQL.
 *
 * See also:
 * - [ECSQL Overview]($docs/learning/backend/ExecutingECSQL)
 * - [Code Examples]($docs/learning/backend/ECSQLCodeExamples)
 *
 * @param ecsql The ECSQL statement to execute
 * @param params The values to bind to the parameters (if the ECSQL has any).
 * See "[iModel.js Types used in ECSQL Parameter Bindings]($docs/learning/ECSQLParameterTypes)" for details.
 * @returns the row count.
 * @throws [IModelError]($common) If the statement is invalid
 */
public async queryRowCount(ecsql: string, params?: QueryBinder): Promise<number> {
  // Wrap the caller's statement in a subquery so we can count its rows without parsing it.
  const countEcsql = `select count(*) from (${ecsql})`;
  for await (const row of this.query(countEcsql, params))
    return row[0] as number;
  throw new IModelError(DbResult.BE_SQLITE_ERROR, "Failed to get row count");
}
/** Cancel any previous query with the same token, then execute the current specified query.
 * The result of the query is an async iterator over the rows. The iterator fetches the next page automatically once the rows in the current page have been read.
 * [ECSQL row]($docs/learning/ECSQLRowFormat).
 *
 * See also:
 * - [ECSQL Overview]($docs/learning/backend/ExecutingECSQL)
 * - [Code Examples]($docs/learning/backend/ECSQLCodeExamples)
 *
 * @param ecsql The ECSQL statement to execute
 * @param token Non-empty restart token. Any previous query with the same token is cancelled, which causes an
 * exception that the user code for that query must handle.
 * @param params The values to bind to the parameters (if the ECSQL has any).
 * @param rowFormat Specify the format in which rows are returned. It defaults to array format; to be compatible with previous versions use *QueryRowFormat.UseJsPropertyNames*
 * @param options Flags that control how the query is executed.
 * @returns Returns the query result as an *AsyncIterableIterator<any>* which lazily loads results as needed. The row format is determined by the *rowFormat* parameter.
 * See [ECSQL row format]($docs/learning/ECSQLRowFormat) for details about the format of the returned rows.
 * @throws [IModelError]($common) If there was any error while submitting, preparing or stepping into the query
 */
public async * restartQuery(token: string, ecsql: string, params?: QueryBinder, rowFormat = QueryRowFormat.UseArrayIndexes, options?: QueryOptions): AsyncIterableIterator<any> {
for await (const row of this.query(ecsql, params, rowFormat, new QueryOptionsBuilder(options).setRestartToken(token).getOptions())) {
yield row;
}
}
/**
 * Use a prepared SQL statement, potentially from the statement cache. If the requested statement doesn't exist
 * in the statement cache, a new statement is prepared. After the callback completes, the statement is reset and saved
 * in the statement cache so it can be reused in the future. Use this method for SQL statements that will be
 * reused often and are expensive to prepare. The statement cache holds the most recently used statements, discarding
 * the oldest statements as it fills. For statements you don't intend to reuse, instead use [[withSqliteStatement]].
 * @param sql The SQLite SQL statement to execute
 * @param callback the callback to invoke on the prepared statement
 * @param logErrors Determine if errors are logged or not
 * @returns the value returned by `callback`.
 * @see [[withSqliteStatement]]
 * @public
 */
public withPreparedSqliteStatement<T>(sql: string, callback: (stmt: SqliteStatement) => T, logErrors = true): T {
const stmt = this._sqliteStatementCache.findAndRemove(sql) ?? this.prepareSqliteStatement(sql, logErrors);
// Returns the statement to the cache, or disposes it if the cache is full. Must run exactly once.
const release = () => this._sqliteStatementCache.addOrDispose(stmt);
try {
const val: T = callback(stmt);
if (val instanceof Promise) {
// Async callback: defer the release until the promise settles (fulfilled or rejected).
val.then(release, release);
} else {
release();
}
return val;
} catch (err: any) {
release();
throw err;
}
}
/**
 * Prepare a SQLite SQL statement, invoke a callback on it, then dispose the statement.
 * Use this method for SQL statements that are either not expected to be reused or are cheap to prepare.
 * For statements that will be reused often, instead use [[withPreparedSqliteStatement]].
 * @param sql The SQLite SQL statement to execute
 * @param callback the callback to invoke on the prepared statement
 * @param logErrors Determine if errors are logged or not
 * @returns the value returned by `callback`.
 * @public
 */
public withSqliteStatement<T>(sql: string, callback: (stmt: SqliteStatement) => T, logErrors = true): T {
  const statement = this.prepareSqliteStatement(sql, logErrors);
  const dispose = () => statement.dispose();
  try {
    const result: T = callback(statement);
    if (result instanceof Promise)
      result.then(dispose, dispose); // async callback: dispose only after the promise settles
    else
      dispose();
    return result;
  } catch (err: any) {
    dispose();
    throw err;
  }
}
/** Prepare a SQLite SQL statement against this iModel's database.
 * @param sql The SQL statement to prepare
 * @param logErrors Determine if errors are logged or not
 * @throws [[IModelError]] if there is a problem preparing the statement.
 * @internal
 */
public prepareSqliteStatement(sql: string, logErrors = true): SqliteStatement {
  const statement = new SqliteStatement(sql);
  statement.prepare(this.nativeDb, logErrors);
  return statement;
}
/** Query for a set of entity ids, given an EntityQueryParams.
 * @param params The query parameters. The `limit` and `offset` members should be used to page results.
 * @returns an Id64Set with the results of the query
 * @throws [[IModelError]] if the generated statement is invalid or [IModelDb.maxLimit]($backend) is exceeded while collecting ids.
 *
 * *Example:*
 * ``` ts
 * [[include:ECSQL-backend-queries.select-element-by-code-value-using-queryEntityIds]]
 * ```
 */
public queryEntityIds(params: EntityQueryParams): Id64Set {
  // Assemble the SELECT from the optional clauses supplied by the caller.
  let ecsql = `SELECT ECInstanceId FROM ${params.only ? "ONLY " : ""}${params.from}`;
  if (params.where)
    ecsql += ` WHERE ${params.where}`;
  if (params.orderBy)
    ecsql += ` ORDER BY ${params.orderBy}`;
  if (typeof params.limit === "number" && params.limit > 0)
    ecsql += ` LIMIT ${params.limit}`;
  if (typeof params.offset === "number" && params.offset > 0)
    ecsql += ` OFFSET ${params.offset}`;
  const ids = new Set<string>();
  this.withPreparedStatement(ecsql, (stmt) => {
    if (params.bindings)
      stmt.bindValues(params.bindings);
    for (const row of stmt) {
      if (undefined === row.id)
        continue;
      ids.add(row.id);
      // Guard against unbounded result sets; callers should page with limit/offset instead.
      if (ids.size > IModelDb.maxLimit)
        throw new IModelError(IModelStatus.BadRequest, "Max LIMIT exceeded in SELECT statement");
    }
  });
  return ids;
}
/** Clear all in-memory caches held in this IModelDb (the ECSQL and SQLite statement caches). */
public clearCaches() {
this._statementCache.clear();
this._sqliteStatementCache.clear();
}
/** Update the project extents for this iModel and persist the change via [[updateIModelProps]].
 * <p><em>Example:</em>
 * ``` ts
 * [[include:IModelDb.updateProjectExtents]]
 * ```
 */
public updateProjectExtents(newExtents: AxisAlignedBox3d) {
this.projectExtents = newExtents;
this.updateIModelProps();
}
/** Compute an appropriate project extents for this iModel based on the ranges of all spatial elements.
 * Typically, the result is simply the union of the ranges of all spatial elements. However, the algorithm also detects "outlier elements",
 * whose placements locate them so far from the rest of the spatial geometry that they are considered statistically insignificant. The
 * range of an outlier element does not contribute to the computed extents.
 * @param options Specifies the level of detail desired in the return value.
 * @returns the computed extents.
 * @note This method does not modify the iModel's stored project extents. @see [[updateProjectExtents]].
 */
public computeProjectExtents(options?: ComputeProjectExtentsOptions): ComputedProjectExtents {
  const includeFullExtents = options?.reportExtentsWithOutliers === true;
  const includeOutliers = options?.reportOutliers === true;
  const computed = this.nativeDb.computeProjectExtents(includeFullExtents, includeOutliers);
  return {
    extents: Range3d.fromJSON(computed.extents),
    extentsWithOutliers: computed.fullExtents ? Range3d.fromJSON(computed.fullExtents) : undefined,
    outliers: computed.outliers,
  };
}
/** Update the [EcefLocation]($docs/learning/glossary#eceflocation) of this iModel and persist the change via [[updateIModelProps]]. */
public updateEcefLocation(ecef: EcefLocation) {
this.setEcefLocation(ecef);
this.updateIModelProps();
}
/** Update the IModelProps of this iModel in the database. */
public updateIModelProps(): void {
this.nativeDb.updateIModelProps(this.toJSON());
}
/** Commit pending changes to this iModel.
 * @param description Optional description of the changes
 * @throws [[IModelError]] if the iModel was opened read-only, or if there is a problem saving changes, or if there are pending, un-processed lock or code requests.
 */
public saveChanges(description?: string): void {
if (this.openMode === OpenMode.Readonly)
throw new IModelError(IModelStatus.ReadOnly, "IModelDb was opened read-only");
const stat = this.nativeDb.saveChanges(description);
if (DbResult.BE_SQLITE_OK !== stat)
throw new IModelError(stat, `Could not save changes (${description})`);
}
/** Abandon pending changes in this iModel. */
public abandonChanges(): void {
this.nativeDb.abandonChanges();
}
/** Reverse (undo) the most recent transaction(s).
 * @internal
 */
public reverseTxns(numOperations: number): IModelStatus {
return this.nativeDb.reverseTxns(numOperations);
}
/** Reinstate (redo) the most recently reversed transaction.
 * @internal
 */
public reinstateTxn(): IModelStatus {
return this.nativeDb.reinstateTxn();
}
/** Restart the current transaction session.
 * @internal
 */
public restartTxnSession(): void {
return this.nativeDb.restartTxnSession();
}
/** Import an ECSchema. On success, the schema definition is stored in the iModel.
* This method is asynchronous (must be awaited) because, in the case where this IModelDb is a briefcase, this method first obtains the schema lock from the iModel server.
* You must import a schema into an iModel before you can insert instances of the classes in that schema. See [[Element]]
* @param schemaFileName array of Full paths to ECSchema.xml files to be imported.
* @throws [[IModelError]] if the schema lock cannot be obtained or there is a problem importing the schema.
* @note Changes are saved if importSchemas is successful and abandoned if not successful.
* @see querySchemaVersion
*/
public async importSchemas(schemaFileNames: LocalFileName[]): Promise<void> {
if (this.isSnapshot || this.isStandalone) {
const status = this.nativeDb.importSchemas(schemaFileNames);
if (DbResult.BE_SQLITE_OK !== status)
throw new IModelError(status, "Error importing schema");
this.clearCaches();
return;
}
await this.acquireSchemaLock();
const stat = this.nativeDb.importSchemas(schemaFileNames);
if (DbResult.BE_SQLITE_OK !== stat) {
throw new IModelError(stat, "Error importing schema");
}
this.clearCaches();
}
/** Import ECSchema(s) serialized to XML. On success, the schema definition is stored in the iModel.
* This method is asynchronous (must be awaited) because, in the case where this IModelDb is a briefcase, this method first obtains the schema lock from the iModel server.
* You must import a schema into an iModel before you can insert instances of the classes in that schema. See [[Element]]
* @param serializedXmlSchemas The xml string(s) created from a serialized ECSchema.
* @throws [[IModelError]] if the schema lock cannot be obtained or there is a problem importing the schema.
* @note Changes are saved if importSchemaStrings is successful and abandoned if not successful.
* @see querySchemaVersion
* @alpha
*/
public async importSchemaStrings(serializedXmlSchemas: string[]): Promise<void> {
if (this.isSnapshot || this.isStandalone) {
const status = this.nativeDb.importXmlSchemas(serializedXmlSchemas);
if (DbResult.BE_SQLITE_OK !== status) {
throw new IModelError(status, "Error importing schema");
}
this.clearCaches();
return;
}
await this.acquireSchemaLock();
const stat = this.nativeDb.importXmlSchemas(serializedXmlSchemas);
if (DbResult.BE_SQLITE_OK !== stat)
throw new IModelError(stat, "Error importing schema");
this.clearCaches();
}
/** Find an opened instance of any subclass of IModelDb, by filename.
 * @note this method returns an IModelDb if the filename is open for *any* subclass of IModelDb
 */
public static findByFilename(fileName: LocalFileName): IModelDb | undefined {
  for (const db of this._openDbs.values()) {
    if (db.pathName === fileName)
      return db;
  }
  return undefined;
}
/** Find an open IModelDb by its key.
 * @note This method is mainly for use by RPC implementations.
 * @throws [[IModelNotFoundResponse]] if an open IModelDb matching the key is not found.
 * @see [IModel.key]($common)
 */
public static findByKey(key: string): IModelDb {
  const found = this.tryFindByKey(key);
  if (found === undefined)
    throw new IModelNotFoundResponse(); // a very specific status for the RpcManager
  return found;
}
/** Attempt to find an open IModelDb by key.
 * @returns The matching IModelDb or `undefined`.
 */
public static tryFindByKey(key: string): IModelDb | undefined {
  const db = this._openDbs.get(key);
  return db;
}
/** Open a native DgnDb for the given file, assigning it a unique key.
 * @note `file.key` is assigned in place when the caller did not supply one.
 * @throws [[IModelError]] if the key is already in use, if a schema/profile upgrade is requested on a read-only open, or if the file cannot be opened.
 * @internal
 */
public static openDgnDb(file: { path: LocalFileName, key?: string }, openMode: OpenMode, upgradeOptions?: UpgradeOptions, props?: SnapshotOpenOptions): IModelJsNative.DgnDb {
file.key = file.key ?? Guid.createValue();
if (this.tryFindByKey(file.key))
throw new IModelError(IModelStatus.AlreadyOpen, `key [${file.key}] for file [${file.path}] is already in use`);
// Upgrades modify the file, so they require a read-write open.
const isUpgradeRequested = upgradeOptions?.domain === DomainOptions.Upgrade || upgradeOptions?.profile === ProfileOptions.Upgrade;
if (isUpgradeRequested && openMode !== OpenMode.ReadWrite)
throw new IModelError(IModelStatus.UpgradeFailed, "Cannot upgrade a Readonly Db");
try {
const nativeDb = new IModelHost.platform.DgnDb();
nativeDb.openIModel(file.path, openMode, upgradeOptions, props);
return nativeDb;
} catch (err: any) {
throw new IModelError(err.errorNumber, `Could not open iModel [${err.message}], ${file.path}`);
}
}
/**
 * Determines if the schemas in the Db must or can be upgraded by comparing them with those included in the
 * current version of the software.
 * @param filePath Full name of the briefcase including path
 * @param forReadWrite Pass true if validating for read-write scenarios - note that the schema version requirements
 * for opening the DgnDb read-write is more stringent than when opening the database read-only
 * @throws [[IModelError]] If the Db was in an invalid state and that causes a problem with validating schemas
 * @see [[BriefcaseDb.upgradeSchemas]] or [[StandaloneDb.upgradeSchemas]]
 * @see ($docs/learning/backend/IModelDb.md#upgrading-schemas-in-an-imodel)
 */
public static validateSchemas(filePath: LocalFileName, forReadWrite: boolean): SchemaState {
  const openMode = forReadWrite ? OpenMode.ReadWrite : OpenMode.Readonly;
  // Attempt to open with "check recommended upgrades": the open either succeeds (schemas are fine)
  // or fails with a status code describing what is wrong with the schemas/profile.
  let openResult = DbResult.BE_SQLITE_OK;
  try {
    const nativeDb = this.openDgnDb({ path: filePath }, openMode, { domain: DomainOptions.CheckRecommendedUpgrades });
    nativeDb.closeIModel();
  } catch (err: any) {
    openResult = err.errorNumber;
  }
  switch (openResult) {
    case DbResult.BE_SQLITE_OK:
      return SchemaState.UpToDate;
    case DbResult.BE_SQLITE_ERROR_ProfileTooOld:
    case DbResult.BE_SQLITE_ERROR_ProfileTooOldForReadWrite:
    case DbResult.BE_SQLITE_ERROR_SchemaTooOld:
      return SchemaState.TooOld;
    case DbResult.BE_SQLITE_ERROR_ProfileTooNew:
    case DbResult.BE_SQLITE_ERROR_ProfileTooNewForReadWrite:
    case DbResult.BE_SQLITE_ERROR_SchemaTooNew:
      return SchemaState.TooNew;
    case DbResult.BE_SQLITE_ERROR_SchemaUpgradeRecommended:
      return SchemaState.UpgradeRecommended;
    case DbResult.BE_SQLITE_ERROR_SchemaUpgradeRequired:
      return SchemaState.UpgradeRequired;
    case DbResult.BE_SQLITE_ERROR_InvalidProfileVersion:
      throw new IModelError(DbResult.BE_SQLITE_ERROR_InvalidProfileVersion, "The profile of the Db is invalid. Cannot upgrade or open the Db.");
    default:
      throw new IModelError(DbResult.BE_SQLITE_ERROR, "Error validating schemas. Cannot upgrade or open the Db.");
  }
}
/** Get the ClassMetaDataRegistry for this iModel, creating it lazily on first access.
 * @internal
 */
public get classMetaDataRegistry(): MetaDataRegistry {
  if (undefined === this._classMetaDataRegistry)
    this._classMetaDataRegistry = new MetaDataRegistry();
  return this._classMetaDataRegistry;
}
/** Get the linkTableRelationships for this IModel, creating it lazily on first access. */
public get relationships(): Relationships {
  if (!this._relationships)
    this._relationships = new Relationships(this);
  return this._relationships;
}
/** Get the CodeSpecs in this IModel, creating it lazily on first access. */
public get codeSpecs(): CodeSpecs {
  if (undefined === this._codeSpecs)
    this._codeSpecs = new CodeSpecs(this);
  return this._codeSpecs;
}
/** Insert a new CodeSpec into this iModel and return its Id.
 * @internal
 */
public insertCodeSpec(codeSpec: CodeSpec): Id64String {
  return this.nativeDb.insertCodeSpec(codeSpec.name, codeSpec.properties);
}
/** Prepare an ECSQL statement.
 * @param sql The ECSQL statement to prepare
 * @param logErrors Determines if an error will be logged if the statement fails to prepare
 * @throws [[IModelError]] if there is a problem preparing the statement.
 */
public prepareStatement(sql: string, logErrors = true): ECSqlStatement {
  const statement = new ECSqlStatement();
  statement.prepare(this.nativeDb, sql, logErrors);
  return statement;
}
/** Prepare an ECSQL statement.
 * @param sql The ECSQL statement to prepare
 * @returns `undefined` if there is a problem preparing the statement.
 */
public tryPrepareStatement(sql: string): ECSqlStatement | undefined {
  const statement = new ECSqlStatement();
  const prepared = DbResult.BE_SQLITE_OK === statement.tryPrepare(this.nativeDb, sql).status;
  return prepared ? statement : undefined;
}
/** Construct an entity (Element or Model) from an iModel.
 * @throws [[IModelError]] if the entity cannot be constructed.
 */
public constructEntity<T extends Entity>(props: EntityProps): T {
  const entityClass = this.getJsClass(props.classFullName);
  return new entityClass(props, this) as T;
}
/** Get the JavaScript class that handles a given entity class. */
public getJsClass<T extends typeof Entity>(classFullName: string): T {
  try {
    return ClassRegistry.getClass(classFullName, this) as T;
  } catch (err) {
    if (ClassRegistry.isNotFoundError(err)) {
      // The class is not registered yet: load its metadata and retry once.
      this.loadMetaData(classFullName);
      return ClassRegistry.getClass(classFullName, this) as T;
    }
    throw err;
  }
}
/** Get metadata for a class. This method will load the metadata from the iModel into the cache as a side-effect, if necessary.
 * @throws [[IModelError]] if the metadata cannot be found nor loaded.
 */
public getMetaData(classFullName: string): EntityMetaData {
  const cached = this.classMetaDataRegistry.find(classFullName);
  if (cached !== undefined)
    return cached;
  this.loadMetaData(classFullName);
  const loaded = this.classMetaDataRegistry.find(classFullName);
  if (loaded === undefined)
    throw ClassRegistry.makeMetaDataNotFoundError(classFullName); // do not log
  return loaded;
}
/** Invoke a callback on each property of the specified class, optionally including superclass properties.
 * @param iModel The IModel that contains the schema
 * @param classFullName The full class name to load the metadata, if necessary
 * @param wantSuper If true, superclass properties will also be processed
 * @param func The callback to be invoked on each property
 * @param includeCustom If true (default), include custom-handled properties in the iteration. Otherwise, skip custom-handled properties.
 * @note Custom-handled properties are core properties that have behavior enforced by C++ handlers.
 */
public static forEachMetaData(iModel: IModelDb, classFullName: string, wantSuper: boolean, func: PropertyCallback, includeCustom: boolean = true) {
  const metaData = iModel.getMetaData(classFullName); // will load if necessary
  for (const propertyName in metaData.properties) { // eslint-disable-line guard-for-in
    const property = metaData.properties[propertyName];
    // Skip custom-handled properties (unless they are orphans) when the caller opted out of them.
    const skip = !includeCustom && property.isCustomHandled && !property.isCustomHandledOrphan;
    if (!skip)
      func(propertyName, property);
  }
  if (wantSuper && metaData.baseClasses && metaData.baseClasses.length > 0) {
    for (const baseClass of metaData.baseClasses)
      this.forEachMetaData(iModel, baseClass, true, func, includeCustom);
  }
}
/** Load the metadata for a class (and, recursively, its base classes) into the registry cache.
 * No-op if the class is already cached.
 * @throws [[IModelError]] if the class name is malformed or its metadata cannot be read.
 * @internal
 */
private loadMetaData(classFullName: string) {
  if (this.classMetaDataRegistry.find(classFullName))
    return;
  const parts = classFullName.split(":");
  if (parts.length !== 2)
    throw new IModelError(IModelStatus.BadArg, `Invalid classFullName: ${classFullName}`);
  const val = this.nativeDb.getECClassMetaData(parts[0], parts[1]);
  if (val.error)
    throw new IModelError(val.error.status, `Error getting class meta data for: ${classFullName}`);
  const metaData = new EntityMetaData(JSON.parse(val.result!));
  this.classMetaDataRegistry.add(classFullName, metaData);
  // Recursive, to make sure that base classes are cached.
  if (metaData.baseClasses !== undefined && metaData.baseClasses.length > 0) {
    for (const baseClassName of metaData.baseClasses)
      this.loadMetaData(baseClassName);
  }
}
/** Query if this iModel contains the definition of the specified class.
 * @param classFullName The full name of the class, for example, SomeSchema:SomeClass
 * @returns true if the iModel contains the class definition or false if not.
 * @see querySchemaVersion
 * @see importSchema
 */
public containsClass(classFullName: string): boolean {
  // Accept either "Schema.Class" or "Schema:Class" by normalizing the first separator.
  const parts = classFullName.replace(".", ":").split(":");
  if (parts.length !== 2)
    return false;
  return undefined === this.nativeDb.getECClassMetaData(parts[0], parts[1]).error;
}
/** Query for a schema of the specified name in this iModel.
 * @returns The schema version as a semver-compatible string or `undefined` if the schema has not been imported.
 */
public querySchemaVersion(schemaName: string): string | undefined {
  const ecsql = `SELECT VersionMajor,VersionWrite,VersionMinor FROM ECDbMeta.ECSchemaDef WHERE Name=:schemaName LIMIT 1`;
  return this.withPreparedStatement(ecsql, (stmt: ECSqlStatement): string | undefined => {
    stmt.bindString("schemaName", schemaName);
    if (DbResult.BE_SQLITE_ROW !== stmt.step())
      return undefined;
    // ECSchemaDef versions map onto semver as (VersionMajor, VersionWrite, VersionMinor) -> (major, minor, patch).
    const major = stmt.getValue(0).getInteger();
    const write = stmt.getValue(1).getInteger();
    const minor = stmt.getValue(2).getInteger();
    return `${major}.${write}.${minor}`;
  });
}
/** Retrieve a named texture image from this iModel, as a TextureData.
 * @param props the texture load properties which must include the name of the texture to load
 * @returns the TextureData or undefined if the texture image is not present.
 * @alpha
 */
public async queryTextureData(props: TextureLoadProps): Promise<TextureData | undefined> {
return this.nativeDb.queryTextureData(props);
}
/** Query a "file property" from this iModel, as a string.
 * @returns the property string or undefined if the property is not present.
 */
public queryFilePropertyString(prop: FilePropertyProps): string | undefined {
return this.nativeDb.queryFileProperty(prop, true) as string | undefined;
}
/** Query a "file property" from this iModel, as a blob.
 * @returns the property blob or undefined if the property is not present.
 */
public queryFilePropertyBlob(prop: FilePropertyProps): Uint8Array | undefined {
return this.nativeDb.queryFileProperty(prop, false) as Uint8Array | undefined;
}
/** Save a "file property" to this iModel
 * @param prop the FilePropertyProps that describes the new property
 * @param strValue a string to save as the file property, or undefined
 * @param blobVal an optional blob to save as the file property
 */
public saveFileProperty(prop: FilePropertyProps, strValue: string | undefined, blobVal?: Uint8Array): void {
this.nativeDb.saveFileProperty(prop, strValue, blobVal);
}
/** Delete a "file property" from this iModel by saving undefined values for it.
 * @param prop the FilePropertyProps that describes the property
 */
public deleteFileProperty(prop: FilePropertyProps): void {
this.nativeDb.saveFileProperty(prop, undefined, undefined);
}
/** Query for the next available major id for a "file property" from this iModel.
 * @param prop the FilePropertyProps that describes the property
 * @returns the next available (that is, an unused) id for prop. If none are present, will return 0.
 */
public queryNextAvailableFileProperty(prop: FilePropertyProps) { return this.nativeDb.queryNextAvailableFileProperty(prop); }
/** Perform a snap for the supplied session, cancelling any snap already in progress for that session.
 * The pending request is tracked in `_snaps` so [[cancelSnap]] can find it, and removed when the snap settles.
 * @internal
 */
public async requestSnap(sessionId: string, props: SnapRequestProps): Promise<SnapResponseProps> {
let request = this._snaps.get(sessionId);
if (undefined === request) {
request = new IModelHost.platform.SnapRequest();
this._snaps.set(sessionId, request);
} else
request.cancelSnap();
try {
return await request.doSnap(this.nativeDb, JsonUtils.toObject(props));
} finally {
// Remove the tracked request whether the snap succeeded, failed, or was cancelled.
this._snaps.delete(sessionId);
}
}
/** Cancel a previously requested snap.
 * @internal
 */
public cancelSnap(sessionId: string): void {
const request = this._snaps.get(sessionId);
if (undefined !== request) {
request.cancelSnap();
this._snaps.delete(sessionId);
}
}
/** Get the clip containment status for the supplied elements. */
public async getGeometryContainment(props: GeometryContainmentRequestProps): Promise<GeometryContainmentResponseProps> {
return this.nativeDb.getGeometryContainment(JsonUtils.toObject(props));
}
/** Get the mass properties for the supplied elements. */
public async getMassProperties(props: MassPropertiesRequestProps): Promise<MassPropertiesResponseProps> {
return this.nativeDb.getMassProperties(JsonUtils.toObject(props));
}
/** Get the IModel coordinate corresponding to each GeoCoordinate point in the input. */
public async getIModelCoordinatesFromGeoCoordinates(props: IModelCoordinatesRequestProps): Promise<IModelCoordinatesResponseProps> {
return this.nativeDb.getIModelCoordinatesFromGeoCoordinates(props);
}
/** Get the GeoCoordinate (longitude, latitude, elevation) corresponding to each IModel Coordinate point in the input. */
public async getGeoCoordinatesFromIModelCoordinates(props: GeoCoordinatesRequestProps): Promise<GeoCoordinatesResponseProps> {
return this.nativeDb.getGeoCoordinatesFromIModelCoordinates(props);
}
/** Export meshes suitable for graphics APIs from arbitrary geometry in elements in this IModelDb.
 * * Requests can be slow when processing many elements so it is expected that this function be used on a dedicated backend,
 * or that shared backends export a limited number of elements at a time.
 * * Vertices are exported in the IModelDb's world coordinate system, which is right-handed with Z pointing up.
 * * The results of changing [ExportGraphicsOptions]($core-backend) during the [ExportGraphicsOptions.onGraphics]($core-backend) callback are not defined.
 *
 * Example that prints the mesh for element 1 to stdout in [OBJ format](https://en.wikipedia.org/wiki/Wavefront_.obj_file)
 * ```ts
 * const onGraphics: ExportGraphicsFunction = (info: ExportGraphicsInfo) => {
 *   const mesh: ExportGraphicsMesh = info.mesh;
 *   for (let i = 0; i < mesh.points.length; i += 3) {
 *     process.stdout.write(`v ${mesh.points[i]} ${mesh.points[i + 1]} ${mesh.points[i + 2]}\n`);
 *     process.stdout.write(`vn ${mesh.normals[i]} ${mesh.normals[i + 1]} ${mesh.normals[i + 2]}\n`);
 *   }
 *
 *   for (let i = 0; i < mesh.params.length; i += 2) {
 *     process.stdout.write(`vt ${mesh.params[i]} ${mesh.params[i + 1]}\n`);
 *   }
 *
 *   for (let i = 0; i < mesh.indices.length; i += 3) {
 *     const p1 = mesh.indices[i];
 *     const p2 = mesh.indices[i + 1];
 *     const p3 = mesh.indices[i + 2];
 *     process.stdout.write(`f ${p1}/${p1}/${p1} ${p2}/${p2}/${p2} ${p3}/${p3}/${p3}\n`);
 *   }
 * };
 *
 * iModel.exportGraphics(({ onGraphics, elementIdArray: ["0x1"] }));
 * ```
 * @returns 0 if successful, error status otherwise
 * @public
 */
public exportGraphics(exportProps: ExportGraphicsOptions): DbResult {
return this.nativeDb.exportGraphics(exportProps);
}
/**
 * Exports meshes suitable for graphics APIs from a specified [GeometryPart]($core-backend)
 * in this IModelDb.
 * The expected use case: call [IModelDb.exportGraphics]($core-backend) with the optional
 * partInstanceArray argument supplied, then invoke this function once for each unique
 * GeometryPart from that list.
 * * The results of changing [ExportPartGraphicsOptions]($core-backend) during the
 * [ExportPartGraphicsOptions.onPartGraphics]($core-backend) callback are not defined.
 * * See export-gltf under test-apps in the iModel.js monorepo for a working reference.
 * @returns 0 if successful, status otherwise
 * @public
 */
public exportPartGraphics(exportProps: ExportPartGraphicsOptions): DbResult {
  const status = this.nativeDb.exportPartGraphics(exportProps);
  return status;
}
/** Request geometry stream information from an element in binary form rather than JSON.
 * @returns DbResult.BE_SQLITE_OK if successful
 * @alpha
 */
public elementGeometryRequest(requestProps: ElementGeometryRequest): DbResult {
  // Note: the native entry point is named differently ("processGeometryStream").
  const status = this.nativeDb.processGeometryStream(requestProps);
  return status;
}
/** Update the geometry stream for the supplied element from binary-format data rather than JSON.
 * @returns DbResult.BE_SQLITE_OK if successful
 * @alpha
 */
public elementGeometryUpdate(updateProps: ElementGeometryUpdate): DbResult {
  const status = this.nativeDb.updateGeometryStream(updateProps);
  return status;
}
/** Create brep geometry for inclusion in an element's geometry stream.
 * @returns DbResult.BE_SQLITE_OK if successful
 * @throws [[IModelError]] to report issues with input geometry or parameters
 * @see [IModelDb.elementGeometryUpdate]($core-backend)
 * @alpha
 */
public createBRepGeometry(createProps: BRepGeometryCreate): DbResult {
  const result = this.nativeDb.createBRepGeometry(createProps);
  return result;
}
/** Generate graphics for an element or geometry stream.
 * @see [readElementGraphics]($frontend) to convert the result to a [RenderGraphic]($frontend) for display.
 */
public async generateElementGraphics(request: ElementGraphicsRequestProps): Promise<Uint8Array | undefined> {
  const graphics = await generateElementGraphics(request, this);
  return graphics;
}
}
/** @public */
export namespace IModelDb { // eslint-disable-line no-redeclare
/** The collection of models in an [[IModelDb]].
* @public
*/
export class Models {
  /** @internal */
  public constructor(private _iModel: IModelDb) { }

  /** Load the ModelProps for the model with the specified Id.
   * @param id The Model identifier.
   * @throws [[IModelError]] if the model is not found or cannot be loaded.
   * @see tryGetModelProps
   */
  public getModelProps<T extends ModelProps>(id: Id64String): T {
    const loadProps: ModelLoadProps = { id };
    return this.getModelJson<T>(loadProps);
  }

  /** Load the ModelProps for the model with the specified Id, without throwing when it is absent.
   * @param id The Model identifier.
   * @returns The ModelProps, or `undefined` if no model with that Id exists.
   * @throws [[IModelError]] if the model cannot be loaded.
   * @note Useful when a model may or may not exist and throwing an `Error` would be overkill.
   * @see getModelProps
   */
  public tryGetModelProps<T extends ModelProps>(id: Id64String): T | undefined {
    const loadProps: ModelLoadProps = { id };
    return this.tryGetModelJson<T>(loadProps);
  }

  /** Query for the last modified time of the specified Model.
   * @internal
   */
  public queryLastModifiedTime(modelId: Id64String): string {
    const sql = `SELECT LastMod FROM ${Model.classFullName} WHERE ECInstanceId=:modelId`;
    return this._iModel.withPreparedStatement(sql, (stmt) => {
      stmt.bindId("modelId", modelId);
      if (DbResult.BE_SQLITE_ROW !== stmt.step())
        throw new IModelError(IModelStatus.InvalidId, `Can't get lastMod time for Model ${modelId}`);
      return stmt.getValue(0).getDateTime();
    });
  }

  /** Get the Model with the specified identifier.
   * @param modelId The Model identifier.
   * @param modelClass Optional class to validate the instance against; may be abstract or concrete, and should match the template (`T`) parameter.
   * @throws [[IModelError]] if the model is not found, cannot be loaded, or fails validation when `modelClass` is specified.
   * @see tryGetModel
   */
  public getModel<T extends Model>(modelId: Id64String, modelClass?: EntityClassType<Model>): T {
    const model = this.tryGetModel<T>(modelId, modelClass);
    if (model === undefined)
      throw new IModelError(IModelStatus.NotFound, `Model=${modelId}`);
    return model;
  }

  /** Get the Model with the specified identifier, without throwing when it is absent.
   * @param modelId The Model identifier.
   * @param modelClass Optional class to validate the instance against; may be abstract or concrete, and should match the template (`T`) parameter.
   * @returns The Model, or `undefined` if it is not found or fails validation when `modelClass` is specified.
   * @throws [[IModelError]] if the model cannot be loaded.
   * @note Useful when a model may or may not exist and throwing an `Error` would be overkill.
   * @see getModel
   */
  public tryGetModel<T extends Model>(modelId: Id64String, modelClass?: EntityClassType<Model>): T | undefined {
    const props = this.tryGetModelProps<T>(modelId);
    if (props === undefined)
      return undefined; // no Model with that modelId
    const model = this._iModel.constructEntity<T>(props);
    if (modelClass === undefined)
      return model; // no class supplied, nothing to validate against
    return (model instanceof modelClass) ? model : undefined;
  }

  /** Read the properties of a Model as JSON.
   * @param modelIdArg identifies the model to load; must have either "id" or "code".
   * @returns The model's properties.
   * @throws [[IModelError]] if the model is not found or cannot be loaded.
   * @see tryGetModelJson
   * @internal
   */
  public getModelJson<T extends ModelProps>(modelIdArg: ModelLoadProps): T {
    const json = this.tryGetModelJson<T>(modelIdArg);
    if (json === undefined)
      throw new IModelError(IModelStatus.NotFound, `Model=${modelIdArg}`);
    return json;
  }

  /** Read the properties of a Model as JSON.
   * @param modelIdArg identifies the model to load; must have either "id" or "code".
   * @returns The model's properties, or `undefined` if the model could not be read.
   * @see getModelJson
   */
  private tryGetModelJson<T extends ModelProps>(modelIdArg: ModelLoadProps): T | undefined {
    try {
      return this._iModel.nativeDb.getModel(modelIdArg) as T;
    } catch {
      return undefined; // any native failure (including "not found") maps to undefined
    }
  }

  /** Get the sub-model of the specified Element.
   * See [[IModelDb.Elements.queryElementIdByCode]] for more on how to find an element by Code.
   * @param modeledElementId Identifies the modeled element.
   * @param modelClass Optional class to validate the instance against; may be abstract or concrete, and should match the template (`T`) parameter.
   * @throws [[IModelError]] if the sub-model is not found, cannot be loaded, or fails validation when `modelClass` is specified.
   * @see tryGetSubModel
   */
  public getSubModel<T extends Model>(modeledElementId: Id64String | GuidString | Code, modelClass?: EntityClassType<Model>): T {
    const elementProps = this._iModel.elements.getElementProps<ElementProps>(modeledElementId);
    if (IModel.rootSubjectId === elementProps.id)
      throw new IModelError(IModelStatus.NotFound, "Root subject does not have a sub-model");
    return this.getModel<T>(elementProps.id!, modelClass);
  }

  /** Get the sub-model of the specified Element, without throwing when it is absent.
   * See [[IModelDb.Elements.queryElementIdByCode]] for more on how to find an element by Code.
   * @param modeledElementId Identifies the modeled element.
   * @param modelClass Optional class to validate the instance against; may be abstract or concrete, and should match the template (`T`) parameter.
   * @returns The sub-model, or `undefined` if the element has no sub-model or it fails validation when `modelClass` is specified.
   * @see getSubModel
   */
  public tryGetSubModel<T extends Model>(modeledElementId: Id64String | GuidString | Code, modelClass?: EntityClassType<Model>): T | undefined {
    const elementProps = this._iModel.elements.tryGetElementProps(modeledElementId);
    if (elementProps === undefined || elementProps.id === IModel.rootSubjectId)
      return undefined;
    return this.tryGetModel<T>(elementProps.id!, modelClass);
  }

  /** Create a new model in memory.
   * See the example in [[InformationPartitionElement]].
   * @param modelProps The properties to use when creating the model.
   * @throws [[IModelError]] if there is a problem creating the model.
   */
  public createModel<T extends Model>(modelProps: ModelProps): T {
    return this._iModel.constructEntity<T>(modelProps);
  }

  /** Insert a new model.
   * @param props The data for the new model. Its `id` property is set to the newly assigned Id.
   * @returns The newly inserted model's Id.
   * @throws [[IModelError]] if unable to insert the model.
   */
  public insertModel(props: ModelProps): Id64String {
    try {
      const json = props instanceof Model ? props.toJSON() : props;
      props.id = this._iModel.nativeDb.insertModel(json);
      return props.id;
    } catch (err: any) {
      throw new IModelError(err.errorNumber, `Error inserting model [${err.message}], class=${props.classFullName}`);
    }
  }

  /** Update an existing model.
   * @param props the properties of the model to change
   * @throws [[IModelError]] if unable to update the model.
   */
  public updateModel(props: UpdateModelOptions): void {
    try {
      const json = props instanceof Model ? props.toJSON() : props;
      this._iModel.nativeDb.updateModel(json);
    } catch (err: any) {
      throw new IModelError(err.errorNumber, `error updating model [${err.message}] id=${props.id}`);
    }
  }

  /** Mark the geometry of a [[GeometricModel]] as changed by recording an indirect change to its GeometryGuid property.
   * The GeometryGuid normally changes automatically when [[GeometricElement]]s within the model are modified; updating it
   * explicitly is occasionally useful after modifying definition elements such as line styles or materials that indirectly
   * affect the appearance of [[GeometricElement]]s that reference them in their geometry streams.
   * Cached [Tile]($frontend)s are only invalidated after the geometry guid of the model changes.
   * @note This will throw IModelError with [IModelStatus.VersionTooOld]($core-bentley) if a version of the BisCore schema older than 1.0.11 is present in the iModel.
   * @throws IModelError if unable to update the geometry guid.
   * @see [[TxnManager.onModelGeometryChanged]] for the event emitted in response to such a change.
   */
  public updateGeometryGuid(modelId: Id64String): void {
    const status = this._iModel.nativeDb.updateModelGeometryGuid(modelId);
    if (IModelStatus.Success !== status)
      throw new IModelError(status, `updating geometry guid for model ${modelId}`);
  }

  /** Delete one or more existing models.
   * @param ids The Ids of the models to be deleted
   * @throws [[IModelError]]
   */
  public deleteModel(ids: Id64Arg): void {
    for (const id of Id64.toIdSet(ids)) {
      try {
        this._iModel.nativeDb.deleteModel(id);
      } catch (err: any) {
        throw new IModelError(err.errorNumber, `error deleting model [${err.message}] id ${id}`);
      }
    }
  }
}
/** The collection of elements in an [[IModelDb]].
* @public
*/
export class Elements {
/** @internal */
public constructor(private _iModel: IModelDb) { }
/** Read element data from the iModel as JSON
* @param elementId the identity of the element to load. Must have one of "id", "federationGuid", or "code".
* @returns The JSON properties of the element.
* @throws [[IModelError]] if the element is not found or cannot be loaded.
* @see tryGetElementJson
* @internal
*/
public getElementJson<T extends ElementProps>(elementId: ElementLoadProps): T {
const elementProps = this.tryGetElementJson<T>(elementId);
if (undefined === elementProps)
throw new IModelError(IModelStatus.NotFound, `reading element=${elementId}`);
return elementProps;
}
/** Read element data from the iModel as JSON
* @param loadProps the identity of the element to load. Must have one of "id", "federationGuid", or "code".
* @returns The JSON properties of the element or `undefined` if the element is not found.
* @throws [[IModelError]] if the element exists, but cannot be loaded.
* @see getElementJson
*/
private tryGetElementJson<T extends ElementProps>(loadProps: ElementLoadProps): T | undefined {
try {
return this._iModel.nativeDb.getElement(loadProps) as T;
} catch (err: any) {
// NOTE(review): every native error is swallowed here, so an element that exists but fails
// to load also yields undefined — the @throws claim above appears aspirational; confirm.
return undefined;
}
}
/** Get properties of an Element by Id, FederationGuid, or Code
* @throws [[IModelError]] if the element is not found or cannot be loaded.
* @see tryGetElementProps
*/
public getElementProps<T extends ElementProps>(elementId: Id64String | GuidString | Code | ElementLoadProps): T {
const elementProps = this.tryGetElementProps<T>(elementId);
if (undefined === elementProps)
throw new IModelError(IModelStatus.NotFound, `reading element=${elementId}`);
return elementProps;
}
/** Get properties of an Element by Id, FederationGuid, or Code
* @returns The properties of the element or `undefined` if the element is not found.
* @throws [[IModelError]] if the element exists, but cannot be loaded.
* @note Useful for cases when an element may or may not exist and throwing an `Error` would be overkill.
* @see getElementProps
*/
public tryGetElementProps<T extends ElementProps>(elementId: Id64String | GuidString | Code | ElementLoadProps): T | undefined {
// Normalize the argument to an ElementLoadProps: a plain string is an Id64 if it parses
// as one, otherwise it is treated as a FederationGuid; a Code becomes a code filter.
if (typeof elementId === "string") {
elementId = Id64.isId64(elementId) ? { id: elementId } : { federationGuid: elementId };
} else if (elementId instanceof Code) {
elementId = { code: elementId };
}
return this.tryGetElementJson<T>(elementId);
}
/** Get an element by Id, FederationGuid, or Code
* @param elementId either the element's Id, Code, or FederationGuid, or an ElementLoadProps
* @param elementClass Optional class to validate instance against. This parameter can accept abstract or concrete classes, but should be the same as the template (`T`) parameter.
* @throws [[IModelError]] if the element is not found, cannot be loaded, or fails validation when `elementClass` is specified.
* @see tryGetElement
*/
public getElement<T extends Element>(elementId: Id64String | GuidString | Code | ElementLoadProps, elementClass?: EntityClassType<Element>): T {
const element = this.tryGetElement<T>(elementId, elementClass);
if (undefined === element)
throw new IModelError(IModelStatus.NotFound, `Element=${elementId}`);
return element;
}
/** Get an element by Id, FederationGuid, or Code
* @param elementId either the element's Id, Code, or FederationGuid, or an ElementLoadProps
* @param elementClass Optional class to validate instance against. This parameter can accept abstract or concrete classes, but should be the same as the template (`T`) parameter.
* @returns The element or `undefined` if the element is not found or fails validation when `elementClass` is specified.
* @throws [[IModelError]] if the element exists, but cannot be loaded.
* @note Useful for cases when an element may or may not exist and throwing an `Error` would be overkill.
* @see getElement
*/
public tryGetElement<T extends Element>(elementId: Id64String | GuidString | Code | ElementLoadProps, elementClass?: EntityClassType<Element>): T | undefined {
// Same argument normalization as tryGetElementProps.
if (typeof elementId === "string") {
elementId = Id64.isId64(elementId) ? { id: elementId } : { federationGuid: elementId };
} else if (elementId instanceof Code) {
elementId = { code: elementId };
}
const elementProps = this.tryGetElementJson<T>(elementId);
if (undefined === elementProps) {
return undefined; // no Element with that elementId found
}
const element = this._iModel.constructEntity<T>(elementProps);
if (undefined === elementClass) {
return element; // elementClass was not specified, cannot call instanceof to validate
}
return element instanceof elementClass ? element : undefined;
}
/** Query for the Id of the element that has a specified code.
* This method is for the case where you know the element's Code.
* If you only know the code *value*, then in the simplest case, you can query on that
* and filter the results.
* In the simple case, call [[IModelDb.queryEntityIds]], specifying the code value in the where clause of the query params.
* Or, you can execute an ECSQL select statement. See
* [frequently used ECSQL queries]($docs/learning/backend/ECSQL-queries.md) for an example.
* @param code The code to look for
* @returns The element that uses the code or undefined if the code is not used.
* @throws IModelError if the code is invalid
*/
public queryElementIdByCode(code: Code): Id64String | undefined {
if (Id64.isInvalid(code.spec))
throw new IModelError(IModelStatus.InvalidCodeSpec, "Invalid CodeSpec");
if (code.value === undefined)
throw new IModelError(IModelStatus.InvalidCode, "Invalid Code");
return this._iModel.withPreparedStatement("SELECT ECInstanceId FROM BisCore:Element WHERE CodeSpec.Id=? AND CodeScope.Id=? AND CodeValue=?", (stmt: ECSqlStatement) => {
stmt.bindId(1, code.spec);
stmt.bindId(2, Id64.fromString(code.scope));
stmt.bindString(3, code.value);
if (DbResult.BE_SQLITE_ROW !== stmt.step())
return undefined;
return stmt.getValue(0).getId();
});
}
/** Query for the last modified time of the specified element.
* @internal
*/
public queryLastModifiedTime(elementId: Id64String): string {
const sql = "SELECT LastMod FROM BisCore:Element WHERE ECInstanceId=:elementId";
return this._iModel.withPreparedStatement<string>(sql, (statement: ECSqlStatement): string => {
statement.bindId("elementId", elementId);
if (DbResult.BE_SQLITE_ROW === statement.step())
return statement.getValue(0).getDateTime();
throw new IModelError(IModelStatus.InvalidId, `Can't get lastMod time for Element ${elementId}`);
});
}
/** Create a new instance of an element.
* @param elProps The properties of the new element.
* @throws [[IModelError]] if there is a problem creating the element.
*/
public createElement<T extends Element>(elProps: ElementProps): T { return this._iModel.constructEntity<T>(elProps); }
/** Insert a new element into the iModel.
* @param elProps The properties of the new element. Note: its `id` property is mutated to hold the newly assigned Id.
* @returns The newly inserted element's Id.
* @throws [[IModelError]] if unable to insert the element.
*/
public insertElement(elProps: ElementProps): Id64String {
try {
return elProps.id = this._iModel.nativeDb.insertElement(elProps instanceof Element ? elProps.toJSON() : elProps);
} catch (err: any) {
throw new IModelError(err.errorNumber, `insertElement with class=${elProps.classFullName}: ${err.message}`,);
}
}
/** Update some properties of an existing element.
* To support clearing a property value, every property name that is present in the `elProps` object will be updated even if the value is `undefined`.
* To keep an individual element property unchanged, it should either be excluded from the `elProps` parameter or set to its current value.
* @param elProps the properties of the element to update.
* @note As described above, this is a special case where there is a difference between a property being excluded and a property being present in `elProps` but set to `undefined`.
* @throws [[IModelError]] if unable to update the element.
*/
public updateElement(elProps: ElementProps): void {
try {
this._iModel.nativeDb.updateElement(elProps instanceof Element ? elProps.toJSON() : elProps);
} catch (err: any) {
throw new IModelError(err.errorNumber, `Error updating element [${err.message}], id:${elProps.id}`);
}
}
/** Delete one or more elements from this iModel.
* @param ids The set of Ids of the element(s) to be deleted
* @throws [[IModelError]]
* @see deleteDefinitionElements
*/
public deleteElement(ids: Id64Arg): void {
const iModel = this._iModel;
Id64.toIdSet(ids).forEach((id) => {
try {
iModel.nativeDb.deleteElement(id);
} catch (err: any) {
// The first failing Id aborts the whole iteration (the throw propagates out of forEach).
throw new IModelError(err.errorNumber, `Error deleting element [${err.message}], id:${id}`);
}
});
}
/** DefinitionElements can only be deleted if it can be determined that they are not referenced by other Elements.
* This *usage query* can be expensive since it may involve scanning the GeometryStreams of all GeometricElements.
* Since [[deleteElement]] does not perform these additional checks, it fails in order to prevent potentially referenced DefinitionElements from being deleted.
* This method performs those expensive checks and then calls *delete* if not referenced.
* @param ids The Ids of the DefinitionElements to attempt to delete. To prevent multiple passes over the same GeometricElements, it is best to pass in the entire array of
* DefinitionElements rather than calling this method separately for each one. Ids that are not valid DefinitionElements will be ignored.
* @returns An IdSet of the DefinitionElements that are used and were therefore not deleted.
* @see deleteElement
* @beta
*/
public deleteDefinitionElements(definitionElementIds: Id64Array): Id64Set {
// Phase 1: one native usage query over all candidate Ids, classified by DefinitionElement kind.
const usageInfo = this._iModel.nativeDb.queryDefinitionElementUsage(definitionElementIds);
if (!usageInfo) {
throw new IModelError(IModelStatus.BadRequest, "Error querying for DefinitionElement usage");
}
const usedIdSet = usageInfo.usedIds ? Id64.toIdSet(usageInfo.usedIds) : new Set<Id64String>();
// Helper: delete every id in the list that the usage query did not report as used.
const deleteIfUnused = (ids: Id64Array | undefined, used: Id64Set): void => {
if (ids) { ids.forEach((id) => { if (!used.has(id)) { this._iModel.elements.deleteElement(id); } }); }
};
// All deletes are bracketed by begin/endPurgeOperation on the native db; the try/finally
// guarantees endPurgeOperation runs even if a delete throws. NOTE(review): presumably the
// purge bracket relaxes reference checking during bulk deletion — confirm in native API docs.
try {
this._iModel.nativeDb.beginPurgeOperation();
deleteIfUnused(usageInfo.spatialCategoryIds, usedIdSet);
deleteIfUnused(usageInfo.drawingCategoryIds, usedIdSet);
deleteIfUnused(usageInfo.viewDefinitionIds, usedIdSet);
deleteIfUnused(usageInfo.geometryPartIds, usedIdSet);
deleteIfUnused(usageInfo.lineStyleIds, usedIdSet);
deleteIfUnused(usageInfo.renderMaterialIds, usedIdSet);
deleteIfUnused(usageInfo.subCategoryIds, usedIdSet);
deleteIfUnused(usageInfo.textureIds, usedIdSet);
deleteIfUnused(usageInfo.displayStyleIds, usedIdSet);
deleteIfUnused(usageInfo.categorySelectorIds, usedIdSet);
deleteIfUnused(usageInfo.modelSelectorIds, usedIdSet);
if (usageInfo.otherDefinitionElementIds) {
this._iModel.elements.deleteElement(usageInfo.otherDefinitionElementIds);
}
} finally {
this._iModel.nativeDb.endPurgeOperation();
}
if (usageInfo.viewDefinitionIds) {
// take another pass in case a deleted ViewDefinition was the only usage of these view-related DefinitionElements
let viewRelatedIds: Id64Array = [];
if (usageInfo.displayStyleIds) { viewRelatedIds = viewRelatedIds.concat(usageInfo.displayStyleIds.filter((id) => usedIdSet.has(id))); }
if (usageInfo.categorySelectorIds) { viewRelatedIds = viewRelatedIds.concat(usageInfo.categorySelectorIds.filter((id) => usedIdSet.has(id))); }
if (usageInfo.modelSelectorIds) { viewRelatedIds = viewRelatedIds.concat(usageInfo.modelSelectorIds.filter((id) => usedIdSet.has(id))); }
if (viewRelatedIds.length > 0) {
// Phase 2: re-run the usage query on just the still-used view-related Ids and delete any
// that became unused, again inside a purge-operation bracket.
const viewRelatedUsageInfo = this._iModel.nativeDb.queryDefinitionElementUsage(viewRelatedIds);
if (viewRelatedUsageInfo) {
const usedViewRelatedIdSet: Id64Set = viewRelatedUsageInfo.usedIds ? Id64.toIdSet(viewRelatedUsageInfo.usedIds) : new Set<Id64String>();
try {
this._iModel.nativeDb.beginPurgeOperation();
deleteIfUnused(viewRelatedUsageInfo.displayStyleIds, usedViewRelatedIdSet);
deleteIfUnused(viewRelatedUsageInfo.categorySelectorIds, usedViewRelatedIdSet);
deleteIfUnused(viewRelatedUsageInfo.modelSelectorIds, usedViewRelatedIdSet);
} finally {
this._iModel.nativeDb.endPurgeOperation();
}
// Ids deleted in the second pass are no longer "used"; drop them from the result set.
viewRelatedIds.forEach((id) => { if (!usedViewRelatedIdSet.has(id)) { usedIdSet.delete(id); } });
}
}
}
// What remains in usedIdSet is still referenced and was therefore not deleted.
return usedIdSet;
}
/** Query for the child elements of the specified element.
* @returns Returns an array of child element identifiers.
* @throws [[IModelError]]
*/
public queryChildren(elementId: Id64String): Id64String[] {
const sql = "SELECT ECInstanceId FROM BisCore:Element WHERE Parent.Id=:elementId";
return this._iModel.withPreparedStatement(sql, (statement: ECSqlStatement): Id64String[] => {
statement.bindId("elementId", elementId);
const childIds: Id64String[] = [];
while (DbResult.BE_SQLITE_ROW === statement.step()) {
childIds.push(statement.getValue(0).getId());
}
return childIds;
});
}
/** Returns true if the specified Element has a sub-model.
* @see [[IModelDb.Models.getSubModel]]
*/
public hasSubModel(elementId: Id64String): boolean {
if (IModel.rootSubjectId === elementId)
return false; // Special case since the RepositoryModel does not sub-model the root Subject
// A sub-model will have the same Id value as the element it is describing
const sql = "SELECT ECInstanceId FROM BisCore:Model WHERE ECInstanceId=:elementId";
return this._iModel.withPreparedStatement(sql, (statement: ECSqlStatement): boolean => {
statement.bindId("elementId", elementId);
return DbResult.BE_SQLITE_ROW === statement.step();
});
}
/** Get the root subject element. */
public getRootSubject(): Subject { return this.getElement(IModel.rootSubjectId); }
/** Query for aspects of a particular class (polymorphically) associated with this element.
* @throws [[IModelError]]
* @note Most cases should use the [[getAspects]] wrapper rather than calling this method directly.
* @internal
*/
public _queryAspects(elementId: Id64String, fromClassFullName: string, excludedClassFullNames?: Set<string>): ElementAspect[] { // eslint-disable-line @typescript-eslint/naming-convention
const sql = `SELECT ECInstanceId,ECClassId FROM ${fromClassFullName} WHERE Element.Id=:elementId ORDER BY ECClassId,ECInstanceId`; // ORDER BY to maximize statement reuse
return this._iModel.withPreparedStatement(sql, (statement: ECSqlStatement): ElementAspect[] => {
statement.bindId("elementId", elementId);
const aspects: ElementAspect[] = [];
while (DbResult.BE_SQLITE_ROW === statement.step()) {
const aspectInstanceId: Id64String = statement.getValue(0).getId();
// ECSQL reports class names as "Schema.Class"; the rest of the API uses "Schema:Class".
const aspectClassFullName: string = statement.getValue(1).getClassNameForClassId().replace(".", ":");
if ((undefined === excludedClassFullNames) || (!excludedClassFullNames.has(aspectClassFullName))) {
aspects.push(this._queryAspect(aspectInstanceId, aspectClassFullName));
}
}
return aspects;
});
}
/** Query for aspect by ECInstanceId
* @throws [[IModelError]]
*/
private _queryAspect(aspectInstanceId: Id64String, aspectClassName: string): ElementAspect {
const sql = `SELECT * FROM ${aspectClassName} WHERE ECInstanceId=:aspectInstanceId`;
const aspect: ElementAspectProps | undefined = this._iModel.withPreparedStatement(sql, (statement: ECSqlStatement): ElementAspectProps | undefined => {
statement.bindId("aspectInstanceId", aspectInstanceId);
if (DbResult.BE_SQLITE_ROW === statement.step()) {
const aspectProps: ElementAspectProps = statement.getRow(); // start with everything that SELECT * returned
aspectProps.classFullName = (aspectProps as any).className.replace(".", ":"); // add in property required by EntityProps
(aspectProps as any).className = undefined; // clear property from SELECT * that we don't want in the final instance
return aspectProps;
}
return undefined;
});
if (undefined === aspect) {
throw new IModelError(IModelStatus.NotFound, `ElementAspect not found ${aspectInstanceId}, ${aspectClassName}`);
}
return this._iModel.constructEntity<ElementAspect>(aspect);
}
/** Get a single ElementAspect by its instance Id.
* @throws [[IModelError]]
*/
public getAspect(aspectInstanceId: Id64String): ElementAspect {
// Look up the aspect's concrete class first, then load it polymorphically via _queryAspect.
const sql = "SELECT ECClassId FROM BisCore:ElementAspect WHERE ECInstanceId=:aspectInstanceId";
const aspectClassFullName = this._iModel.withPreparedStatement(sql, (statement: ECSqlStatement): string | undefined => {
statement.bindId("aspectInstanceId", aspectInstanceId);
return (DbResult.BE_SQLITE_ROW === statement.step()) ? statement.getValue(0).getClassNameForClassId().replace(".", ":") : undefined;
});
if (undefined === aspectClassFullName) {
throw new IModelError(IModelStatus.NotFound, `ElementAspect not found ${aspectInstanceId}`);
}
return this._queryAspect(aspectInstanceId, aspectClassFullName);
}
/** Get the ElementAspect instances that are owned by the specified element.
* @param elementId Get ElementAspects associated with this Element
* @param aspectClassFullName Optionally filter ElementAspects polymorphically by this class name
* @throws [[IModelError]]
*/
public getAspects(elementId: Id64String, aspectClassFullName?: string): ElementAspect[] {
if (undefined === aspectClassFullName) {
// No filter: combine both aspect hierarchies (unique + multi).
const uniqueAspects: ElementAspect[] = this._queryAspects(elementId, ElementUniqueAspect.classFullName);
const multiAspects: ElementAspect[] = this._queryAspects(elementId, ElementMultiAspect.classFullName);
return uniqueAspects.concat(multiAspects);
}
const aspects: ElementAspect[] = this._queryAspects(elementId, aspectClassFullName);
return aspects;
}
/** Insert a new ElementAspect into the iModel.
* @param aspectProps The properties of the new ElementAspect.
* @throws [[IModelError]] if unable to insert the ElementAspect.
*/
public insertAspect(aspectProps: ElementAspectProps): void {
try {
this._iModel.nativeDb.insertElementAspect(aspectProps);
} catch (err: any) {
throw new IModelError(err.errorNumber, `Error inserting ElementAspect [${err.message}], class: ${aspectProps.classFullName}`);
}
}
/** Update an exist ElementAspect within the iModel.
* @param aspectProps The properties to use to update the ElementAspect.
* @throws [[IModelError]] if unable to update the ElementAspect.
*/
public updateAspect(aspectProps: ElementAspectProps): void {
try {
this._iModel.nativeDb.updateElementAspect(aspectProps);
} catch (err: any) {
throw new IModelError(err.errorNumber, `Error updating ElementAspect [${err.message}], id: ${aspectProps.id}`);
}
}
/** Delete one or more ElementAspects from this iModel.
* @param aspectInstanceIds The set of instance Ids of the ElementAspect(s) to be deleted
* @throws [[IModelError]] if unable to delete the ElementAspect.
*/
public deleteAspect(aspectInstanceIds: Id64Arg): void {
const iModel = this._iModel;
Id64.toIdSet(aspectInstanceIds).forEach((aspectInstanceId) => {
try {
iModel.nativeDb.deleteElementAspect(aspectInstanceId);
} catch (err: any) {
throw new IModelError(err.errorNumber, `Error deleting ElementAspect [${err.message}], id: ${aspectInstanceId}`);
}
});
}
}
/** The collection of views in an [[IModelDb]].
* @public
*/
export class Views {
/** Construct the Views collection for an IModelDb. @internal */
public constructor(private _iModel: IModelDb) { }
/** Query for an array of ViewDefinitionProps of the specified class, honoring the IsPrivate setting.
 * @param className Query for view definitions of this class.
 * @param limit Maximum number of view definitions to return.
 * @param offset Number of matching view definitions to skip.
 * @param wantPrivate If true, include private view definitions.
 */
public queryViewDefinitionProps(className: string = "BisCore.ViewDefinition", limit = IModelDb.defaultLimit, offset = 0, wantPrivate: boolean = false): ViewDefinitionProps[] {
  const where = wantPrivate ? "" : "IsPrivate=FALSE";
  const ids = this._iModel.queryEntityIds({ from: className, limit, offset, where });
  const props: ViewDefinitionProps[] = [];
  for (const id of ids) {
    try {
      props.push(this._iModel.elements.getElementProps<ViewDefinitionProps>(id));
    } catch {
      // skip views whose properties fail to load
    }
  }
  return props;
}
/** Default parameters for iterating/querying ViewDefinitions. Includes all subclasses of ViewDefinition, excluding only those marked 'private'. Suitable for passing to [[iterateViews]]. */
public static readonly defaultQueryParams: ViewQueryParams = { from: "BisCore.ViewDefinition", where: "IsPrivate=FALSE" };
/** Iterate all ViewDefinitions matching the supplied query.
 * @param params Specifies the query by which views are selected.
 * @param callback Function invoked for each ViewDefinition matching the query. Return false to terminate iteration, true to continue.
 * @returns true if all views were iterated, false if iteration was terminated early due to callback returning false.
 *
 * **Example: Finding all views of a specific DrawingModel**
 * ``` ts
 * [[include:IModelDb.Views.iterateViews]]
 * ```
 */
public iterateViews(params: ViewQueryParams, callback: (view: ViewDefinition) => boolean): boolean {
  for (const id of this._iModel.queryEntityIds(params)) {
    // The callback stays inside the try so that, as before, an exception thrown while
    // loading the element OR by the callback itself skips this id and continues iterating.
    try {
      const element = this._iModel.elements.getElement(id);
      if (element instanceof ViewDefinition) {
        if (!callback(element))
          return false; // callback asked to stop
      }
    } catch {
      // ignore this id and keep going
    }
  }
  return true;
}
public getViewStateData(viewDefinitionId: string, options?: ViewStateLoadProps): ViewStateProps {
const elements = this._iModel.elements;
const viewDefinitionElement = elements.getElement<ViewDefinition>(viewDefinitionId);
const viewDefinitionProps = viewDefinitionElement.toJSON();
const categorySelectorProps = elements.getElementProps<CategorySelectorProps>(viewDefinitionProps.categorySelectorId);
const displayStyleOptions: ElementLoadProps = {
id: viewDefinitionProps.displayStyleId,
displayStyle: options?.displayStyle,
};
const displayStyleProps = elements.getElementProps<DisplayStyleProps>(displayStyleOptions);
const viewStateData: ViewStateProps = { viewDefinitionProps, displayStyleProps, categorySelectorProps };
const modelSelectorId = (viewDefinitionProps as SpatialViewDefinitionProps).modelSelectorId;
if (modelSelectorId !== undefined) {
viewStateData.modelSelectorProps = elements.getElementProps<ModelSelectorProps>(modelSelectorId);
} else if (viewDefinitionElement instanceof SheetViewDefinition) {
viewStateData.sheetProps = elements.getElementProps<SheetProps>(viewDefinitionElement.baseModelId);
viewStateData.sheetAttachments = Array.from(this._iModel.queryEntityIds({
from: "BisCore.ViewAttachment",
where: `Model.Id=${viewDefinitionElement.baseModelId}`,
}));
} else if (viewDefinitionElement instanceof DrawingViewDefinition) {
// Ensure view has known extents
try {
const extentsJson = this._iModel.nativeDb.queryModelExtents({ id: viewDefinitionElement.baseModelId }).modelExtents;
viewStateData.modelExtents = Range3d.fromJSON(extentsJson);
} catch {
//
}
// Include information about the associated [[SectionDrawing]], if any.
// NB: The SectionDrawing ECClass may not exist in the iModel's version of the BisCore ECSchema.
try {
const sectionDrawing = this._iModel.elements.tryGetElement<SectionDrawing>(viewDefinitionElement.baseModelId);
if (sectionDrawing && sectionDrawing.spatialView && Id64.isValidId64(sectionDrawing.spatialView.id)) {
viewStateData.sectionDrawing = {
spatialView: sectionDrawing.spatialView.id,
displaySpatialView: true === sectionDrawing.jsonProperties.displaySpatialView,
drawingToSpatialTransform: sectionDrawing.jsonProperties.drawingToSpatialTransform,
};
}
} catch {
//
}
}
return viewStateData;
}
private getViewThumbnailArg(viewDefinitionId: Id64String): FilePropertyProps {
return { namespace: "dgn_View", name: "Thumbnail", id: viewDefinitionId };
}
/** Get the thumbnail for a view.
* @param viewDefinitionId The Id of the view for thumbnail
* @returns the ThumbnailProps, or undefined if no thumbnail exists.
*/
public getThumbnail(viewDefinitionId: Id64String): ThumbnailProps | undefined {
const viewArg = this.getViewThumbnailArg(viewDefinitionId);
const sizeProps = this._iModel.nativeDb.queryFileProperty(viewArg, true) as string;
if (undefined === sizeProps)
return undefined;
const out = JSON.parse(sizeProps) as ThumbnailProps;
out.image = this._iModel.nativeDb.queryFileProperty(viewArg, false) as Uint8Array;
return out;
}
/** Save a thumbnail for a view.
* @param viewDefinitionId The Id of the view for thumbnail
* @param thumbnail The thumbnail data.
* @returns 0 if successful
*/
public saveThumbnail(viewDefinitionId: Id64String, thumbnail: ThumbnailProps): number {
const viewArg = this.getViewThumbnailArg(viewDefinitionId);
const props = { format: thumbnail.format, height: thumbnail.height, width: thumbnail.width };
this._iModel.nativeDb.saveFileProperty(viewArg, JSON.stringify(props), thumbnail.image);
return 0;
}
/** Set the default view property the iModel
* @param viewId The Id of the ViewDefinition to use as the default
*/
public setDefaultViewId(viewId: Id64String): void {
const spec = { namespace: "dgn_View", name: "DefaultView" };
const blob32 = new Uint32Array(2);
blob32[0] = Id64.getLowerUint32(viewId);
blob32[1] = Id64.getUpperUint32(viewId);
const blob8 = new Uint8Array(blob32.buffer);
this._iModel.saveFileProperty(spec, undefined, blob8);
}
}
/** Represents the current state of a pollable tile content request.
 * These are the numeric values returned by nativeDb.pollTileContent while the content is not yet ready.
 * Note: lack of a "completed" state because polling a completed request returns the content as a Uint8Array.
 * @internal
 */
export enum TileContentState {
  New, // Request was just created and enqueued.
  Pending, // Request is enqueued but not yet being processed.
  Loading, // Request is being actively processed.
}
/** Provides access to an iModel's tile trees and tile content. @internal */
export class Tiles {
  /** @internal */
  public constructor(private _iModel: IModelDb) { }

  /** Asynchronously obtain the properties of the tile tree with the specified Id. @internal */
  public async requestTileTreeProps(id: string): Promise<IModelTileTreeProps> {
    return new Promise<IModelTileTreeProps>((resolve, reject) => {
      this._iModel.nativeDb.getTileTree(id, (ret: IModelJsNative.ErrorStatusOrResult<IModelStatus, any>) => {
        if (undefined === ret.error)
          resolve(ret.result as IModelTileTreeProps);
        else
          reject(new IModelError(ret.error.status, `TreeId=${id}`));
      });
    });
  }

  /** Poll the native db for tile content, re-polling on a short timer until the content is ready. */
  private pollTileContent(resolve: (arg0: IModelJsNative.TileContent) => void, reject: (err: unknown) => void, treeId: string, tileId: string) {
    let pollResult;
    try {
      pollResult = this._iModel.nativeDb.pollTileContent(treeId, tileId);
    } catch (err) {
      // Typically "imodel not open".
      reject(err);
      return;
    }

    if (undefined !== pollResult.error) {
      reject(new IModelError(pollResult.error.status, `TreeId=${treeId} TileId=${tileId}`));
      return;
    }

    if (typeof pollResult.result === "number") {
      // A numeric result is a TileContentState enum value - content is not ready yet; poll again shortly.
      // ###TODO: Decide appropriate timeout interval. May want to switch on state (new vs loading vs pending)
      setTimeout(() => this.pollTileContent(resolve, reject, treeId, tileId), 10);
      return;
    }

    // Otherwise the result is the TileContent interface. Log warnings when host-configured thresholds are exceeded.
    const content = pollResult.result as IModelJsNative.TileContent;
    const iModelId = this._iModel.iModelId;
    const tileSize = content.content.length;
    const tileSizeThreshold = IModelHost.logTileSizeThreshold;
    if (tileSize > tileSizeThreshold)
      Logger.logWarning(loggerCategory, "Tile size (in bytes) larger than specified threshold", () => ({ tileSize, tileSizeThreshold, treeId, tileId, iModelId }));

    const loadTime = content.elapsedSeconds;
    const loadTimeThreshold = IModelHost.logTileLoadTimeThreshold;
    if (loadTime > loadTimeThreshold)
      Logger.logWarning(loggerCategory, "Tile load time (in seconds) greater than specified threshold", () => ({ loadTime, loadTimeThreshold, treeId, tileId, iModelId }));

    resolve(content);
  }

  /** Request the content of a tile, resolving once the native db reports it ready. @internal */
  public async requestTileContent(treeId: string, tileId: string): Promise<IModelJsNative.TileContent> {
    return new Promise<IModelJsNative.TileContent>((resolve, reject) => this.pollTileContent(resolve, reject, treeId, tileId));
  }

  /** Obtain the raw bytes of a tile's content. @internal */
  public async getTileContent(treeId: string, tileId: string): Promise<Uint8Array> {
    const ret = await new Promise<IModelJsNative.ErrorStatusOrResult<any, Uint8Array>>((resolve) => {
      this._iModel.nativeDb.getTileContent(treeId, tileId, resolve);
    });
    if (undefined === ret.error)
      return ret.result!;
    throw new IModelError(ret.error.status, `TreeId=${treeId} TileId=${tileId}`);
  }
}
}
/**
 * Argument to a function that can accept a valid access token.
 * Callers may omit the token to let the implementation fall back to the host's default.
 * @public
 */
export interface TokenArg {
  /** If present, the access token for the requested operation. If not present, use [[IModelHost.getAccessToken]] */
  readonly accessToken?: AccessToken;
}
/**
 * Arguments to open a BriefcaseDb
 * Extends [OpenBriefcaseProps]($common) with an optional RpcActivity
 * (presumably for correlating the open with an originating RPC request - confirm with RpcActivity docs).
 * @public
 */
export type OpenBriefcaseArgs = OpenBriefcaseProps & { rpcActivity?: RpcActivity };
/**
 * A local copy of an iModel from iModelHub that can pull and potentially push changesets.
 * BriefcaseDb raises a set of events to allow apps and subsystems to track its object life cycle, including [[onOpen]] and [[onOpened]].
 * @public
 */
export class BriefcaseDb extends IModelDb {
  /** Manages local changes to this briefcase. */
  public readonly txns: TxnManager = new TxnManager(this);

  /** override superclass method */
  public override get isBriefcase(): boolean { return true; }

  /* the BriefcaseId of the briefcase opened with this BriefcaseDb */
  public readonly briefcaseId: BriefcaseId;

  /**
   * Event raised just before a BriefcaseDb is opened. Supplies the arguments that will be used to open the BriefcaseDb.
   * Throw an exception to stop the open.
   *
   * **Example:**
   * ``` ts
   * [[include:BriefcaseDb.onOpen]]
   * ```
   */
  public static readonly onOpen = new BeEvent<(_args: OpenBriefcaseArgs) => void>();

  /**
   * Event raised just after a BriefcaseDb is opened. Supplies the newly opened BriefcaseDb and the arguments that were used to open it.
   *
   * **Example:**
   * ``` ts
   * [[include:BriefcaseDb.onOpened]]
   * ```
   */
  public static readonly onOpened = new BeEvent<(_iModelDb: BriefcaseDb, _args: OpenBriefcaseArgs) => void>();

  /** Find an open BriefcaseDb by its key. Throws (in the superclass) if not found. */
  public static override findByKey(key: string): BriefcaseDb {
    return super.findByKey(key) as BriefcaseDb;
  }

  /** Find an open BriefcaseDb by its key, or undefined if no open BriefcaseDb has that key. */
  public static override tryFindByKey(key: string): BriefcaseDb | undefined {
    const db = super.tryFindByKey(key);
    return db?.isBriefcaseDb() ? db : undefined;
  }

  /** The Guid that identifies the *context* that owns this iModel. */
  public override get iTwinId(): GuidString { return super.iTwinId!; } // GuidString | undefined for the superclass, but required for BriefcaseDb

  /**
   * Determine whether this BriefcaseDb should use a lock server.
   * All must be true:
   * - file is open for write
   * - has an assigned briefcaseId
   * - the "no locking" flag is not present. This is a property of an iModel, established when the iModel is created in IModelHub.
   */
  protected get useLockServer(): boolean {
    return !this.isReadonly && (this.briefcaseId !== BriefcaseIdValue.Unassigned) && (undefined === this.nativeDb.queryLocalValue(BriefcaseLocalValue.NoLocking));
  }

  /** Construct a BriefcaseDb over an already-opened native db. Use [[open]] instead of calling directly. */
  protected constructor(args: { nativeDb: IModelJsNative.DgnDb, key: string, openMode: OpenMode, briefcaseId: number }) {
    super({ ...args, changeset: args.nativeDb.getCurrentChangeset() });
    this._openMode = args.openMode;
    this.briefcaseId = args.briefcaseId;

    if (this.useLockServer) // if the iModel uses a lock server, create a ServerBasedLocks LockControl for this BriefcaseDb.
      this._locks = new ServerBasedLocks(this);
  }

  /** Upgrades the profile or domain schemas. File must be closed before this call and is always left closed. */
  private static async doUpgrade(briefcase: OpenBriefcaseArgs, upgradeOptions: UpgradeOptions, description: string): Promise<void> {
    const nativeDb = this.openDgnDb({ path: briefcase.fileName }, OpenMode.ReadWrite, upgradeOptions); // performs the upgrade
    const wasChanges = nativeDb.hasPendingTxns();
    nativeDb.closeIModel();

    // Only push if the upgrade actually produced local txns.
    if (wasChanges)
      await withBriefcaseDb(briefcase, async (db) => db.pushChanges({ ...briefcase, description, retainLocks: true }));
  }

  /** Upgrades the schemas in the iModel based on the current version of the software. Follows a sequence of operations -
   * * Acquires a schema lock to prevent other users from making any other changes while upgrade is happening
   * * Updates the local briefcase with the schema changes.
   * * Pushes the resulting changeset(s) to iModelHub.
   * Note that the upgrade requires that the local briefcase be closed, and may result in one or two change sets depending on whether both
   * profile and domain schemas need to get upgraded.
   * @see ($docs/learning/backend/IModelDb.md#upgrading-schemas-in-an-imodel)
   */
  public static async upgradeSchemas(briefcase: OpenBriefcaseArgs): Promise<void> {
    // upgrading schemas involves closing and reopening the file repeatedly. That's because the process of upgrading
    // happens on a file open. We have to open-and-close the file at *each* of these steps:
    // - acquire schema lock
    // - upgrade profile
    // - push changes
    // - upgrade domain
    // - push changes
    // - release schema lock
    // good thing computers are fast. Fortunately upgrading should be rare (and the push time will dominate anyway.) Don't try to optimize any of this away.
    await withBriefcaseDb(briefcase, async (db) => db.acquireSchemaLock()); // may not really acquire lock if iModel uses "noLocks" mode.
    try {
      await this.doUpgrade(briefcase, { profile: ProfileOptions.Upgrade }, "Upgraded profile");
      await this.doUpgrade(briefcase, { domain: DomainOptions.Upgrade }, "Upgraded domain schemas");
    } finally {
      // Always release the schema lock, even if an upgrade step failed.
      await withBriefcaseDb(briefcase, async (db) => db.locks.releaseAllLocks());
    }
  }

  /** Open a briefcase file and return a new BriefcaseDb to interact with it.
   * @param args parameters that specify the file name, and options for opening the briefcase file
   */
  public static async open(args: OpenBriefcaseArgs): Promise<BriefcaseDb> {
    this.onOpen.raiseEvent(args); // listeners may throw to stop the open (see [[onOpen]])
    const file = { path: args.fileName, key: args.key };
    const openMode = args.readonly ? OpenMode.Readonly : OpenMode.ReadWrite;
    const nativeDb = this.openDgnDb(file, openMode);
    const briefcaseDb = new BriefcaseDb({ nativeDb, key: file.key ?? Guid.createValue(), openMode, briefcaseId: nativeDb.getBriefcaseId() });
    BriefcaseManager.logUsage(briefcaseDb);
    this.onOpened.raiseEvent(briefcaseDb, args);
    return briefcaseDb;
  }

  /** Close the native db and reopen the same file in the specified mode. */
  private closeAndReopen(openMode: OpenMode) {
    const fileName = this.pathName;
    this.nativeDb.closeIModel();
    this.nativeDb.openIModel(fileName, openMode);
  }

  /** Pull and apply changesets from iModelHub */
  public async pullChanges(arg?: PullChangesArgs): Promise<void> {
    if (this.isReadonly) // we allow pulling changes into a briefcase that is readonly - close and reopen it writeable
      this.closeAndReopen(OpenMode.ReadWrite);
    try {
      await BriefcaseManager.pullAndApplyChangesets(this, arg ?? {});
      this.initializeIModelDb();
    } finally {
      if (this.isReadonly) // if the briefcase was opened readonly - close and reopen it readonly
        this.closeAndReopen(OpenMode.Readonly);
    }
    IpcHost.notifyTxns(this, "notifyPulledChanges", this.changeset as ChangesetIndexAndId);
  }

  /** Push changes to iModelHub. */
  public async pushChanges(arg: PushChangesArgs): Promise<void> {
    if (this.briefcaseId === BriefcaseIdValue.Unassigned)
      return; // an unassigned briefcase cannot create changesets
    if (this.nativeDb.hasUnsavedChanges())
      throw new IModelError(ChangeSetStatus.HasUncommittedChanges, "Cannot push with unsaved changes");
    if (!this.nativeDb.hasPendingTxns())
      return; // nothing to push
    await BriefcaseManager.pullMergePush(this, arg);
    this.initializeIModelDb();
    const changeset = this.changeset as ChangesetIndexAndId;
    IpcHost.notifyTxns(this, "notifyPushedChanges", changeset);
  }
}
/** Used to reattach Daemon from a user's accessToken for V2 checkpoints.
 * @note Reattach only happens if the previous access token either has expired or is about to expire within an application-supplied safety duration.
 */
class DaemonReattach {
  /** the time at which the current token should be refreshed (its expiry minus safetySeconds) */
  private _timestamp = 0;
  /** while a refresh is happening, all callers get this promise. */
  private _promise: Promise<void> | undefined;
  /** Time, in seconds, before the current token expires to obtain a new token. Default is 1 hour. */
  private _safetySeconds: number;

  constructor(expiry: number, safetySeconds?: number) {
    this._safetySeconds = safetySeconds ?? 60 * 60; // default to 1 hour
    this.setTimestamp(expiry);
  }

  /** Perform the actual token exchange. Only one refresh runs at a time - see [[reattach]]. */
  private async performReattach(accessToken: AccessToken, iModel: IModelDb): Promise<void> {
    this._timestamp = 0; // everyone needs to wait until token is valid
    // we're going to request that the checkpoint manager use this user's accessToken to obtain a new access token for this checkpoint's storage account.
    Logger.logInfo(BackendLoggerCategory.Authorization, "attempting to reattach checkpoint");
    try {
      // this exchanges the supplied user accessToken for an expiring blob-store token to read the checkpoint.
      const response = await V2CheckpointManager.attach({ accessToken, iTwinId: iModel.iTwinId!, iModelId: iModel.iModelId, changeset: iModel.changeset });
      Logger.logInfo(BackendLoggerCategory.Authorization, "reattached checkpoint successfully");
      this.setTimestamp(response.expiryTimestamp);
    } finally {
      this._promise = undefined; // cleared even on failure so a later call can retry
    }
  }

  /** Record when the current token should next be refreshed: its expiry timestamp (ms) minus the safety interval. */
  private setTimestamp(expiryTimestamp: number) {
    this._timestamp = expiryTimestamp - (this._safetySeconds * 1000);
    if (this._timestamp < Date.now())
      Logger.logError(BackendLoggerCategory.Authorization, "attached with timestamp that expires before safety interval");
  }

  /** Refresh the checkpoint's token if it is near expiry. Concurrent callers share a single in-flight refresh promise. */
  public async reattach(accessToken: AccessToken, iModel: IModelDb): Promise<void> {
    if (this._timestamp > Date.now())
      return; // current token is fine
    if (undefined === this._promise) // has reattach already begun?
      this._promise = this.performReattach(accessToken, iModel); // no, start it
    return this._promise;
  }
}
/** A *snapshot* iModel database file that is used for archival and data transfer purposes.
 * @see [Snapshot iModels]($docs/learning/backend/AccessingIModels.md#snapshot-imodels)
 * @see [About IModelDb]($docs/learning/backend/IModelDb.md)
 * @public
 */
export class SnapshotDb extends IModelDb {
  public override get isSnapshot() { return true; }

  /** Handles access-token refresh for V2 checkpoints; undefined for ordinary snapshots. */
  private _daemonReattach: DaemonReattach | undefined;
  /** Set by createEmpty/createFrom when class views should be created during close. */
  private _createClassViewsOnClose?: boolean;

  /** Construct over an already-opened native db; open mode mirrors how the native db was opened. */
  private constructor(nativeDb: IModelJsNative.DgnDb, key: string) {
    super({ nativeDb, key, changeset: nativeDb.getCurrentChangeset() });
    this._openMode = nativeDb.isReadonly() ? OpenMode.Readonly : OpenMode.ReadWrite;
  }

  /** Find an open SnapshotDb by its key. Throws (in the superclass) if not found. */
  public static override findByKey(key: string): SnapshotDb {
    return super.findByKey(key) as SnapshotDb;
  }

  /** Find an open SnapshotDb by its key, or undefined if no open SnapshotDb has that key. */
  public static override tryFindByKey(key: string): SnapshotDb | undefined {
    const db = super.tryFindByKey(key);
    return db?.isSnapshotDb() ? db : undefined;
  }

  /** Create an *empty* local [Snapshot]($docs/learning/backend/AccessingIModels.md#snapshot-imodels) iModel file.
   * Snapshots are not synchronized with iModelHub, so do not have a change timeline.
   * > Note: A *snapshot* cannot be modified after [[close]] is called.
   * @param filePath The file that will contain the new iModel *snapshot*
   * @param options The parameters that define the new iModel *snapshot*
   * @returns A writeable SnapshotDb
   * @see [Snapshot iModels]($docs/learning/backend/AccessingIModels.md#snapshot-imodels)
   */
  public static createEmpty(filePath: LocalFileName, options: CreateEmptySnapshotIModelProps): SnapshotDb {
    const nativeDb = new IModelHost.platform.DgnDb();
    nativeDb.createIModel(filePath, options);
    nativeDb.resetBriefcaseId(BriefcaseIdValue.Unassigned); // snapshots never carry an assigned briefcaseId
    const snapshotDb = new SnapshotDb(nativeDb, Guid.createValue());
    if (options.createClassViews)
      snapshotDb._createClassViewsOnClose = true; // save flag that will be checked when close() is called
    return snapshotDb;
  }

  /** Create a local [Snapshot]($docs/learning/backend/AccessingIModels.md#snapshot-imodels) iModel file, using this iModel as a *seed* or starting point.
   * Snapshots are not synchronized with iModelHub, so do not have a change timeline.
   * > Note: A *snapshot* cannot be modified after [[close]] is called.
   * @param iModelDb The snapshot will be initialized from the current contents of this iModelDb
   * @param snapshotFile The file that will contain the new iModel *snapshot*
   * @param options Optional properties that determine how the snapshot iModel is created.
   * @returns A writeable SnapshotDb
   * @see [Snapshot iModels]($docs/learning/backend/AccessingIModels.md#snapshot-imodels)
   */
  public static createFrom(iModelDb: IModelDb, snapshotFile: string, options?: CreateSnapshotIModelProps): SnapshotDb {
    if (iModelDb.nativeDb.isEncrypted())
      throw new IModelError(DbResult.BE_SQLITE_MISUSE, "Cannot create a snapshot from an encrypted iModel");

    // Copy the seed file, compact it, and optionally encrypt the copy.
    IModelJsFs.copySync(iModelDb.pathName, snapshotFile);
    IModelHost.platform.DgnDb.vacuum(snapshotFile);
    if (options?.password)
      IModelHost.platform.DgnDb.encryptDb(snapshotFile, options);

    const nativeDb = new IModelHost.platform.DgnDb();
    nativeDb.openIModel(snapshotFile, OpenMode.ReadWrite, undefined, options);
    // Replace iModelId if seedFile is a snapshot, preserve iModelId if seedFile is an iModelHub-managed briefcase
    if (!BriefcaseManager.isValidBriefcaseId(nativeDb.getBriefcaseId()))
      nativeDb.setIModelId(Guid.createValue());

    // Strip briefcase-specific state so the copy is a pure snapshot.
    nativeDb.deleteLocalValue(BriefcaseLocalValue.StandaloneEdit);
    nativeDb.saveChanges();
    nativeDb.deleteAllTxns();
    nativeDb.resetBriefcaseId(BriefcaseIdValue.Unassigned);
    nativeDb.saveChanges();

    const snapshotDb = new SnapshotDb(nativeDb, Guid.createValue());
    if (options?.createClassViews)
      snapshotDb._createClassViewsOnClose = true; // save flag that will be checked when close() is called
    return snapshotDb;
  }

  /** open this SnapshotDb read/write, strictly to apply incoming changesets. Used for creating new checkpoints.
   * @internal
   */
  public static openForApplyChangesets(path: LocalFileName, props?: SnapshotOpenOptions): SnapshotDb {
    const file = { path, key: props?.key };
    const nativeDb = this.openDgnDb(file, OpenMode.ReadWrite, undefined, props);
    return new SnapshotDb(nativeDb, file.key!);
  }

  /** Open a read-only iModel *snapshot*.
   * @param path the full path of the snapshot iModel file to open.
   * @param opts options for opening snapshot
   * @see [[close]]
   * @throws [[IModelError]] If the file is not found or is not a valid *snapshot*.
   */
  public static openFile(path: LocalFileName, opts?: SnapshotOpenOptions): SnapshotDb {
    const file = { path, key: opts?.key };
    const nativeDb = this.openDgnDb(file, OpenMode.Readonly, undefined, opts);
    return new SnapshotDb(nativeDb, file.key!);
  }

  /** Open a previously downloaded V1 checkpoint file.
   * @note The key is generated by this call is predictable and is formed from the IModelId and ChangeSetId.
   * This is so every backend working on the same checkpoint will use the same key, to permit multiple backends
   * servicing the same checkpoint.
   * @internal
   */
  public static openCheckpointV1(fileName: LocalFileName, checkpoint: CheckpointProps) {
    const snapshot = this.openFile(fileName, { key: CheckpointManager.getKey(checkpoint) });
    snapshot._iTwinId = checkpoint.iTwinId;
    return snapshot;
  }

  /** Open a V2 *checkpoint*, a special form of snapshot iModel that represents a read-only snapshot of an iModel from iModelHub at a particular point in time.
   * > Note: The checkpoint daemon must already be running and a checkpoint must already exist in iModelHub's storage *before* this function is called.
   * @param checkpoint The checkpoint to open
   * @note The key is generated by this call is predictable and is formed from the IModelId and ChangeSetId.
   * This is so every backend working on the same checkpoint will use the same key, to permit multiple backends
   * servicing the same checkpoint.
   * @throws [[IModelError]] If the checkpoint is not found in iModelHub or the checkpoint daemon is not supported in the current environment.
   * @internal
   */
  public static async openCheckpointV2(checkpoint: CheckpointProps): Promise<SnapshotDb> {
    const { filePath, expiryTimestamp } = await V2CheckpointManager.attach(checkpoint);
    const key = CheckpointManager.getKey(checkpoint);
    // NOTE: Currently the key contains a ':' which can not be part of a filename on windows, so it can not be used as the tempFileBase.
    const tempFileBase = join(IModelHost.cacheDir, `${checkpoint.iModelId}\$${checkpoint.changeset.id}`); // temp files for this checkpoint should go in the cacheDir.
    const snapshot = SnapshotDb.openFile(filePath, { lazyBlockCache: true, key, tempFileBase });
    snapshot._iTwinId = checkpoint.iTwinId;
    try {
      CheckpointManager.validateCheckpointGuids(checkpoint, snapshot.nativeDb);
    } catch (err: any) {
      // Close before rethrowing so a failed validation does not leak an open file.
      snapshot.close();
      throw err;
    }
    snapshot._daemonReattach = new DaemonReattach(expiryTimestamp, checkpoint.reattachSafetySeconds);
    return snapshot;
  }

  /** Used to refresh the daemon's accessToken if this is a V2 checkpoint.
   * @internal
   */
  public override async reattachDaemon(accessToken: AccessToken): Promise<void> {
    return this._daemonReattach?.reattach(accessToken, this);
  }

  /** @internal */
  public override beforeClose(): void {
    super.beforeClose();
    if (this._createClassViewsOnClose) { // check for flag set during create
      if (BentleyStatus.SUCCESS !== this.nativeDb.createClassViewsInDb()) {
        throw new IModelError(IModelStatus.SQLiteError, "Error creating class views");
      } else {
        this.saveChanges();
      }
    }
  }
}
/**
 * Standalone iModels are read/write files that are not associated with an iTwin or managed by iModelHub.
 * They are relevant only for testing, or for small-scale single-user scenarios.
 * Standalone iModels are designed such that the API for Standalone iModels and Briefcase
 * iModels (those synchronized with iModelHub) are as similar and consistent as possible.
 * This leads to a straightforward process where the a user starts with StandaloneDb and can
 * optionally choose to upgrade to an iTwin.
 *
 * Some additional details. Standalone iModels:
 * - always have [Guid.empty]($bentley) for their iTwinId (they are "unassociated" files)
 * - always have BriefcaseId === [BriefcaseIdValue.Unassigned]($common)
 * - are connected to the frontend via [BriefcaseConnection.openStandalone]($frontend)
 * - may be opened without supplying any user credentials
 * - may be opened read/write
 * - cannot apply a changeset to nor generate a changesets (since there is no timeline from which to get/push changesets)
 * @public
 */
export class StandaloneDb extends BriefcaseDb {
  public override get isStandalone(): boolean { return true; }

  protected override get useLockServer() { return false; } // standalone iModels have no lock server

  /** Find an open StandaloneDb by its key. Throws (in the superclass) if not found. */
  public static override findByKey(key: string): StandaloneDb {
    return super.findByKey(key) as StandaloneDb;
  }

  /** Find an open StandaloneDb by its key, or undefined if no open StandaloneDb has that key. */
  public static override tryFindByKey(key: string): StandaloneDb | undefined {
    const db = super.tryFindByKey(key);
    return db?.isStandaloneDb() ? db : undefined;
  }

  /** Create an *empty* standalone iModel.
   * @param filePath The file path for the iModel
   * @param args The parameters that define the new iModel
   */
  public static createEmpty(filePath: LocalFileName, args: CreateEmptyStandaloneIModelProps): StandaloneDb {
    const nativeDb = new IModelHost.platform.DgnDb();
    nativeDb.createIModel(filePath, args);
    nativeDb.saveLocalValue(BriefcaseLocalValue.StandaloneEdit, args.allowEdit);
    nativeDb.setITwinId(Guid.empty); // standalone files are "unassociated" - see class doc
    nativeDb.resetBriefcaseId(BriefcaseIdValue.Unassigned);
    nativeDb.saveChanges();
    return new StandaloneDb({ nativeDb, key: Guid.createValue(), briefcaseId: BriefcaseIdValue.Unassigned, openMode: OpenMode.ReadWrite });
  }

  /**
   * Upgrades the schemas in the standalone iModel file.
   * Note that the upgrade requires that the file be closed, and will leave it back in the closed state.
   * @param filePath Full path name of the standalone iModel file.
   * @see ($docs/learning/backend/IModelDb.md#upgrading-schemas-in-an-imodel)
   * @see [[StandaloneDb.validateSchemas]]
   */
  public static upgradeStandaloneSchemas(filePath: LocalFileName) {
    // Like BriefcaseDb.upgradeSchemas, each upgrade happens on file open, so open/close once per step.
    let nativeDb = this.openDgnDb({ path: filePath }, OpenMode.ReadWrite, { profile: ProfileOptions.Upgrade });
    nativeDb.closeIModel();
    nativeDb = this.openDgnDb({ path: filePath }, OpenMode.ReadWrite, { domain: DomainOptions.Upgrade });
    nativeDb.closeIModel();
  }

  /** Open a standalone iModel file.
   * @param filePath The path of the standalone iModel file.
   * @param openMode Optional open mode for the standalone iModel. The default is read/write.
   * @param options Optional open options (e.g. a caller-supplied key).
   * @throws [[IModelError]] if the file is not a standalone iModel.
   * @see [BriefcaseConnection.openStandalone]($frontend) to open a StandaloneDb from the frontend
   */
  public static openFile(filePath: LocalFileName, openMode: OpenMode = OpenMode.ReadWrite, options?: StandaloneOpenOptions): StandaloneDb {
    const file = { path: filePath, key: options?.key };
    const nativeDb = this.openDgnDb(file, openMode);
    try {
      const iTwinId = nativeDb.getITwinId();
      if (iTwinId !== Guid.empty) // a "standalone" iModel means it is not associated with an iTwin
        throw new IModelError(IModelStatus.WrongIModel, `${filePath} is not a Standalone iModel. iTwinId=${iTwinId}`);
      return new StandaloneDb({ nativeDb, key: file.key!, openMode, briefcaseId: BriefcaseIdValue.Unassigned });
    } catch (error) {
      // Don't leak the open native db if validation or construction fails.
      nativeDb.closeIModel();
      throw error;
    }
  }
}
/** @packageDocumentation
 * @module ModelState
 */
import { Id64, Id64String, JsonUtils } from "@itwin/core-bentley";
import {
GeometricModel2dProps, GeometricModel3dProps, GeometricModelProps, ModelProps, RealityDataFormat, RealityDataSourceKey, RelatedElement, SpatialClassifiers,
} from "@itwin/core-common";
import { Point2d, Range3d } from "@itwin/core-geometry";
import { EntityState } from "./EntityState";
import { HitDetail } from "./HitDetail";
import { IModelConnection } from "./IModelConnection";
import { RealityDataSource } from "./RealityDataSource";
import { createOrbitGtTileTreeReference, createPrimaryTileTreeReference, createRealityTileTreeReference, TileTreeReference } from "./tile/internal";
import { ViewState } from "./ViewState";
/** Represents the front-end state of a [Model]($backend).
 * @public
 */
export class ModelState extends EntityState implements ModelProps {
  /** @internal */
  public static override get className() { return "Model"; }
  /** The element this model is modeling. */
  public readonly modeledElement: RelatedElement;
  /** The model's name; empty string if the props supplied none. */
  public readonly name: string;
  /** NB! Must always match the model of the modeledElement! */
  public parentModel: Id64String;
  /** Whether the model is flagged as private. */
  public readonly isPrivate: boolean;
  /** Whether the model is a template. */
  public readonly isTemplate: boolean;

  constructor(props: ModelProps, iModel: IModelConnection, state?: ModelState) {
    super(props, iModel, state);
    this.modeledElement = RelatedElement.fromJSON(props.modeledElement)!;
    this.name = props.name ?? "";
    this.parentModel = Id64.fromJSON(props.parentModel)!;
    this.isPrivate = JsonUtils.asBool(props.isPrivate);
    this.isTemplate = JsonUtils.asBool(props.isTemplate);
  }

  /** Add all custom-handled properties of a Model to a json object. */
  public override toJSON(): ModelProps {
    const json = super.toJSON() as ModelProps;
    json.modeledElement = this.modeledElement;
    json.parentModel = this.parentModel;
    json.name = this.name;
    // The boolean flags are emitted only when set, keeping the JSON compact.
    if (this.isPrivate)
      json.isPrivate = this.isPrivate;
    if (this.isTemplate)
      json.isTemplate = this.isTemplate;
    return json;
  }

  /** Determine whether this is a GeometricModel */
  public get isGeometricModel(): boolean { return false; }
  /** Attempts to cast this model to a geometric model. */
  public get asGeometricModel(): GeometricModelState | undefined { return undefined; }
  /** Attempts to cast this model to a 3d geometric model. */
  public get asGeometricModel3d(): GeometricModel3dState | undefined { return undefined; }
  /** Attempts to cast this model to a 2d geometric model. */
  public get asGeometricModel2d(): GeometricModel2dState | undefined { return undefined; }
  /** Attempts to cast this model to a spatial model. */
  public get asSpatialModel(): SpatialModelState | undefined { return undefined; }

  /**
   * Return the tool tip for this model. This is called only if the hit does not return a tooltip.
   * @internal
   */
  public getToolTip(_hit: HitDetail): HTMLElement | string | undefined { return undefined; }
}
/** Represents the front-end state of a [GeometricModel]($backend).
 * The contents of a GeometricModelState can be rendered inside a [[Viewport]].
 * @public
 */
export abstract class GeometricModelState extends ModelState implements GeometricModelProps {
  /** @internal */
  public static override get className() { return "GeometricModel"; }
  /** @internal */
  // presumably a GUID that identifies the current state of the model's geometry - confirm against backend GeometricModel
  public geometryGuid?: string;
  // Lazily-computed union of element ranges; populated by queryModelRange().
  private _modelRange?: Range3d;

  constructor(props: GeometricModelProps, iModel: IModelConnection, state?: GeometricModelState) {
    super(props, iModel, state);
    this.geometryGuid = props.geometryGuid;
  }
  /** Returns true if this is a 3d model (a [[GeometricModel3dState]]). */
  public abstract get is3d(): boolean;
  /** @internal */
  public override get asGeometricModel(): GeometricModelState { return this; }
  /** Returns true if this is a 2d model (a [[GeometricModel2dState]]). */
  public get is2d(): boolean { return !this.is3d; }
  /** @internal */
  public override get isGeometricModel(): boolean { return true; }
  /** The model Id used when requesting this model's tile tree; here simply this model's own Id. @internal */
  public get treeModelId(): Id64String { return this.id; }
/** Query for the union of the ranges of all the elements in this GeometricModel.
* @internal
*/
public async queryModelRange(): Promise<Range3d> {
if (undefined === this._modelRange) {
const ranges = await this.iModel.models.queryModelRanges(this.id);
this._modelRange = Range3d.fromJSON(ranges[0]);
}
return this._modelRange;
}
/** @internal */
public createTileTreeReference(view: ViewState): TileTreeReference {
// If this is a reality model, its tile tree is obtained from reality data service URL.
const spatialModel = this.asSpatialModel;
const rdSourceKey = this.jsonProperties.rdSourceKey;
if (rdSourceKey) {
const useOrbitGtTileTreeReference = rdSourceKey.format === RealityDataFormat.OPC;
const treeRef = (!useOrbitGtTileTreeReference) ?
createRealityTileTreeReference({
rdSourceKey,
iModel: this.iModel,
source: view,
modelId: this.id,
// url: tilesetUrl, // If rdSourceKey is defined, url is not used
classifiers: undefined !== spatialModel ? spatialModel.classifiers : undefined,
}) :
createOrbitGtTileTreeReference({
rdSourceKey,
iModel: this.iModel,
source: view,
modelId: this.id,
// orbitGtBlob: props.orbitGtBlob!, // If rdSourceKey is defined, orbitGtBlob is not used
classifiers: undefined !== spatialModel ? spatialModel.classifiers : undefined,
});
return treeRef;
}
const orbitGtBlob = this.jsonProperties.orbitGtBlob;
// If this is an OrbitGt reality model, create it's reference
if(orbitGtBlob) {
let orbitGtName = "";
if (orbitGtBlob.blobFileName !== "") {
if (orbitGtBlob.blobFileName[0] === "/")
orbitGtName = orbitGtBlob.blobFileName.substring(1);
else
orbitGtName = orbitGtBlob.blobFileName;
}
// Create rdSourceKey if not provided
const rdSourceKeyOGT: RealityDataSourceKey = RealityDataSource.createKeyFromOrbitGtBlobProps(orbitGtBlob);
return createOrbitGtTileTreeReference({
rdSourceKey: rdSourceKeyOGT,
iModel: this.iModel,
source: view,
modelId: this.id,
orbitGtBlob,
name: orbitGtName,
classifiers: undefined !== spatialModel ? spatialModel.classifiers : undefined,
});
}
// If this is a TileTree reality model, create it's reference
const tilesetUrl = this.jsonProperties.tilesetUrl;
if(tilesetUrl) {
const rdSourceKeyCS = RealityDataSource.createRealityDataSourceKeyFromUrl(tilesetUrl);
return createRealityTileTreeReference({
rdSourceKey: rdSourceKeyCS,
url : tilesetUrl,
iModel: this.iModel,
source: view,
modelId: this.id,
tilesetToDbTransform: this.jsonProperties.tilesetToDbTransform,
classifiers: undefined !== spatialModel ? spatialModel.classifiers : undefined,
});
}
return createPrimaryTileTreeReference(view, this);
}
}
/** Represents the front-end state of a [GeometricModel2d]($backend).
 * @public
 */
export class GeometricModel2dState extends GeometricModelState implements GeometricModel2dProps {
    /** @internal */
    public static override get className() { return "GeometricModel2d"; }

    /** Origin of this 2d model, as supplied by GeometricModel2dProps. @internal */
    public readonly globalOrigin: Point2d;

    constructor(props: GeometricModel2dProps, iModel: IModelConnection, state?: GeometricModel2dState) {
        super(props, iModel, state);
        this.globalOrigin = Point2d.fromJSON(props.globalOrigin);
    }

    /** Always false for a 2d model. @internal */
    public get is3d(): boolean { return false; }

    /** @internal */
    public override get asGeometricModel2d(): GeometricModel2dState { return this; }

    public override toJSON(): GeometricModel2dProps {
        const json = super.toJSON() as GeometricModel2dProps;
        json.globalOrigin = this.globalOrigin;
        return json;
    }
}
/** Represents the front-end state of a [GeometricModel3d]($backend).
 * @public
 */
export class GeometricModel3dState extends GeometricModelState {
    /** @internal */
    public static override get className() { return "GeometricModel3d"; }

    /** If true, then the elements in this GeometricModel3dState are not in real-world coordinates and will not be in the spatial index.
     * @note The associated ECProperty was added to the BisCore schema in version 1.0.8
     */
    public readonly isNotSpatiallyLocated: boolean;

    /** If true, then the elements in this GeometricModel3dState are expected to be in an XY plane.
     * @note The associated ECProperty was added to the BisCore schema in version 1.0.8
     */
    public readonly isPlanProjection: boolean;

    constructor(props: GeometricModel3dProps, iModel: IModelConnection, state?: GeometricModel3dState) {
        super(props, iModel, state);
        this.isNotSpatiallyLocated = JsonUtils.asBool(props.isNotSpatiallyLocated);
        this.isPlanProjection = JsonUtils.asBool(props.isPlanProjection);
    }

    /** @internal */
    public override toJSON(): GeometricModel3dProps {
        const json = super.toJSON() as GeometricModel3dProps;
        // Emit the flags only when set, keeping the serialized form minimal.
        if (this.isNotSpatiallyLocated)
            json.isNotSpatiallyLocated = true;
        if (this.isPlanProjection)
            json.isPlanProjection = true;
        return json;
    }

    /** Always true for a 3d model. @internal */
    public get is3d(): boolean { return true; }

    /** @internal */
    public override get asGeometricModel3d(): GeometricModel3dState { return this; }

    /** If true, then the elements in this GeometricModel3dState are in real-world coordinates and will be in the spatial index. */
    public get isSpatiallyLocated(): boolean { return !this.isNotSpatiallyLocated; }
}
/** Represents the front-end state of a [SheetModel]($backend).
 * @public
 */
export class SheetModelState extends GeometricModel2dState {
    /** @internal */
    public static override get className() {
        return "SheetModel";
    }
}
/** Represents the front-end state of a [SpatialModel]($backend).
 * @public
 */
export class SpatialModelState extends GeometricModel3dState {
    /** @internal */
    public static override get className() { return "SpatialModel"; }

    /** If this is a reality model, provides access to a list of available spatial classifiers that can be applied to it. */
    public readonly classifiers?: SpatialClassifiers;

    /** @internal */
    public override get asSpatialModel(): SpatialModelState { return this; }

    /** Return true if this is a reality model (represented by a 3d tile set). */
    public get isRealityModel(): boolean {
        return undefined !== this.jsonProperties.tilesetUrl;
    }

    public constructor(props: ModelProps, iModel: IModelConnection, state?: SpatialModelState) {
        super(props, iModel, state);
        // Only reality models get classifiers; other spatial models leave this undefined.
        if (this.isRealityModel)
            this.classifiers = new SpatialClassifiers(this.jsonProperties);
    }
}
/** Represents the front-end state of a [PhysicalModel]($backend).
 * @public
 */
export class PhysicalModelState extends SpatialModelState {
    /** @internal */
    public static override get className() {
        return "PhysicalModel";
    }
}
/** Represents the front-end state of a [SpatialLocationModel]($backend).
 * @public
 */
export class SpatialLocationModelState extends SpatialModelState {
    /** @internal */
    public static override get className() {
        return "SpatialLocationModel";
    }
}
/** Represents the front-end state of a [DrawingModel]($backend).
 * @public
 */
export class DrawingModelState extends GeometricModel2dState {
    /** @internal */
    public static override get className() {
        return "DrawingModel";
    }
}
/** Represents the front-end state of a [SectionDrawingModel]($backend).
* @public
*/
export class SectionDrawingModelState extends DrawingModelState {
/** @internal */
public static override get className() { return "SectionDrawingModel"; }
} | the_stack |
* @title: 2D Physics constraints
* @description:
* This sample shows how to create each of the 2D physics constraints
* (point, weld, distance, line, angle, motor, pulley and custom).
* Each object in the scene can be manipulated with the mouse to see how the constraints work.
*/
/*{{ javascript("jslib/observer.js") }}*/
/*{{ javascript("jslib/requesthandler.js") }}*/
/*{{ javascript("jslib/utilities.js") }}*/
/*{{ javascript("jslib/services/turbulenzservices.js") }}*/
/*{{ javascript("jslib/services/turbulenzbridge.js") }}*/
/*{{ javascript("jslib/services/gamesession.js") }}*/
/*{{ javascript("jslib/services/mappingtable.js") }}*/
/*{{ javascript("jslib/shadermanager.js") }}*/
/*{{ javascript("jslib/physics2ddevice.js") }}*/
/*{{ javascript("jslib/draw2d.js") }}*/
/*{{ javascript("jslib/boxtree.js") }}*/
/*{{ javascript("jslib/physics2ddebugdraw.js") }}*/
/*{{ javascript("jslib/fontmanager.js") }}*/
/*{{ javascript("scripts/htmlcontrols.js") }}*/
/*global TurbulenzEngine: true */
/*global TurbulenzServices: false */
/*global RequestHandler: false */
/*global Physics2DDevice: false */
/*global Draw2D: false */
/*global FontManager: false */
/*global ShaderManager: false */
/*global Physics2DDebugDraw: false */
/*global HTMLControls: false */
// Entry point: the engine calls this once the runtime has loaded.
TurbulenzEngine.onload = function onloadFn()
{
//==========================================================================
// HTML Controls
//==========================================================================
var htmlControls;
// Constraint tuning state, mirrored by the HTML checkbox/sliders wired up below.
var elasticConstraints = false;
var frequency = 1;
var damping = 0.1;
//==========================================================================
// Turbulenz Initialization
//==========================================================================
var graphicsDevice = TurbulenzEngine.createGraphicsDevice({});
var mathDevice = TurbulenzEngine.createMathDevice({});
var requestHandler = RequestHandler.create({});
var fontManager = FontManager.create(graphicsDevice, requestHandler);
var shaderManager = ShaderManager.create(graphicsDevice, requestHandler);
// Populated asynchronously once the game session and asset loads complete.
var font, shader, gameSession;
// Once the game session exists, resolve the asset mapping table and kick off
// the font and shader loads used for rendering the section labels.
function sessionCreated()
{
TurbulenzServices.createMappingTable(
requestHandler,
gameSession,
function (mappingTable)
{
var urlMapping = mappingTable.urlMapping;
var assetPrefix = mappingTable.assetPrefix;
shaderManager.setPathRemapping(urlMapping, assetPrefix);
fontManager.setPathRemapping(urlMapping, assetPrefix);
fontManager.load('fonts/hero.fnt', function (fontObject)
{
font = fontObject;
});
shaderManager.load('shaders/font.cgfx', function (shaderObject)
{
shader = shaderObject;
});
}
);
}
gameSession = TurbulenzServices.createGameSession(requestHandler, sessionCreated);
//==========================================================================
// Physics2D/Draw2D (Use Draw2D to define viewport scalings)
//==========================================================================
// set up.
var phys2D = Physics2DDevice.create();
// size of physics stage.
var stageWidth = 40; // meters
var stageHeight = 20; // meters
var draw2D = Draw2D.create({
graphicsDevice : graphicsDevice
});
var debug = Physics2DDebugDraw.create({
graphicsDevice : graphicsDevice
});
// Configure draw2D viewport to the physics stage.
// As well as the physics2D debug-draw viewport.
draw2D.configure({
viewportRectangle : [0, 0, stageWidth, stageHeight],
scaleMode : 'scale'
});
debug.setPhysics2DViewport([0, 0, stageWidth, stageHeight]);
var world = phys2D.createWorld({
gravity : [0, 20] // 20 m/s/s
});
// Create a static body at (0, 0) with no rotation
// which we add to the world to use as the first body
// in hand constraint. We set anchor for this body
// as the cursor position in physics coordinates.
var staticReferenceBody = phys2D.createRigidBody({
type : 'static'
});
world.addRigidBody(staticReferenceBody);
// Non-null only while the user is dragging a body with the mouse.
var handConstraint = null;
// Rebuild the entire demo scene: clear the world, add the border walls and
// one demonstration rig per constraint type (point, weld, distance, line,
// angle, motor, pulley, custom), laid out in a 4x2 grid of stage cells.
function reset()
{
// Remove all bodies and constraints from world.
world.clear();
handConstraint = null;
// Create a static body around the stage to stop objects leaving the viewport.
// And walls between each constraint section.
var border = phys2D.createRigidBody({
type : 'static'
});
var thickness = 0.01; // 1cm
var i;
// Five vertical walls: the two stage edges plus three interior dividers.
for (i = 0; i <= 4; i += 1)
{
var x = (stageWidth / 4) * i;
border.addShape(phys2D.createPolygonShape({
vertices : phys2D.createRectangleVertices(x - thickness, 0, x + thickness, stageHeight)
}));
}
// Three horizontal walls: top, middle divider and bottom.
for (i = 0; i <= 2; i += 1)
{
var y = (stageHeight / 2) * i;
border.addShape(phys2D.createPolygonShape({
vertices : phys2D.createRectangleVertices(0, y - thickness, stageWidth, y + thickness)
}));
}
world.addRigidBody(border);
// Helper: add a dynamic circle body at (x, y); when `pinned`, also anchor
// its centre to the static reference body with a "pin" point constraint.
function circle(x: number, y: number, radius: number, pinned?: boolean)
{
var body = phys2D.createRigidBody({
shapes : [
phys2D.createCircleShape({
radius : radius
})
],
position : [x, y]
});
world.addRigidBody(body);
if (pinned)
{
var pin = phys2D.createPointConstraint({
bodyA : staticReferenceBody,
bodyB : body,
anchorA : [x, y],
anchorB : [0, 0],
userData : "pin"
});
world.addConstraint(pin);
}
return body;
}
var bodyA, bodyB, worldAnchor;
// ------------------------------------
// Point Constraint
bodyA = circle(3.3, 5, 1);
bodyB = circle(6.6, 5, 1);
worldAnchor = [5, 5];
var pointConstraint = phys2D.createPointConstraint({
bodyA : bodyA,
bodyB : bodyB,
anchorA : bodyA.transformWorldPointToLocal(worldAnchor),
anchorB : bodyB.transformWorldPointToLocal(worldAnchor),
stiff : (!elasticConstraints),
frequency : frequency,
damping : damping
});
world.addConstraint(pointConstraint);
// ------------------------------------
// Weld Constraint
bodyA = circle(13.3, 5, 1);
bodyB = circle(16.6, 5, 1);
worldAnchor = [15, 5];
var weldConstraint = phys2D.createWeldConstraint({
bodyA : bodyA,
bodyB : bodyB,
anchorA : bodyA.transformWorldPointToLocal(worldAnchor),
anchorB : bodyB.transformWorldPointToLocal(worldAnchor),
phase : 0,
stiff : (!elasticConstraints),
frequency : frequency,
damping : damping
});
world.addConstraint(weldConstraint);
// ------------------------------------
// Distance Constraint
bodyA = circle(23.3, 5, 1);
bodyB = circle(26.6, 5, 1);
var distanceConstraint = phys2D.createDistanceConstraint({
bodyA : bodyA,
bodyB : bodyB,
anchorA : [1, 0],
anchorB : [-1, 0],
lowerBound : 1,
upperBound : 3,
stiff : (!elasticConstraints),
frequency : frequency,
damping : damping
});
world.addConstraint(distanceConstraint);
// ------------------------------------
// Line Constraint
bodyA = circle(33.3, 5, 1);
bodyB = circle(36.6, 5, 1);
worldAnchor = [35, 5];
var lineConstraint = phys2D.createLineConstraint({
bodyA : bodyA,
bodyB : bodyB,
anchorA : bodyA.transformWorldPointToLocal(worldAnchor),
anchorB : bodyB.transformWorldPointToLocal(worldAnchor),
axis : [0, 1],
lowerBound : -1,
upperBound : 1,
stiff : (!elasticConstraints),
frequency : frequency,
damping : damping
});
world.addConstraint(lineConstraint);
// ------------------------------------
// Angle Constraint
bodyA = circle(3, 15, 1.5, true);
bodyB = circle(7, 15, 1.5, true);
var angleConstraint = phys2D.createAngleConstraint({
bodyA : bodyA,
bodyB : bodyB,
ratio : 3,
lowerBound : -Math.PI * 2,
upperBound : Math.PI * 2,
stiff : (!elasticConstraints),
frequency : frequency,
damping : damping
});
world.addConstraint(angleConstraint);
// ------------------------------------
// Motor Constraint
bodyA = circle(13, 15, 1.5, true);
bodyB = circle(17, 15, 1.5, true);
var motorConstraint = phys2D.createMotorConstraint({
bodyA : bodyA,
bodyB : bodyB,
ratio : 4,
rate : 20
});
world.addConstraint(motorConstraint);
// ------------------------------------
// Pulley Constraint
var bodyC;
bodyA = circle(23.3, 16.6, 0.5);
bodyB = circle(25, 13.3, 1, true);
bodyC = circle(26.6, 16.6, 0.5);
// Additional distance constraints to prevent pulley
// becoming degenerate when one side becomes 0 length.
var distanceA = phys2D.createDistanceConstraint({
bodyA : bodyA,
bodyB : bodyB,
lowerBound : 0.25,
upperBound : Number.POSITIVE_INFINITY,
anchorA : [0, -0.5],
anchorB : [-1, 0],
userData : 'pin'
});
world.addConstraint(distanceA);
var distanceB = phys2D.createDistanceConstraint({
bodyA : bodyC,
bodyB : bodyB,
lowerBound : 0.25,
upperBound : Number.POSITIVE_INFINITY,
anchorA : [0, -0.5],
anchorB : [1, 0],
userData : 'pin'
});
world.addConstraint(distanceB);
// NOTE: the pinned middle wheel is used as both bodyB and bodyC here, so the
// two rope segments run bodyA <-> middle (anchor [-1, 0]) and
// middle (anchor [1, 0]) <-> bodyD, matching the distance constraints above.
var pulleyConstraint = phys2D.createPulleyConstraint({
bodyA : bodyA,
bodyB : bodyB,
bodyC : bodyB,
bodyD : bodyC,
anchorA : [0, -0.5],
anchorB : [-1, 0],
anchorC : [1, 0],
anchorD : [0, -0.5],
ratio : 2,
lowerBound : 6,
upperBound : 8,
stiff : (!elasticConstraints),
frequency : frequency,
damping : damping
});
world.addConstraint(pulleyConstraint);
// ------------------------------------
// Custom Constraint
bodyA = circle(35, 13.3, 1);
bodyB = circle(35, 16.6, 1, true);
// Additional line constraint to pin upper body to rack.
var line = phys2D.createLineConstraint({
bodyA : staticReferenceBody,
bodyB : bodyA,
anchorA : [35, 13.3],
anchorB : [0, 0],
axis : [1, 0],
lowerBound : -5,
upperBound : 5,
userData : 'pin'
});
world.addConstraint(line);
// Custom constraint defined so that the x-position of
// the first body, is equal to the rotation of the
// second body.
//
// Constraint equation:
// (pi / 5) * (bodyA.posX - 35) - bodyB.rotation = 0
//
// Time Derivative (Velocity constraint):
// (pi / 5) * bodyA.velX - bodyB.angularVel = 0
//
// Partial derivatives of velocity constraint (Jacobian)
// velAx velAy angVelA velBx velBy angVelB
// [ (pi / 5), 0, 0, 0, 0, -1 ]
//
var user = phys2D.createCustomConstraint({
bodies : [bodyA, bodyB],
dimension : 1,
// Positional error of the constraint, written into data[index].
position : function positionFn(data, index)
{
var bodyA = this.bodies[0];
var bodyB = this.bodies[1];
data[index] = (Math.PI / 5 * (bodyA.getPosition()[0] - 35)) - bodyB.getRotation();
},
// Row of the Jacobian matrix, one entry per body velocity component.
jacobian : function jacobianFn(data, index)
{
data[index] = (Math.PI / 5);
data[index + 1] = 0;
data[index + 2] = 0;
data[index + 3] = 0;
data[index + 4] = 0;
data[index + 5] = -1;
},
// Visualize the elastic constraint as two springs towards the targets.
debugDraw : function debugDrawFn(debug, stiff)
{
if (stiff)
{
return;
}
var bodyA = this.bodies[0];
var bodyB = this.bodies[1];
var posA = bodyA.getPosition();
var posB = bodyB.getPosition();
// target for x-position of bodyA
var targetX = ((bodyB.getRotation()) / (Math.PI / 5)) + 35;
// target for rotation of bodyB
var targetR = (Math.PI / 5 * (posA[0] - 35));
// 3 pixel spring radius
var radius = 3 * debug.screenToPhysics2D;
debug.drawLinearSpring(posA[0], posA[1], targetX, posA[1], 3, radius, [1, 0, 0, 1]);
debug.drawSpiralSpring(posB[0], posB[1], targetR, bodyB.getRotation(), radius, radius * 2, [0, 0, 1, 1]);
},
stiff : (!elasticConstraints),
frequency : frequency,
damping : damping
});
world.addConstraint(user);
}
reset();
function invalidateConstraints()
{
var constraints = world.constraints;
var limit = constraints.length;
var i;
for (i = 0; i < limit; i += 1)
{
var con = constraints[i];
// Don't re-configure hand or pin constraints.
if (con === handConstraint || con.userData === "pin")
{
continue;
}
con.configure({
stiff : (!elasticConstraints),
frequency : frequency,
damping : damping
});
}
}
//==========================================================================
// Mouse/Keyboard controls
//==========================================================================
var inputDevice = TurbulenzEngine.createInputDevice({});
var keyCodes = inputDevice.keyCodes;
var mouseCodes = inputDevice.mouseCodes;
// Last reported cursor position in screen coordinates.
var mouseX = 0;
var mouseY = 0;
// Track the cursor so the hand constraint can follow it each frame.
var onMouseOver = function mouseOverFn(x, y)
{
mouseX = x;
mouseY = y;
};
inputDevice.addEventListener('mouseover', onMouseOver);
// 'r' rebuilds the scene in its initial configuration.
var onKeyUp = function onKeyUpFn(keynum)
{
if (keynum === keyCodes.R) // 'r' key
{
reset();
}
};
inputDevice.addEventListener('keyup', onKeyUp);
// On left-click, grab the first dynamic body under the cursor with a soft
// point constraint anchored to the static reference body; the anchor is
// moved to follow the cursor in the main loop.
var onMouseDown = function onMouseDownFn(code, x, y)
{
    mouseX = x;
    mouseY = y;
    if (handConstraint)
    {
        // Already dragging a body.
        return;
    }
    // Map screen coordinates into physics-stage coordinates.
    var point = draw2D.viewportMap(x, y);
    var body;
    if (code === mouseCodes.BUTTON_0) // Left button
    {
        var bodies = [];
        var numBodies = world.bodyPointQuery(point, bodies);
        var i;
        for (i = 0; i < numBodies; i += 1)
        {
            body = bodies[i];
            if (body.isDynamic())
            {
                handConstraint = phys2D.createPointConstraint({
                    bodyA : staticReferenceBody,
                    bodyB : body,
                    anchorA : point,
                    anchorB : body.transformWorldPointToLocal(point),
                    stiff : false,
                    maxForce : 1e5
                });
                world.addConstraint(handConstraint);
                // Grab only one body. Without this break, every further
                // dynamic body under the cursor would overwrite
                // handConstraint, leaking the earlier constraints (mouse-up
                // removes only the last one).
                break;
            }
        }
    }
};
inputDevice.addEventListener('mousedown', onMouseDown);
// Releasing the button (or leaving the canvas) drops the grabbed body.
var onMouseLeaveUp = function onMouseLeaveUpFn()
{
    if (handConstraint)
    {
        world.removeConstraint(handConstraint);
        handConstraint = null;
    }
};
inputDevice.addEventListener('mouseleave', onMouseLeaveUp);
inputDevice.addEventListener('mouseup', onMouseLeaveUp);
//==========================================================================
// Main loop.
//==========================================================================
var realTime = 0;
var prevTime = TurbulenzEngine.time;
var fontTechnique, fontTechniqueParameters;
// Per-frame update: advance the simulation to wall-clock time, debug-draw
// the world and render the section labels.
function mainLoop()
{
if (!graphicsDevice.beginFrame())
{
return;
}
inputDevice.update();
graphicsDevice.clear([0.3, 0.3, 0.3, 1.0]);
// Drag the grabbed body towards the current cursor position.
if (handConstraint)
{
handConstraint.setAnchorA(draw2D.viewportMap(mouseX, mouseY));
}
var curTime = TurbulenzEngine.time;
var timeDelta = (curTime - prevTime);
// Prevent trying to simulate too much time at once!
if (timeDelta > (1 / 20))
{
timeDelta = (1 / 20);
}
realTime += timeDelta;
prevTime = curTime;
// Fixed-step simulation catching up to accumulated real time.
while (world.simulatedTime < realTime)
{
world.step(1 / 60);
}
// physics2D debug drawing.
debug.setScreenViewport(draw2D.getScreenSpaceViewport());
debug.begin();
debug.drawWorld(world);
debug.end();
// Draw fonts.
graphicsDevice.setTechnique(fontTechnique);
fontTechniqueParameters.clipSpace = mathDevice.v4Build(2 / graphicsDevice.width, -2 / graphicsDevice.height, -1, 1,
fontTechniqueParameters.clipSpace);
graphicsDevice.setTechniqueParameters(fontTechniqueParameters);
// Draw `text` inside a 10m-wide cell whose top-left is (x, y) in stage coordinates.
function segmentFont(x, y, text, height)
{
var topLeft = draw2D.viewportUnmap(x, y);
var bottomRight = draw2D.viewportUnmap(x + 10, y + height);
font.drawTextRect(text, {
rect : [topLeft[0], topLeft[1], bottomRight[0] - topLeft[0], bottomRight[1] - topLeft[1]],
scale : 1.0,
spacing : 0,
alignment : 1
});
}
// One label per constraint demo cell, matching the layout built in reset().
var titleHeight = 0.75;
segmentFont(0, 0, "Point", titleHeight);
segmentFont(10, 0, "Weld", titleHeight);
segmentFont(20, 0, "Distance", titleHeight);
segmentFont(30, 0, "Line", titleHeight);
segmentFont(0, 10, "Angle", titleHeight);
segmentFont(10, 10, "Motor", titleHeight);
segmentFont(20, 10, "Pulley", titleHeight);
segmentFont(30, 10, "Custom", titleHeight);
graphicsDevice.endFrame();
}
var intervalID = 0;
// Poll until both the font and the shader have finished loading, then set up
// the font rendering state and swap the interval over to the main loop at 60Hz.
function loadingLoop()
{
    if (!font || !shader)
    {
        return;
    }
    fontTechnique = shader.getTechnique('font');
    fontTechniqueParameters = graphicsDevice.createTechniqueParameters({
        clipSpace : mathDevice.v4BuildZero(),
        alphaRef : 0.01,
        color : mathDevice.v4BuildOne()
    });
    TurbulenzEngine.clearInterval(intervalID);
    intervalID = TurbulenzEngine.setInterval(mainLoop, 1000 / 60);
}
intervalID = TurbulenzEngine.setInterval(loadingLoop, 100);
//==========================================================================
// Wire up the HTML checkbox and sliders that drive the constraint settings.
function loadHtmlControls()
{
htmlControls = HTMLControls.create();
// Toggle between stiff and elastic (spring-like) constraints.
htmlControls.addCheckboxControl({
id : "elasticConstraints",
value : "elasticConstraints",
isSelected : elasticConstraints,
fn: function ()
{
elasticConstraints = !elasticConstraints;
invalidateConstraints();
return elasticConstraints;
}
});
htmlControls.addSliderControl({
id: "frequencySlider",
value: (frequency),
max: 10,
min: 0.25,
step: 0.25,
fn: function ()
{
frequency = this.value;
htmlControls.updateSlider("frequencySlider", frequency);
// Frequency/damping only matter while constraints are elastic.
if (elasticConstraints)
{
invalidateConstraints();
}
}
});
htmlControls.addSliderControl({
id: "dampingSlider",
value: (damping),
max: 2,
min: 0,
step: 0.25,
fn: function ()
{
damping = this.value;
htmlControls.updateSlider("dampingSlider", damping);
if (elasticConstraints)
{
invalidateConstraints();
}
}
});
htmlControls.register();
}
loadHtmlControls();
// Create a scene destroy callback to run when the window is closed
TurbulenzEngine.onunload = function destroyScene()
{
// Stop whichever loop (loading or main) is currently scheduled.
if (intervalID)
{
TurbulenzEngine.clearInterval(intervalID);
}
// Tear down the game session so the services can release it.
if (gameSession)
{
gameSession.destroy();
gameSession = null;
}
};
};
import { scan, Token, Literal } from './scan';
import { Expr, Prim, StringLiteral, IntLiteral, BytesLiteral, sourceReference, List, SourceReference } from './micheline';
import { expandMacros } from './macros';
import { ProtocolOptions } from './michelson-types';
export class MichelineParseError extends Error {
    /**
     * @param token The token at which parsing failed, or null (e.g. at end of input)
     * @param message Human-readable description of the failure
     */
    constructor(public token: Token | null, message?: string) {
        super(message);
        // Repair the prototype chain so `instanceof` works for transpiled Error subclasses.
        Object.setPrototypeOf(this, MichelineParseError.prototype);
    }
}
export class JSONParseError extends Error {
    /**
     * @param node The JSON node that caused the error
     * @param message Human-readable description of the failure
     */
    constructor(public node: any, message?: string) {
        super(message);
        // Repair the prototype chain so `instanceof` works for transpiled Error subclasses.
        Object.setPrototypeOf(this, JSONParseError.prototype);
    }
}
// Shared singleton thrown whenever the token stream ends unexpectedly.
const errEOF = new MichelineParseError(null, 'Unexpected EOF');
// An identifier token beginning with '@', '%' or ':' is a Michelson annotation.
function isAnnotation(tok: Token): boolean {
    if (tok.t !== Literal.Ident) {
        return false;
    }
    const first = tok.v[0];
    return first === '@' || first === '%' || first === ':';
}
// Decimal integer literal, optionally negative.
const intRe = /^-?[0-9]+$/;
// Hex string with an even number of digits (whole bytes only).
const bytesRe = /^([0-9a-fA-F]{2})*$/;
/** Options controlling protocol selection (via ProtocolOptions) and macro handling during parsing. */
export interface ParserOptions extends ProtocolOptions {
/**
 * Expand [Michelson macros](https://tezos.gitlab.io/whitedoc/michelson.html#macros) during parsing.
 * Treated as `true` when omitted.
 */
expandMacros?: boolean;
}
/**
 * Converts and validates Michelson expressions between JSON-based Michelson and Micheline
 *
 * Pretty Print a Michelson Smart Contract:
 * ```
 * const contract = await Tezos.contract.at("KT1Vsw3kh9638gqWoHTjvHCoHLPKvCbMVbCg");
 * const p = new Parser();
 *
 * const michelsonCode = p.parseJSON(contract.script.code);
 * const storage = p.parseJSON(contract.script.storage);
 *
 * console.log("Pretty print Michelson smart contract:");
 * console.log(emitMicheline(michelsonCode, {indent:" ", newline: "\n",}));
 *
 * console.log("Pretty print Storage:");
 * console.log(emitMicheline(storage, {indent:" ", newline: "\n",}));
 * ```
 *
 * Encode a Michelson expression for inital storage of a smart contract
 * ```
 * const src = `(Pair (Pair { Elt 1
 * (Pair (Pair "tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN" "tz1KqTpEZ7Yob7QbPE4Hy4Wo8fHG8LhKxZSx")
 * 0x0501000000026869) }
 * 10000000)
 * (Pair 2 333))`;
 *
 * const p = new Parser();
 *
 * const exp = p.parseMichelineExpression(src);
 * console.log(JSON.stringify(exp));
 * ```
 */
export class Parser {
constructor(private opt?: ParserOptions) {
}
/** Run macro expansion on a prim (on by default; see ParserOptions.expandMacros), tagging the result with the original unexpanded expression. */
private expand(ex: Prim): Expr {
if (this.opt?.expandMacros !== undefined ? this.opt?.expandMacros : true) {
const ret = expandMacros(ex, this.opt);
if (ret !== ex) {
// Keep the source span and remember the macro form that produced this expansion.
ret[sourceReference] = { ...(ex[sourceReference] || { first: 0, last: 0 }), macro: ex };
}
return ret;
} else {
return ex;
}
}
/** Parse a prim application, either parenthesized `(Prim args...)` or bare `Prim args...` depending on the start token. */
private parseListExpr(scanner: Iterator<Token>, start: Token): Expr {
const ref: SourceReference = {
first: start.first,
last: start.last,
};
const expectBracket = start.t === "(";
let tok: IteratorResult<Token>;
if (expectBracket) {
// Skip the '(' and read the prim identifier.
tok = scanner.next();
if (tok.done) {
throw errEOF;
}
ref.last = tok.value.last;
} else {
tok = { value: start };
}
if (tok.value.t !== Literal.Ident) {
throw new MichelineParseError(tok.value, `not an identifier: ${tok.value.v}`);
}
const ret: Prim = {
prim: tok.value.v,
[sourceReference]: ref,
};
// Consume annotations and argument expressions until EOF or a closing ')'.
for (; ;) {
const tok = scanner.next();
if (tok.done) {
if (expectBracket) {
throw errEOF;
}
break;
} else if (tok.value.t === ')') {
if (!expectBracket) {
throw new MichelineParseError(tok.value, `unexpected closing bracket`);
}
ref.last = tok.value.last;
break;
} else if (isAnnotation(tok.value)) {
ret.annots = ret.annots || [];
ret.annots.push(tok.value.v);
ref.last = tok.value.last;
} else {
ret.args = ret.args || [];
const arg = this.parseExpr(scanner, tok.value);
ref.last = arg[sourceReference]?.last || ref.last;
ret.args.push(arg);
}
}
return this.expand(ret);
}
/** Parse a prim with trailing annotations/arguments inside a sequence; returns the prim together with the token that terminated it. */
private parseArgs(scanner: Iterator<Token>, start: Token): [Prim, IteratorResult<Token>] {
// Identifier with arguments
const ref: SourceReference = {
first: start.first,
last: start.last,
};
const p: Prim = {
prim: start.v,
[sourceReference]: ref,
};
for (; ;) {
const t = scanner.next();
// EOF, '}' or ';' end this item; the caller decides how to handle the terminator.
if (t.done || t.value.t === '}' || t.value.t === ';') {
return [p, t];
}
if (isAnnotation(t.value)) {
ref.last = t.value.last;
p.annots = p.annots || [];
p.annots.push(t.value.v);
} else {
const arg = this.parseExpr(scanner, t.value);
ref.last = arg[sourceReference]?.last || ref.last;
p.args = p.args || [];
p.args.push(arg);
}
}
}
/** Parse a `;`-separated sequence; braces are required only when the start token is '{'. */
private parseSequenceExpr(scanner: Iterator<Token>, start: Token): List<Expr> {
const ref: SourceReference = {
first: start.first,
last: start.last,
};
const seq: List<Expr> = [];
seq[sourceReference] = ref;
const expectBracket = start.t === "{";
// A null slot means the next token must be fetched from the scanner.
let tok: IteratorResult<Token> | null = start.t === "{" ? null : { value: start };
for (; ;) {
if (tok === null) {
tok = scanner.next();
if (!tok.done) {
ref.last = tok.value.last;
}
}
if (tok.done) {
if (expectBracket) {
throw errEOF;
} else {
return seq;
}
}
if (tok.value.t === "}") {
if (!expectBracket) {
throw new MichelineParseError(tok.value, `unexpected closing bracket`);
} else {
return seq;
}
} else if (tok.value.t === Literal.Ident) {
// Identifier with arguments
const [itm, n] = this.parseArgs(scanner, tok.value);
ref.last = itm[sourceReference]?.last || ref.last;
seq.push(this.expand(itm));
tok = n;
} else {
// Other
const ex = this.parseExpr(scanner, tok.value);
ref.last = ex[sourceReference]?.last || ref.last;
seq.push(ex);
tok = null;
}
if (tok === null) {
tok = scanner.next();
if (!tok.done) {
ref.last = tok.value.last;
}
}
// Swallow an optional item separator.
if (!tok.done && tok.value.t === ";") {
tok = null;
}
}
}
/** Dispatch on the leading token: literal, braced sequence, or prim application. */
private parseExpr(scanner: Iterator<Token>, tok: Token): Expr {
switch (tok.t) {
case Literal.Ident:
return this.expand({ prim: tok.v, [sourceReference]: { first: tok.first, last: tok.last } });
case Literal.Number:
return { int: tok.v, [sourceReference]: { first: tok.first, last: tok.last } };
case Literal.String:
// The scanner yields the raw quoted form; JSON.parse performs the unescaping.
return { string: JSON.parse(tok.v) as string, [sourceReference]: { first: tok.first, last: tok.last } };
case Literal.Bytes:
// Drop the leading "0x" prefix.
return { bytes: tok.v.slice(2), [sourceReference]: { first: tok.first, last: tok.last } };
case '{':
return this.parseSequenceExpr(scanner, tok);
default:
return this.parseListExpr(scanner, tok);
}
}
/**
 * Parses a Micheline sequence expression, such as smart contract source. Enclosing curly brackets may be omitted.
 * @param src A Micheline sequence `{parameter ...; storage int; code { DUP ; ...};}` or `parameter ...; storage int; code { DUP ; ...};`
 * @returns An AST node or null for empty document.
 */
parseSequence(src: string): Expr[] | null {
// tslint:disable-next-line: strict-type-predicates
if (typeof src !== "string") {
throw new TypeError(`string type was expected, got ${typeof src} instead`);
}
const scanner = scan(src);
const tok = scanner.next();
if (tok.done) {
return null;
}
return this.parseSequenceExpr(scanner, tok.value);
}
/**
 * Parse a Micheline list expression. Enclosing parentheses may be omitted.
 * @param src A Michelson list expression such as `(Pair {Elt "0" 0} 0)` or `Pair {Elt "0" 0} 0`
 * @returns An AST node or null for empty document.
 */
parseList(src: string): Expr | null {
// tslint:disable-next-line: strict-type-predicates
if (typeof src !== "string") {
throw new TypeError(`string type was expected, got ${typeof src} instead`);
}
const scanner = scan(src);
const tok = scanner.next();
if (tok.done) {
return null;
}
return this.parseListExpr(scanner, tok.value);
}
/**
 * Parse any Michelson expression
 * @param src A Michelson expression such as `(Pair {Elt "0" 0} 0)` or `{parameter ...; storage int; code { DUP ; ...};}`
 * @returns An AST node or null for empty document.
 */
parseMichelineExpression(src: string): Expr | null {
// tslint:disable-next-line: strict-type-predicates
if (typeof src !== "string") {
throw new TypeError(`string type was expected, got ${typeof src} instead`);
}
const scanner = scan(src);
const tok = scanner.next();
if (tok.done) {
return null;
}
return this.parseExpr(scanner, tok.value);
}
/**
 * Parse a Micheline sequence expression, such as smart contract source. Enclosing curly brackets may be omitted.
 * An alias for `parseSequence`
 * @param src A Micheline sequence `{parameter ...; storage int; code { DUP ; ...};}` or `parameter ...; storage int; code { DUP ; ...};`
 */
parseScript(src: string): Expr[] | null {
return this.parseSequence(src);
}
/**
 * Parse a Micheline list expression. Enclosing parentheses may be omitted.
 * An alias for `parseList`
 * @param src A Michelson list expression such as `(Pair {Elt "0" 0} 0)` or `Pair {Elt "0" 0} 0`
 * @returns An AST node or null for empty document.
 */
parseData(src: string): Expr | null {
return this.parseList(src);
}
/**
 * Takes a JSON-encoded Michelson, validates it, strips away unneeded properties and optionally expands macros (See {@link ParserOptions}).
 * @param src An object containing JSON-encoded Michelson, usually returned by `JSON.parse()`
 */
parseJSON(src: object): Expr {
// tslint:disable-next-line: strict-type-predicates
if (typeof src !== "object") {
throw new TypeError(`object type was expected, got ${typeof src} instead`);
}
if (Array.isArray(src)) {
// A JSON array maps to a Micheline sequence; validate each element recursively.
const ret: Expr[] = [];
for (const n of src) {
if (n === null || typeof n !== 'object') {
throw new JSONParseError(n, `unexpected sequence element: ${n}`);
}
ret.push(this.parseJSON(n));
}
return ret;
} else if ('prim' in src) {
const p = src as { prim: any, annots?: any[], args?: any[] };
if (
typeof p.prim === 'string' &&
(p.annots === undefined || Array.isArray(p.annots)) &&
(p.args === undefined || Array.isArray(p.args))
) {
const ret: Prim = {
prim: p.prim,
};
if (p.annots !== undefined) {
// Annotations must all be strings.
for (const a of p.annots) {
if (typeof a !== 'string') {
throw new JSONParseError(a, `string expected: ${a}`);
}
}
ret.annots = p.annots;
}
if (p.args !== undefined) {
ret.args = [];
for (const a of p.args) {
if (a === null || typeof a !== 'object') {
throw new JSONParseError(a, `unexpected argument: ${a}`);
}
ret.args.push(this.parseJSON(a));
}
}
return this.expand(ret);
}
throw new JSONParseError(src, `malformed prim expression: ${src}`);
} else if ('string' in src) {
if (typeof (src as any).string === 'string') {
return { string: (src as StringLiteral).string };
}
throw new JSONParseError(src, `malformed string literal: ${src}`);
} else if ('int' in src) {
// Int literals are carried as strings and must match the decimal pattern.
if (typeof (src as any).int === 'string' && intRe.test((src as IntLiteral).int)) {
return { int: (src as IntLiteral).int };
}
throw new JSONParseError(src, `malformed int literal: ${src}`);
} else if ('bytes' in src) {
// Bytes literals must be an even-length hex string (no "0x" prefix).
if (
typeof (src as any).bytes === 'string' &&
bytesRe.test((src as BytesLiteral).bytes)
) {
return { bytes: (src as BytesLiteral).bytes };
}
throw new JSONParseError(src, `malformed bytes literal: ${src}`);
} else {
throw new JSONParseError(src, `unexpected object: ${src}`);
}
}
}
import { LayoutConfig } from './config/config';
import { ResolvedComponentItemConfig, ResolvedLayoutConfig, ResolvedPopoutLayoutConfig } from './config/resolved-config';
import { ComponentContainer } from './container/component-container';
import { BindError } from './errors/external-error';
import { UnexpectedUndefinedError } from './errors/internal-error';
import { LayoutManager } from './layout-manager';
import { DomConstants } from './utils/dom-constants';
import { I18nStringId, i18nStrings } from './utils/i18n-strings';
/** @public */
export class VirtualLayout extends LayoutManager {
    /**
     * @deprecated Use {@link (VirtualLayout:class).bindComponentEvent} and
     * {@link (VirtualLayout:class).unbindComponentEvent} with virtual components
     */
    getComponentEvent: VirtualLayout.GetComponentEventHandler | undefined;
    /**
     * @deprecated Use {@link (VirtualLayout:class).bindComponentEvent} and
     * {@link (VirtualLayout:class).unbindComponentEvent} with virtual components
     */
    releaseComponentEvent: VirtualLayout.ReleaseComponentEventHandler | undefined;
    /** Assigned by the application (or via the constructor) to bind a component to a container. */
    bindComponentEvent: VirtualLayout.BindComponentEventHandler | undefined;
    /** Assigned by the application (or via the constructor) to unbind a component from its container. */
    unbindComponentEvent: VirtualLayout.UnbindComponentEventHandler | undefined;
    // Note: field renamed from the misspelled `_bindComponentEventHanlderPassedInConstructor`;
    // it is private so all references are within this class.
    /** @internal @deprecated use while constructor is not determinate */
    private _bindComponentEventHandlerPassedInConstructor = false; // remove when constructor is determinate
    /** @internal @deprecated use while constructor is not determinate */
    private _creationTimeoutPassed = false; // remove when constructor is determinate
    /**
     * @param container - A Dom HTML element. Defaults to body
     * @param bindComponentEventHandler - Event handler to bind components
     * @param unbindComponentEventHandler - Event handler to unbind components
     * If bindComponentEventHandler is defined, then constructor will be determinate. It will always call the init()
     * function and the init() function will always complete. This means that the bindComponentEventHandler will be called
     * if constructor is for a popout window. Make sure bindComponentEventHandler is ready for events.
     */
    constructor(
        container?: HTMLElement,
        bindComponentEventHandler?: VirtualLayout.BindComponentEventHandler,
        unbindComponentEventHandler?: VirtualLayout.UnbindComponentEventHandler,
    );
    /** @deprecated specify layoutConfig in {@link (LayoutManager:class).loadLayout} */
    constructor(config: LayoutConfig, container?: HTMLElement);
    /** @internal */
    constructor(configOrOptionalContainer: LayoutConfig | HTMLElement | undefined,
        containerOrBindComponentEventHandler: HTMLElement | VirtualLayout.BindComponentEventHandler | undefined,
        unbindComponentEventHandler: VirtualLayout.UnbindComponentEventHandler | undefined,
        skipInit: true,
    );
    /** @internal */
    constructor(configOrOptionalContainer: LayoutConfig | HTMLElement | undefined,
        containerOrBindComponentEventHandler?: HTMLElement | VirtualLayout.BindComponentEventHandler,
        unbindComponentEventHandler?: VirtualLayout.UnbindComponentEventHandler,
        skipInit?: true,
    ) {
        super(VirtualLayout.createLayoutManagerConstructorParameters(configOrOptionalContainer, containerOrBindComponentEventHandler));
        if (containerOrBindComponentEventHandler !== undefined) {
            if (typeof containerOrBindComponentEventHandler === 'function') {
                this.bindComponentEvent = containerOrBindComponentEventHandler;
                this._bindComponentEventHandlerPassedInConstructor = true;
                if (unbindComponentEventHandler !== undefined) {
                    this.unbindComponentEvent = unbindComponentEventHandler;
                }
            }
        }
        if (!this._bindComponentEventHandlerPassedInConstructor) {
            // backward compatibility
            if (this.isSubWindow) {
                // document.body.style.visibility = 'hidden';
                // Set up layoutConfig since constructor is not determinate and may exit early. Other functions may need
                // this.layoutConfig. this.layoutConfig is again calculated in the same way when init() completes.
                // Remove this when constructor is determinate.
                if (this._constructorOrSubWindowLayoutConfig === undefined) {
                    throw new UnexpectedUndefinedError('VLC98823');
                } else {
                    const resolvedLayoutConfig = LayoutConfig.resolve(this._constructorOrSubWindowLayoutConfig);
                    // remove root from layoutConfig
                    this.layoutConfig = {
                        ...resolvedLayoutConfig,
                        root: undefined,
                    }
                }
            }
        }
        if (skipInit !== true) {
            if (!this.deprecatedConstructor) {
                this.init();
            }
        }
    }
    /** Clears the component event handlers before destroying the layout. */
    override destroy(): void {
        this.bindComponentEvent = undefined;
        this.unbindComponentEvent = undefined;
        super.destroy();
    }
    /**
     * Creates the actual layout. Must be called after all initial components
     * are registered. Recurses through the configuration and sets up
     * the item tree.
     *
     * If called before the document is ready it adds itself as a listener
     * to the document.ready event
     * @deprecated LayoutConfig should not be loaded in {@link (LayoutManager:class)} constructor, but rather in a
     * {@link (LayoutManager:class).loadLayout} call. If LayoutConfig is not specified in {@link (LayoutManager:class)} constructor,
     * then init() will be automatically called internally and should not be called externally.
     */
    override init(): void {
        /**
         * If the document isn't ready yet, wait for it.
         */
        if (!this._bindComponentEventHandlerPassedInConstructor && (document.readyState === 'loading' || document.body === null)) {
            document.addEventListener('DOMContentLoaded', () => this.init(), { passive: true });
            return;
        }
        /**
         * If this is a subwindow, wait a few milliseconds for the original
         * page's js calls to be executed, then replace the bodies content
         * with GoldenLayout
         */
        if (!this._bindComponentEventHandlerPassedInConstructor && this.isSubWindow === true && !this._creationTimeoutPassed) {
            setTimeout(() => this.init(), 7);
            this._creationTimeoutPassed = true;
            return;
        }
        if (this.isSubWindow === true) {
            if (!this._bindComponentEventHandlerPassedInConstructor) {
                this.clearHtmlAndAdjustStylesForSubWindow();
            }
            // Expose this instance on the window object to allow the opening window to interact with it
            window.__glInstance = this;
        }
        super.init();
    }
    /**
     * Clears existing HTML and adjusts style to make window suitable to be a popout sub window
     * Currently is automatically called when window is a subWindow and bindComponentEvent is not passed in the constructor
     * If bindComponentEvent is not passed in the constructor, the application must either call this function explicitly or
     * (preferably) make the window suitable as a subwindow.
     * In the future, it is planned that this function is NOT automatically called in any circumstances. Applications will
     * need to determine whether a window is a Golden Layout popout window and either call this function explicitly or
     * hide HTML not relevant to the popout.
     * See apitest for an example of how HTML is hidden when popout windows are displayed
     */
    clearHtmlAndAdjustStylesForSubWindow(): void {
        const headElement = document.head;
        // Preserve stylesheets, templates and explicitly-kept (.gl_keep) elements
        // by moving them into <head> before the body is wiped.
        const appendNodeLists = new Array<NodeListOf<Element>>(4);
        appendNodeLists[0] = document.querySelectorAll('body link');
        appendNodeLists[1] = document.querySelectorAll('body style');
        appendNodeLists[2] = document.querySelectorAll('template');
        appendNodeLists[3] = document.querySelectorAll('.gl_keep');
        for (let listIdx = 0; listIdx < appendNodeLists.length; listIdx++) {
            const appendNodeList = appendNodeLists[listIdx];
            for (let nodeIdx = 0; nodeIdx < appendNodeList.length; nodeIdx++) {
                const node = appendNodeList[nodeIdx];
                headElement.appendChild(node);
            }
        }
        const bodyElement = document.body;
        bodyElement.innerHTML = '';
        bodyElement.style.visibility = 'visible';
        this.checkAddDefaultPopinButton();
        /*
        * This seems a bit pointless, but actually causes a reflow/re-evaluation getting around
        * slickgrid's "Cannot find stylesheet." bug in chrome
        */
        // eslint-disable-next-line @typescript-eslint/no-unused-vars
        const x = document.body.offsetHeight;
    }
    /**
     * Will add button if not popinOnClose specified in settings
     * @returns true if added otherwise false
     */
    checkAddDefaultPopinButton(): boolean {
        if (this.layoutConfig.settings.popInOnClose) {
            return false;
        } else {
            const popInButtonElement = document.createElement('div');
            popInButtonElement.classList.add(DomConstants.ClassName.Popin);
            popInButtonElement.setAttribute('title', this.layoutConfig.header.dock);
            const iconElement = document.createElement('div');
            iconElement.classList.add(DomConstants.ClassName.Icon);
            const bgElement = document.createElement('div');
            bgElement.classList.add(DomConstants.ClassName.Bg);
            popInButtonElement.appendChild(iconElement);
            popInButtonElement.appendChild(bgElement);
            // Clicking the button emits 'popIn' so the layout can be docked back.
            popInButtonElement.addEventListener('click', () => this.emit('popIn'));
            document.body.appendChild(popInButtonElement);
            return true;
        }
    }
    /** @internal */
    override bindComponent(container: ComponentContainer, itemConfig: ResolvedComponentItemConfig): ComponentContainer.BindableComponent {
        if (this.bindComponentEvent !== undefined) {
            const bindableComponent = this.bindComponentEvent(container, itemConfig);
            return bindableComponent;
        } else {
            if (this.getComponentEvent !== undefined) {
                // Deprecated path: getComponentEvent always produces a non-virtual component.
                return {
                    virtual: false,
                    component: this.getComponentEvent(container, itemConfig),
                }
            } else {
                // There is no component registered for this type, and we don't have a getComponentEvent defined.
                // This might happen when the user pops out a dialog and the component types are not registered upfront.
                const text = i18nStrings[I18nStringId.ComponentTypeNotRegisteredAndBindComponentEventHandlerNotAssigned];
                const message = `${text}: ${JSON.stringify(itemConfig)}`
                throw new BindError(message);
            }
        }
    }
    /** @internal */
    override unbindComponent(container: ComponentContainer, virtual: boolean, component: ComponentContainer.Component | undefined): void {
        if (this.unbindComponentEvent !== undefined) {
            this.unbindComponentEvent(container);
        } else {
            // Deprecated path: releaseComponentEvent only applies to non-virtual components.
            if (!virtual && this.releaseComponentEvent !== undefined) {
                if (component === undefined) {
                    throw new UnexpectedUndefinedError('VCUCRCU333998');
                } else {
                    this.releaseComponentEvent(container, component);
                }
            }
        }
    }
}
/** @public */
export namespace VirtualLayout {
    /**
     * @deprecated Use virtual components with {@link (VirtualLayout:class).bindComponentEvent} and
     * {@link (VirtualLayout:class).unbindComponentEvent} events.
     */
    export type GetComponentEventHandler =
        (this: void, container: ComponentContainer, itemConfig: ResolvedComponentItemConfig) => ComponentContainer.Component;
    /**
     * @deprecated Use virtual components with {@link (VirtualLayout:class).bindComponentEvent} and
     * {@link (VirtualLayout:class).unbindComponentEvent} events.
     */
    export type ReleaseComponentEventHandler =
        (this: void, container: ComponentContainer, component: ComponentContainer.Component) => void;
    /** Binds a component to a container, returning the component and its virtual flag. */
    export type BindComponentEventHandler =
        (this: void, container: ComponentContainer, itemConfig: ResolvedComponentItemConfig) => ComponentContainer.BindableComponent;
    /** Unbinds a previously bound component from its container. */
    export type UnbindComponentEventHandler =
        (this: void, container: ComponentContainer) => void;
    export type BeforeVirtualRectingEvent = (this: void) => void;
    /** @internal
     * Variable to hold the state whether we already checked if we are running in a sub window.
     * Fixes popout and creation of nested golden-layouts.
     */
    let subWindowChecked = false;
    /** @internal
     * Derives the LayoutManager constructor parameters from the overloaded VirtualLayout
     * constructor arguments, detecting whether this window is a popout sub window.
     */
    export function createLayoutManagerConstructorParameters(configOrOptionalContainer: LayoutConfig | HTMLElement | undefined,
        containerOrBindComponentEventHandler?: HTMLElement | VirtualLayout.BindComponentEventHandler):
        LayoutManager.ConstructorParameters
    {
        // Check the 'gl-window' URL parameter only once per page: nested golden-layouts
        // created inside a popout must not re-detect themselves as sub windows.
        const windowConfigKey = subWindowChecked ? null : new URL(document.location.href).searchParams.get('gl-window');
        subWindowChecked = true;
        const isSubWindow = windowConfigKey !== null;
        let containerElement: HTMLElement | undefined;
        let config: LayoutConfig | undefined;
        if (windowConfigKey !== null) {
            // Sub window: the opener stored the minified layout config in localStorage
            // under the key passed via the URL; read it once, then remove it.
            const windowConfigStr = localStorage.getItem(windowConfigKey);
            if (windowConfigStr === null) {
                throw new Error('Null gl-window Config');
            }
            localStorage.removeItem(windowConfigKey);
            const minifiedWindowConfig = JSON.parse(windowConfigStr) as ResolvedPopoutLayoutConfig;
            const resolvedConfig = ResolvedLayoutConfig.unminifyConfig(minifiedWindowConfig);
            config = LayoutConfig.fromResolved(resolvedConfig)
            if (configOrOptionalContainer instanceof HTMLElement) {
                containerElement = configOrOptionalContainer;
            }
        } else {
            // Main window: first argument may be a config or the container element.
            if (configOrOptionalContainer === undefined) {
                config = undefined;
            } else {
                if (configOrOptionalContainer instanceof HTMLElement) {
                    config = undefined;
                    containerElement = configOrOptionalContainer;
                } else {
                    // backwards compatibility
                    config = configOrOptionalContainer;
                }
            }
            if (containerElement === undefined) {
                if (containerOrBindComponentEventHandler instanceof HTMLElement) {
                    containerElement = containerOrBindComponentEventHandler;
                }
            }
        }
        return {
            constructorOrSubWindowLayoutConfig: config,
            isSubWindow,
            containerElement,
        };
    }
}
// Global Ramda object; its members are declared on the R.Static interface below.
declare var R: R.Static;
//
declare namespace R {
    // Fantasyland interfaces
    // TODO: incorporate generalized inheritance e.g.: `<U extends
    // Applicative, V extends Traversable>`; possibly needs [rank 2
    // polymorphism](https://github.com/Microsoft/TypeScript/issues/1213).
    // Minimal structural typings of the Fantasy Land algebraic specifications,
    // used by Ramda signatures that accept arbitrary compliant containers.
    interface Setoid<T> {
        equals(b: Setoid<T>): boolean;
    }
    interface Semigroup<T> {
        concat(b: Semigroup<T>): Semigroup<T>;
    }
    interface Monoid<T> extends Semigroup<T> {
        /* static */ empty<T>(): Monoid<T>;
    }
    interface Functor<T> {
        map<U>(fn: (t: T) => U): Functor<U>;
    }
    interface Apply<T> extends Functor<T> {
        apply<U>(fn: Apply<(t: T) => U>): Apply<U>;
    }
    interface Applicative<T> extends Apply<T> {
        /* static */ of<U>(a: U): Applicative<U>;
    }
    interface Alt<T> extends Functor<T> {
        alt(b: T): Alt<T>;
    }
    interface Plus<T> extends Alt<T> {
        /* static */ zero<T>(): Plus<T>;
    }
    interface Alternative<T> extends Plus<T>, Applicative<T> {
    }
    interface Foldable<T> {
        reduce<U>(fn: (u: U, t: T) => U, u: U): U;
    }
    interface Traversable<T> extends Functor<T>, Foldable<T> {
        traverse<U, V>(fn: (t: T) => Applicative<U>, of: (v: V) => Applicative<V>): Applicative<Traversable<U>>;
    }
    interface Chain<T> extends Apply<T> {
        chain<U>(fn: (t: T) => Chain<U>): Chain<U>;
    }
    // ChainRec: stack-safe recursion; f calls `next` to loop or `done` to finish.
    interface ChainRec<T> extends Chain<T> {
        /* static */ chainRec<A,B,C>(f: (next: (a: A) => C, done: (b: B) => C, value: A) => ChainRec<C>, i: A): ChainRec<B>;
    }
    interface Monad<T> extends Applicative<T>, Chain<T> {
    }
    interface Extend<T> {
        extend<U>(f: (v: Extend<T>) => U): Extend<U>;
    }
    interface Comonad<T> extends Functor<T>, Extend<T> {
        extract<U>(): U; // 'same U as in extend's f -- how to bind?
    }
    // Bifunctor/Profunctor map over two type parameters at once; the second
    // Functor parent cannot be expressed, hence the commented-out clause.
    interface Bifunctor<T,U> extends Functor<T> /*, Functor<U>*/ {
        bimap<B,D>(f: (v: T) => B, g: (v: U) => D): Bifunctor<B,D>;
    }
    interface Profunctor<T,U> extends Functor<T> /*, Functor<U>*/ {
        promap<B,D>(f: (v: T) => B, g: (v: U) => D): Profunctor<B,D>;
    }
    // simple types
    type Index = string | number;
    type Primitive = string | number | boolean;
    // Types comparable with < and > (used by min/max/sort-style functions).
    type Ord = string | number | boolean | Date;
    interface Dictionary<T> {
        [index: string]: T;
    }
    type Obj<T> = Dictionary<T>;
    type List<T> = ArrayLike<T>;
    type StringLike = string | StringRepresentable<string>;
    // A property key, or anything convertible to one via toString().
    type Prop = Index | StringRepresentable<Index>;
    type Path = List<Prop>;
    // Either an object or a list -- the two container shapes Ramda traverses.
    type Struct<T> = Obj<T> | List<T>;
    type AccOpts<T,U> = List<any>|Obj<any>|Transformer<T, U, U>;
    type Pred<T> = (v: T) => boolean;
    type ObjPred<T> = (value: T, key: string) => boolean;
    // Ramda interfaces
    // A constructor (newable) producing T.
    interface Type<T> extends Function {
        new (...args: any[]): T;
    }
    // A function of any arity returning T.
    interface Variadic<T> {
        (...args: any[]): T;
    }
    interface KeyValuePair<K, V> extends Array<K | V> { 0 : K; 1 : V; }
    // Transducer step/init/result triple (see R.transduce / R.into).
    interface Transformer<T, Acc, Res> {
        step: (acc: Acc, v: T) => Acc;
        init: () => Acc;
        result: (acc: Acc) => Res; // = R.identity
    }
    interface NumericDictionary<T> {
        [index: number]: T;
    }
    interface StringRepresentable<T> {
        toString(): T;
    }
    interface NestedObj<T> {
        [index: string]: T|NestedObj<T>;
    }
    // interface RecursiveArray<T> extends Array<T|RecursiveArray<T>> {}
    // interface ListOfRecursiveArraysOrValues<T> extends List<T|RecursiveArray<T>> {}
    interface NestedArray <T> {
        [index: number]: T | NestedArray<T>;
        length: number;
    }
    // // an unfortunate compromise -- while the actual lens should be generic, for the purpose of TS the structure should be supplied beforehand
    // interface KeyLens<T extends Struct<any>, K extends keyof T> {
    //     // <T extends Struct<any>>
    //     (obj: T): T[K]; // get
    //     set(v: T[K], obj: T): T;
    //     // map(fn: (v: T[K]) => T[K], obj: T): T
    // }
    // Lens with both structure and focus types fixed up front.
    interface Lens<T,U> {
        (obj: T): U; // get
        set(v: U, obj: T): T;
        // map(fn: (v: U) => U, obj: T): T
    }
    // Lens with only the focus type fixed; the structure is supplied at use site.
    interface ManualLens<U> {
        <T extends Struct<any>>(obj: T): U; // get
        set<T extends Struct<any>>(v: U, obj: T): T;
        // <T extends Struct<any>>map(fn: (v: U) => U, obj: T): T
    }
    // Fully generic lens; both types are supplied at use site.
    interface UnknownLens {
        <T,U>(obj: T): U; // get
        set<T,U>(v: U, obj: T): T;
        // map<T,U>(fn: (v: U) => U, obj: T): T
    }
    // @see https://gist.github.com/donnut/fd56232da58d25ceecf1, comment by @albrow
    // CurriedFunctionN types enumerate every way a curried N-ary function can be
    // partially applied: each call signature consumes a prefix of the arguments
    // and returns the curried remainder (or R when all arguments are supplied).
    // interface CurriedFunction1<T1, R> {
    //     (v1: T1): R;
    // }
    type CurriedFunction1<T1, R> = (v1: T1) => R;
    interface CurriedFunction2<T1, T2, R> {
        (v1: T1): (v2: T2) => R;
        (v1: T1, v2: T2): R;
    }
    interface CurriedFunction3<T1, T2, T3, R> {
        (v1: T1): CurriedFunction2<T2, T3, R>;
        (v1: T1, v2: T2): (v3: T3) => R;
        (v1: T1, v2: T2, v3: T3): R;
    }
    interface CurriedFunction4<T1, T2, T3, T4, R> {
        (v1: T1): CurriedFunction3<T2, T3, T4, R>;
        (v1: T1, v2: T2): CurriedFunction2<T3, T4, R>;
        (v1: T1, v2: T2, v3: T3): (v4: T4) => R;
        (v1: T1, v2: T2, v3: T3, v4: T4): R;
    }
    interface CurriedFunction5<T1, T2, T3, T4, T5, R> {
        (v1: T1): CurriedFunction4<T2, T3, T4, T5, R>;
        (v1: T1, v2: T2): CurriedFunction3<T3, T4, T5, R>;
        (v1: T1, v2: T2, v3: T3): CurriedFunction2<T4, T5, R>;
        (v1: T1, v2: T2, v3: T3, v4: T4): (v5: T5) => R;
        (v1: T1, v2: T2, v3: T3, v4: T4, v5: T5): R;
    }
    interface CurriedFunction6<T1, T2, T3, T4, T5, T6, R> {
        (v1: T1): CurriedFunction5<T2, T3, T4, T5, T6, R>;
        (v1: T1, v2: T2): CurriedFunction4<T3, T4, T5, T6, R>;
        (v1: T1, v2: T2, v3: T3): CurriedFunction3<T4, T5, T6, R>;
        (v1: T1, v2: T2, v3: T3, v4: T4): CurriedFunction2<T5, T6, R>;
        (v1: T1, v2: T2, v3: T3, v4: T4, v5: T5): (v6: T6) => R;
        (v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6): R;
    }
    interface CurriedFunction7<T1, T2, T3, T4, T5, T6, T7, R> {
        (v1: T1): CurriedFunction6<T2, T3, T4, T5, T6, T7, R>;
        (v1: T1, v2: T2): CurriedFunction5<T3, T4, T5, T6, T7, R>;
        (v1: T1, v2: T2, v3: T3): CurriedFunction4<T4, T5, T6, T7, R>;
        (v1: T1, v2: T2, v3: T3, v4: T4): CurriedFunction3<T5, T6, T7, R>;
        (v1: T1, v2: T2, v3: T3, v4: T4, v5: T5): CurriedFunction2<T6, T7, R>;
        (v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6): (v7: T7) => R;
        (v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7): R;
    }
    interface CurriedFunction8<T1, T2, T3, T4, T5, T6, T7, T8, R> {
        (v1: T1): CurriedFunction7<T2, T3, T4, T5, T6, T7, T8, R>;
        (v1: T1, v2: T2): CurriedFunction6<T3, T4, T5, T6, T7, T8, R>;
        (v1: T1, v2: T2, v3: T3): CurriedFunction5<T4, T5, T6, T7, T8, R>;
        (v1: T1, v2: T2, v3: T3, v4: T4): CurriedFunction4<T5, T6, T7, T8, R>;
        (v1: T1, v2: T2, v3: T3, v4: T4, v5: T5): CurriedFunction3<T6, T7, T8, R>;
        (v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6): CurriedFunction2<T7, T8, R>;
        (v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7): (v8: T8) => R;
        (v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8): R;
    }
    interface CurriedFunction9<T1, T2, T3, T4, T5, T6, T7, T8, T9, R> {
        (v1: T1): CurriedFunction8<T2, T3, T4, T5, T6, T7, T8, T9, R>;
        (v1: T1, v2: T2): CurriedFunction7<T3, T4, T5, T6, T7, T8, T9, R>;
        (v1: T1, v2: T2, v3: T3): CurriedFunction6<T4, T5, T6, T7, T8, T9, R>;
        (v1: T1, v2: T2, v3: T3, v4: T4): CurriedFunction5<T5, T6, T7, T8, T9, R>;
        (v1: T1, v2: T2, v3: T3, v4: T4, v5: T5): CurriedFunction4<T6, T7, T8, T9, R>;
        (v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6): CurriedFunction3<T7, T8, T9, R>;
        (v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7): CurriedFunction2<T8, T9, R>;
        (v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8): (v9: T9) => R;
        (v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9): R;
    }
    // Opaque marker returned by R.reduced to short-circuit R.reduce.
    interface Reduced {}
interface Static {
/**
* A special placeholder value used to specify "gaps" within curried
* functions, allowing partial application of any combination of
* arguments, regardless of their positions.
* NOTE: can't type this yet, for binary functions consider R.flip!
*/
__: any; // { "@@functional/placeholder": boolean };
// ^ don't type by value, since it can be inserted anywhere...
// Note this is still not useful, as it doesn't take into account how it changes formulas (leaving holes!).
// This remains TODO, and should be done on the level of curried functions, but that
// pretty much requires being able to express the separate functions as a single curried function...
// until that moment handling this would mean having to handle each possible combination for each function. :(
/**
* Adds two numbers. Equivalent to a + b but curried.
*/
add(a: number, b: number): number;
add(a: number): (b: number) => number;
// add: CurriedFunction2<number, number, number>;
/**
* Creates a new list iteration function from an existing one by adding two new parameters to its callback
* function: the current index, and the entire list.
*/
addIndex<T,U>(fn: (f: (item: T) => U, list: List<T>) => U[]): CurriedFunction2<(item: T, idx: number, list?: List<T>) => U, List<T>, U[]>;
/* Special case for forEach */
addIndex<T>(fn: (f: (item: T) => void, list: List<T>) => T[]): CurriedFunction2<(item: T, idx: number, list?: List<T>) => void, List<T>, T[]>;
/* Special case for reduce */
addIndex<T,U>(fn: (f: (acc:U, item: T) => U, aci:U, list: List<T>) => U): CurriedFunction3<(acc:U, item: T, idx: number, list?: List<T>) => U, U, List<T>, U>;
// addIndex<T,U>: CurriedFunction3<(f: (item: T) => U, list: List<T>) => U[], (item: T, idx: number, list?: List<T>) => U, List<T>, U[]>;
/**
* Applies a function to the value at the given index of an array, returning a new copy of the array with the
* element at the given index replaced with the result of the function application.
*/
// adjust<T>(fn: (a: T) => T, index: number, list: List<T>): T[];
// adjust<T>(fn: (a: T) => T, index: number): (list: List<T>) => T[];
// adjust<T>(fn: (a: T) => T): CurriedFunction2<number, List<T>, T[]>;
// adjust<T>: CurriedFunction3<(a: T) => T, number, List<T>, T[]>;
// base
adjust<T>(fn: (a: T) => T, index: number, list: List<T>): T[];
adjust<T>(fn: (a: T) => T, index: number):{
(list: List<T>): T[];
};
adjust<T>(fn: (a: T) => T):{
(index: number, list: List<T>): T[];
(index: number):{
(list: List<T>): T[];
};
};
/**
* Returns true if all elements of the list match the predicate, false if there are any that don't.
*/
all<T>(pred: Pred<T>, list: List<T>): boolean;
all<T>(pred: Pred<T>): (list: List<T>) => boolean;
// all<T>: CurriedFunction2<Pred<T>, List<T>, boolean>;
/**
* Given a list of predicates, returns a new predicate that will be true exactly when all of them are.
*/
allPass<T>(preds: Pred<T>[]): Pred<T>;
/**
* Returns a function that always returns the given value.
*/
always<T>(val: T): () => T;
/**
* A function that returns the first argument if it's falsy otherwise the second argument. Note that this is
* NOT short-circuited, meaning that if expressions are passed they are both evaluated.
*/
// dispatch to some `and` method:
and<T extends {and?: Function;}>(fn1: T, val2: boolean|any): boolean;
and<T extends {and?: Function;}>(fn1: T): (val2: boolean|any) => boolean;
// and<T extends {and?: Function;}>: CurriedFunction2<T, boolean|any, boolean>;
// // functions, does this still exist?
// and<T extends () => boolean>(fn1: T, fn2: T): T;
// and<T extends () => boolean>(fn1: T): (fn2: T) => T;
// no generics:
and(v1: any, v2: any): boolean;
and(v1: any): (v2: any) => boolean;
// and: CurriedFunction2<any, any, boolean>;
/**
* Returns true if at least one of elements of the list match the predicate, false otherwise.
*/
any<T>(pred: Pred<T>, list: List<T>): boolean;
any<T>(fnpred: Pred<T>): (list: List<T>) => boolean;
// any<T>: CurriedFunction2<Pred<T>, List<T>, boolean>;
/**
* Given a list of predicates returns a new predicate that will be true exactly when any one of them is.
*/
anyPass<T>(preds: Pred<T>[]): Pred<T>;
/**
* ap applies a list of functions to a list of values.
*/
ap<T,U>(fns: ((a: T) => U)[], vs: List<T>): U[];
ap<T,U>(fns: ((a: T) => U)[]): (vs: List<T>) => U[];
// ap<T,U>: CurriedFunction2<((a: T) => U)[], List<T>, U[]>;
/**
* Returns a new list, composed of n-tuples of consecutive elements If n is greater than the length of the list,
* an empty list is returned.
*/
aperture<T>(n: number, list: List<T>): T[][];
aperture(n: number): <T>(list: List<T>) => T[][];
// aperture<T>: CurriedFunction2<number, List<T>, T[][]>;
/**
* Returns a new list containing the contents of the given list, followed by the given element.
*/
append<T, U>(el: U, list: List<T>): (T & U)[];
append<U>(el: U): <T>(list: List<T>) => (T & U)[];
// append<T, U>: CurriedFunction2<U, List<T>, (T & U)[]>;
/**
* Applies function fn to the argument list args. This is useful for creating a fixed-arity function from
* a variadic function. fn should be a bound function if context is significant.
*/
apply<TResult>(fn: (...args: any[]) => TResult, args: any[]): TResult;
apply<TResult>(fn: (...args: any[]) => TResult): <U>(args: any[]) => TResult;
// apply<TResult>: CurriedFunction2<(...args: any[]) => TResult, any[], TResult>;
/**
* Given a spec object recursively mapping properties to functions, creates a function producing an object
* of the same structure, by mapping each property to the result of calling its associated function with
* the supplied arguments.
*/
applySpec<T>(obj: any): Variadic<T>;
/**
* Makes an ascending comparator function out of a function that returns a value that can be compared with `<` and `>`.
*/
ascend<T, V extends Ord>(comparator: (val: T) => V, a: T, b: T): number;
ascend<T, V extends Ord>(comparator: (val: T) => V, a: T): (b: T) => number;
ascend<T, V extends Ord>(comparator: (val: T) => V): CurriedFunction2<T, T, number>;
// ascend<T, V extends Ord>: CurriedFunction3<(val: T) => V, T, T, number>;
/**
* Makes a shallow clone of an object, setting or overriding the specified property with the given value.
*/
// hard to mix cuz different initial generics?
// extend object with new property
// assoc<T, U extends Struct<any>, K extends keyof U>(prop: K, val: T, obj: U): {[P in K]: T} & U;
// assoc<T, U extends Struct<any>, K extends keyof U>(prop: K, val: T): (obj: U) => {[P in K]: T} & U; // generics too early?
// assoc<T, U extends Struct<any>, K extends keyof U>(prop: K): CurriedFunction2<T,U, {[P in K]: T} & U>; // generics too early?
// assoc<T, U extends Struct<any>, K extends keyof U>: CurriedFunction3<K, T, U, {[P in K]: T} & U>;
// // extend object with new property
// assoc<K extends string, T, U extends Struct<any>>(prop: K, val: T, obj: U): {[P in K]: T} & U;
// assoc<K extends string, T>(prop: K, val: T):{
// <U extends Struct<any>>(obj: U): {[P in K]: T} & U;
// };
// assoc<K extends string>(prop: K):{
// <T, U extends Struct<any>>(val: T, obj: U): {[P in K]: T} & U;
// <T>(val: T):{
// <U extends Struct<any>>(obj: U): {[P in K]: T} & U;
// };
// };
// homogeneous object
assoc<T, U extends Struct<T>>(prop: Prop, val: T, obj: U): U;
assoc<T>(prop: Prop, val: T): <U extends Struct<T>>(obj: U) => U;
assoc<T, U extends Struct<T>>(prop: Prop): CurriedFunction2<T, U, U>; // generics too early?
// assoc<T, U extends Struct<T>>: CurriedFunction3<Prop, T, U, U>;
// any object as long as the type remains unchanged
assoc<T>(prop: Prop, val: any, obj: T): T;
assoc(prop: Prop, val: any): <T>(obj: T) => T;
assoc<T>(prop: Prop): CurriedFunction2<any, T, T>; // generics too early?
// assoc<T>: CurriedFunction3<Prop, any, T, T>;
// any object as long as the type remains unchanged
assoc<T>(prop: Prop, val: any, obj: T): T;
assoc(prop: Prop, val: any):{
<T>(obj: T): T;
};
assoc(prop: Prop):{
<T>(val: any, obj: T): T;
(val: any):{
<T>(obj: T): T;
};
};
/**
* Makes a shallow clone of an object, setting or overriding the nodes required to create the given path, and
* placing the specific value at the tail end of that path.
*/
// assocPath<T,U>(path: Path, val: T, obj: U): U;
// assocPath<T>(path: Path, val: T): <U>(obj: U) => U;
// assocPath<T,U>(path: Path): CurriedFunction2<T, U, U>;
// assocPath<T,U>: CurriedFunction3<Path, T, U, U>;
// base
assocPath<T, U>(path: Path, val: T, obj: U): U;
assocPath<T>(path: Path, val: T):{
<U>(obj: U): U;
};
assocPath(path: Path):{
<T, U>(val: T, obj: U): U;
<T>(val: T):{
<U>(obj: U): U;
};
};
/**
* Wraps a function of any arity (including nullary) in a function that accepts exactly 2
* parameters. Any extraneous parameters will not be passed to the supplied function.
*/
binary<T, A, B>(fn: (a: A, b: T, ...args: any[]) => T): (a: A, b: B) => T;
binary<T>(fn: Variadic<T>): (a: any, b: any) => T;
/**
* Creates a function that is bound to a context. Note: R.bind does not provide the additional argument-binding
* capabilities of Function.prototype.bind.
*/
bind<T>(fn: Variadic<T>, thisObj: {}): Variadic<T>;
bind<T>(fn: Variadic<T>): (thisObj: {}) => Variadic<T>;
// bind<T>: CurriedFunction2<Variadic<T>, {}, Variadic<T>>;
/**
* A function wrapping calls to the two functions in an && operation, returning the result of the first function
* if it is false-y and the result of the second function otherwise. Note that this is short-circuited, meaning
* that the second function will not be invoked if the first returns a false-y value.
*/
both<T>(pred1: Pred<T>, pred2: Pred<T>): Pred<T>;
both<T>(pred1: Pred<T>): (pred2: Pred<T>) => Pred<T>;
// both<T>: CurriedFunction2<Pred<T>, Pred<T>, Pred<T>>;
/**
* Returns the result of calling its first argument with the remaining arguments. This is occasionally useful
* as a converging function for R.converge: the left branch can produce a function while the right branch
* produces a value to be passed to that function as an argument.
*/
// not curried!
call<T>(fn: Variadic<T>, ...args: any[]): T;
/**
 * `chain` maps a function over a list and concatenates the results.
 * This implementation is compatible with the Fantasy-land Chain spec.
 */
// List version
chain<T, U>(fn: (n: T) => U[], list: List<T>): U[];
chain<T, U>(fn: (n: T) => U[]): (list: List<T>) => U[];
// chain<T, U>: CurriedFunction2<(n: T) => U[], List<T>, U[]>;
// generic Chain version
chain<T, U>(fn: (n: T) => Chain<U>, list: Chain<T>): Chain<U>;
chain<T, U>(fn: (n: T) => Chain<U>): (list: Chain<T>) => Chain<U>;
// chain<T, U>: CurriedFunction2<(n: T) => Chain<U>, Chain<T>, Chain<U>>;
// version where the second argument is itself a function over the Chain
chain<T, U, V>(fn: (v: V) => (list: Chain<T>) => Chain<U>, monad: (chain: Chain<T>) => V): (list: Chain<T>) => Chain<U>;
chain<T, U, V>(fn: (v: V) => (list: Chain<T>) => Chain<U>): (monad: (chain: Chain<T>) => V) => (list: Chain<T>) => Chain<U>;
// chain<T, U, V>: CurriedFunction2<(v: V) => (list: Chain<T>) => Chain<U>, (chain: Chain<T>) => V, (list: Chain<T>) => Chain<U>>;
/**
 * Restricts a number to be within a range.
 * Also works for other ordered types such as Strings and Dates.
 */
// clamp<T>(min: T, max: T, value: T): T;
// clamp<T>(min: T, max: T): (value: T) => T;
// clamp<T>(min: T): CurriedFunction2<T,T,T>;
// clamp<T>: CurriedFunction3<T,T,T,T>;
// base (uncurried) form plus manually-curried object forms below
clamp<T>(min: T, max: T, value: T): T;
clamp<T>(min: T, max: T):{
(value: T): T;
};
clamp<T>(min: T):{
(max: T, value: T): T;
(max: T):{
(value: T): T;
};
};
/**
 * Creates a deep copy of the value which may contain (nested) Arrays and Objects, Numbers, Strings, Booleans and Dates.
 */
clone<T>(value: T): T;
clone<T>(value: List<T>): T[];
/**
 * Makes a comparator function out of a function that reports whether the first element is less than the second.
 */
comparator<T>(pred: (a: T, b: T) => boolean): (x: T, y: T) => number;
/**
 * Takes a function f and returns a function g such that:
 * - applying g to zero or more arguments will give true if applying the same arguments to f gives
 * a logical false value; and
 * - applying g to zero or more arguments will give false if applying the same arguments to f gives
 * a logical true value.
 */
complement<T>(pred: Variadic<boolean>): Variadic<boolean>;
/**
 * Performs right-to-left function composition. The rightmost function may have any arity; the remaining
 * functions must be unary.
 */
// Generated overload family: up to 9 composed functions; the rightmost function may take up to 4 arguments.
compose<V0, T1>(fn0: (x0: V0) => T1): (x0: V0) => T1;
compose<V0, V1, T1>(fn0: (x0: V0, x1: V1) => T1): (x0: V0, x1: V1) => T1;
compose<V0, V1, V2, T1>(fn0: (x0: V0, x1: V1, x2: V2) => T1): (x0: V0, x1: V1, x2: V2) => T1;
compose<V0, V1, V2, V3, T1>(fn0: (x0: V0, x1: V1, x2: V2, x3: V3) => T1): (x0: V0, x1: V1, x2: V2, x3: V3) => T1;
compose<V0, T1, T2>(fn1: (x: T1) => T2, fn0: (x0: V0) => T1): (x0: V0) => T2;
compose<V0, V1, T1, T2>(fn1: (x: T1) => T2, fn0: (x0: V0, x1: V1) => T1): (x0: V0, x1: V1) => T2;
compose<V0, V1, V2, T1, T2>(fn1: (x: T1) => T2, fn0: (x0: V0, x1: V1, x2: V2) => T1): (x0: V0, x1: V1, x2: V2) => T2;
compose<V0, V1, V2, V3, T1, T2>(fn1: (x: T1) => T2, fn0: (x0: V0, x1: V1, x2: V2, x3: V3) => T1): (x0: V0, x1: V1, x2: V2, x3: V3) => T2;
compose<V0, T1, T2, T3>(fn2: (x: T2) => T3, fn1: (x: T1) => T2, fn0: (x0: V0) => T1): (x0: V0) => T3;
compose<V0, V1, T1, T2, T3>(fn2: (x: T2) => T3, fn1: (x: T1) => T2, fn0: (x0: V0, x1: V1) => T1): (x0: V0, x1: V1) => T3;
compose<V0, V1, V2, T1, T2, T3>(fn2: (x: T2) => T3, fn1: (x: T1) => T2, fn0: (x0: V0, x1: V1, x2: V2) => T1): (x0: V0, x1: V1, x2: V2) => T3;
compose<V0, V1, V2, V3, T1, T2, T3>(fn2: (x: T2) => T3, fn1: (x: T1) => T2, fn0: (x0: V0, x1: V1, x2: V2, x3: V3) => T1): (x0: V0, x1: V1, x2: V2, x3: V3) => T3;
compose<V0, T1, T2, T3, T4>(fn3: (x: T3) => T4, fn2: (x: T2) => T3, fn1: (x: T1) => T2, fn0: (x0: V0) => T1): (x0: V0) => T4;
compose<V0, V1, T1, T2, T3, T4>(fn3: (x: T3) => T4, fn2: (x: T2) => T3, fn1: (x: T1) => T2, fn0: (x0: V0, x1: V1) => T1): (x0: V0, x1: V1) => T4;
compose<V0, V1, V2, T1, T2, T3, T4>(fn3: (x: T3) => T4, fn2: (x: T2) => T3, fn1: (x: T1) => T2, fn0: (x0: V0, x1: V1, x2: V2) => T1): (x0: V0, x1: V1, x2: V2) => T4;
compose<V0, V1, V2, V3, T1, T2, T3, T4>(fn3: (x: T3) => T4, fn2: (x: T2) => T3, fn1: (x: T1) => T2, fn0: (x0: V0, x1: V1, x2: V2, x3: V3) => T1): (x0: V0, x1: V1, x2: V2, x3: V3) => T4;
compose<V0, T1, T2, T3, T4, T5>(fn4: (x: T4) => T5, fn3: (x: T3) => T4, fn2: (x: T2) => T3, fn1: (x: T1) => T2, fn0: (x0: V0) => T1): (x0: V0) => T5;
compose<V0, V1, T1, T2, T3, T4, T5>(fn4: (x: T4) => T5, fn3: (x: T3) => T4, fn2: (x: T2) => T3, fn1: (x: T1) => T2, fn0: (x0: V0, x1: V1) => T1): (x0: V0, x1: V1) => T5;
compose<V0, V1, V2, T1, T2, T3, T4, T5>(fn4: (x: T4) => T5, fn3: (x: T3) => T4, fn2: (x: T2) => T3, fn1: (x: T1) => T2, fn0: (x0: V0, x1: V1, x2: V2) => T1): (x0: V0, x1: V1, x2: V2) => T5;
compose<V0, V1, V2, V3, T1, T2, T3, T4, T5>(fn4: (x: T4) => T5, fn3: (x: T3) => T4, fn2: (x: T2) => T3, fn1: (x: T1) => T2, fn0: (x0: V0, x1: V1, x2: V2, x3: V3) => T1): (x0: V0, x1: V1, x2: V2, x3: V3) => T5;
compose<V0, T1, T2, T3, T4, T5, T6>(fn5: (x: T5) => T6, fn4: (x: T4) => T5, fn3: (x: T3) => T4, fn2: (x: T2) => T3, fn1: (x: T1) => T2, fn0: (x0: V0) => T1): (x0: V0) => T6;
compose<V0, V1, T1, T2, T3, T4, T5, T6>(fn5: (x: T5) => T6, fn4: (x: T4) => T5, fn3: (x: T3) => T4, fn2: (x: T2) => T3, fn1: (x: T1) => T2, fn0: (x0: V0, x1: V1) => T1): (x0: V0, x1: V1) => T6;
compose<V0, V1, V2, T1, T2, T3, T4, T5, T6>(fn5: (x: T5) => T6, fn4: (x: T4) => T5, fn3: (x: T3) => T4, fn2: (x: T2) => T3, fn1: (x: T1) => T2, fn0: (x0: V0, x1: V1, x2: V2) => T1): (x0: V0, x1: V1, x2: V2) => T6;
compose<V0, V1, V2, V3, T1, T2, T3, T4, T5, T6>(fn5: (x: T5) => T6, fn4: (x: T4) => T5, fn3: (x: T3) => T4, fn2: (x: T2) => T3, fn1: (x: T1) => T2, fn0: (x0: V0, x1: V1, x2: V2, x3: V3) => T1): (x0: V0, x1: V1, x2: V2, x3: V3) => T6;
compose<V0, T1, T2, T3, T4, T5, T6, T7>(fn6: (x: T6) => T7, fn5: (x: T5) => T6, fn4: (x: T4) => T5, fn3: (x: T3) => T4, fn2: (x: T2) => T3, fn1: (x: T1) => T2, fn0: (x0: V0) => T1): (x0: V0) => T7;
compose<V0, V1, T1, T2, T3, T4, T5, T6, T7>(fn6: (x: T6) => T7, fn5: (x: T5) => T6, fn4: (x: T4) => T5, fn3: (x: T3) => T4, fn2: (x: T2) => T3, fn1: (x: T1) => T2, fn0: (x0: V0, x1: V1) => T1): (x0: V0, x1: V1) => T7;
compose<V0, V1, V2, T1, T2, T3, T4, T5, T6, T7>(fn6: (x: T6) => T7, fn5: (x: T5) => T6, fn4: (x: T4) => T5, fn3: (x: T3) => T4, fn2: (x: T2) => T3, fn1: (x: T1) => T2, fn0: (x0: V0, x1: V1, x2: V2) => T1): (x0: V0, x1: V1, x2: V2) => T7;
compose<V0, V1, V2, V3, T1, T2, T3, T4, T5, T6, T7>(fn6: (x: T6) => T7, fn5: (x: T5) => T6, fn4: (x: T4) => T5, fn3: (x: T3) => T4, fn2: (x: T2) => T3, fn1: (x: T1) => T2, fn0: (x0: V0, x1: V1, x2: V2, x3: V3) => T1): (x0: V0, x1: V1, x2: V2, x3: V3) => T7;
compose<V0, T1, T2, T3, T4, T5, T6, T7, T8>(fn7: (x: T7) => T8, fn6: (x: T6) => T7, fn5: (x: T5) => T6, fn4: (x: T4) => T5, fn3: (x: T3) => T4, fn2: (x: T2) => T3, fn1: (x: T1) => T2, fn0: (x0: V0) => T1): (x0: V0) => T8;
compose<V0, V1, T1, T2, T3, T4, T5, T6, T7, T8>(fn7: (x: T7) => T8, fn6: (x: T6) => T7, fn5: (x: T5) => T6, fn4: (x: T4) => T5, fn3: (x: T3) => T4, fn2: (x: T2) => T3, fn1: (x: T1) => T2, fn0: (x0: V0, x1: V1) => T1): (x0: V0, x1: V1) => T8;
compose<V0, V1, V2, T1, T2, T3, T4, T5, T6, T7, T8>(fn7: (x: T7) => T8, fn6: (x: T6) => T7, fn5: (x: T5) => T6, fn4: (x: T4) => T5, fn3: (x: T3) => T4, fn2: (x: T2) => T3, fn1: (x: T1) => T2, fn0: (x0: V0, x1: V1, x2: V2) => T1): (x0: V0, x1: V1, x2: V2) => T8;
compose<V0, V1, V2, V3, T1, T2, T3, T4, T5, T6, T7, T8>(fn7: (x: T7) => T8, fn6: (x: T6) => T7, fn5: (x: T5) => T6, fn4: (x: T4) => T5, fn3: (x: T3) => T4, fn2: (x: T2) => T3, fn1: (x: T1) => T2, fn0: (x0: V0, x1: V1, x2: V2, x3: V3) => T1): (x0: V0, x1: V1, x2: V2, x3: V3) => T8;
compose<V0, T1, T2, T3, T4, T5, T6, T7, T8, T9>(fn8: (x: T8) => T9, fn7: (x: T7) => T8, fn6: (x: T6) => T7, fn5: (x: T5) => T6, fn4: (x: T4) => T5, fn3: (x: T3) => T4, fn2: (x: T2) => T3, fn1: (x: T1) => T2, fn0: (x0: V0) => T1): (x0: V0) => T9;
compose<V0, V1, T1, T2, T3, T4, T5, T6, T7, T8, T9>(fn8: (x: T8) => T9, fn7: (x: T7) => T8, fn6: (x: T6) => T7, fn5: (x: T5) => T6, fn4: (x: T4) => T5, fn3: (x: T3) => T4, fn2: (x: T2) => T3, fn1: (x: T1) => T2, fn0: (x0: V0, x1: V1) => T1): (x0: V0, x1: V1) => T9;
compose<V0, V1, V2, T1, T2, T3, T4, T5, T6, T7, T8, T9>(fn8: (x: T8) => T9, fn7: (x: T7) => T8, fn6: (x: T6) => T7, fn5: (x: T5) => T6, fn4: (x: T4) => T5, fn3: (x: T3) => T4, fn2: (x: T2) => T3, fn1: (x: T1) => T2, fn0: (x0: V0, x1: V1, x2: V2) => T1): (x0: V0, x1: V1, x2: V2) => T9;
compose<V0, V1, V2, V3, T1, T2, T3, T4, T5, T6, T7, T8, T9>(fn8: (x: T8) => T9, fn7: (x: T7) => T8, fn6: (x: T6) => T7, fn5: (x: T5) => T6, fn4: (x: T4) => T5, fn3: (x: T3) => T4, fn2: (x: T2) => T3, fn1: (x: T1) => T2, fn0: (x0: V0, x1: V1, x2: V2, x3: V3) => T1): (x0: V0, x1: V1, x2: V2, x3: V3) => T9;
/**
 * Returns the right-to-left Kleisli composition of the provided functions, each of which must return a value of a type supported by chain.
 */
// Generated overload family: up to 9 Chain-returning functions.
composeK<V, T1>(fn0: (v: Chain<V>) => Chain<T1>): (v: V) => Chain<T1>;
composeK<V, T1, T2>(fn1: (x: T1) => Chain<T2>, fn0: (v: Chain<V>) => Chain<T1>): (v: V) => Chain<T2>;
composeK<V, T1, T2, T3>(fn2: (x: T2) => Chain<T3>, fn1: (x: T1) => Chain<T2>, fn0: (v: Chain<V>) => Chain<T1>): (v: V) => Chain<T3>;
composeK<V, T1, T2, T3, T4>(fn3: (x: T3) => Chain<T4>, fn2: (x: T2) => Chain<T3>, fn1: (x: T1) => Chain<T2>, fn0: (v: Chain<V>) => Chain<T1>): (v: V) => Chain<T4>;
composeK<V, T1, T2, T3, T4, T5>(fn4: (x: T4) => Chain<T5>, fn3: (x: T3) => Chain<T4>, fn2: (x: T2) => Chain<T3>, fn1: (x: T1) => Chain<T2>, fn0: (v: Chain<V>) => Chain<T1>): (v: V) => Chain<T5>;
composeK<V, T1, T2, T3, T4, T5, T6>(fn5: (x: T5) => Chain<T6>, fn4: (x: T4) => Chain<T5>, fn3: (x: T3) => Chain<T4>, fn2: (x: T2) => Chain<T3>, fn1: (x: T1) => Chain<T2>, fn0: (v: Chain<V>) => Chain<T1>): (v: V) => Chain<T6>;
composeK<V, T1, T2, T3, T4, T5, T6, T7>(fn6: (x: T6) => Chain<T7>, fn5: (x: T5) => Chain<T6>, fn4: (x: T4) => Chain<T5>, fn3: (x: T3) => Chain<T4>, fn2: (x: T2) => Chain<T3>, fn1: (x: T1) => Chain<T2>, fn0: (v: Chain<V>) => Chain<T1>): (v: V) => Chain<T7>;
composeK<V, T1, T2, T3, T4, T5, T6, T7, T8>(fn7: (x: T7) => Chain<T8>, fn6: (x: T6) => Chain<T7>, fn5: (x: T5) => Chain<T6>, fn4: (x: T4) => Chain<T5>, fn3: (x: T3) => Chain<T4>, fn2: (x: T2) => Chain<T3>, fn1: (x: T1) => Chain<T2>, fn0: (v: Chain<V>) => Chain<T1>): (v: V) => Chain<T8>;
composeK<V, T1, T2, T3, T4, T5, T6, T7, T8, T9>(fn8: (x: T8) => Chain<T9>, fn7: (x: T7) => Chain<T8>, fn6: (x: T6) => Chain<T7>, fn5: (x: T5) => Chain<T6>, fn4: (x: T4) => Chain<T5>, fn3: (x: T3) => Chain<T4>, fn2: (x: T2) => Chain<T3>, fn1: (x: T1) => Chain<T2>, fn0: (v: Chain<V>) => Chain<T1>): (v: V) => Chain<T9>;
/**
 * Performs right-to-left composition of one or more Promise-returning functions. The rightmost function may have any arity; the remaining functions must be unary.
 */
// Generated overload family: up to 9 functions; the rightmost function may take up to 4 arguments.
composeP<V0, T1>(fn0: (x0: V0) => Promise<T1>): (x0: V0) => Promise<T1>;
composeP<V0, V1, T1>(fn0: (x0: V0, x1: V1) => Promise<T1>): (x0: V0, x1: V1) => Promise<T1>;
composeP<V0, V1, V2, T1>(fn0: (x0: V0, x1: V1, x2: V2) => Promise<T1>): (x0: V0, x1: V1, x2: V2) => Promise<T1>;
composeP<V0, V1, V2, V3, T1>(fn0: (x0: V0, x1: V1, x2: V2, x3: V3) => Promise<T1>): (x0: V0, x1: V1, x2: V2, x3: V3) => Promise<T1>;
composeP<V0, T1, T2>(fn1: (x: T1) => Promise<T2>|T2, fn0: (x0: V0) => Promise<T1>): (x0: V0) => Promise<T2>;
composeP<V0, V1, T1, T2>(fn1: (x: T1) => Promise<T2>|T2, fn0: (x0: V0, x1: V1) => Promise<T1>): (x0: V0, x1: V1) => Promise<T2>;
composeP<V0, V1, V2, T1, T2>(fn1: (x: T1) => Promise<T2>|T2, fn0: (x0: V0, x1: V1, x2: V2) => Promise<T1>): (x0: V0, x1: V1, x2: V2) => Promise<T2>;
composeP<V0, V1, V2, V3, T1, T2>(fn1: (x: T1) => Promise<T2>|T2, fn0: (x0: V0, x1: V1, x2: V2, x3: V3) => Promise<T1>): (x0: V0, x1: V1, x2: V2, x3: V3) => Promise<T2>;
composeP<V0, T1, T2, T3>(fn2: (x: T2) => Promise<T3>|T3, fn1: (x: T1) => Promise<T2>|T2, fn0: (x0: V0) => Promise<T1>): (x0: V0) => Promise<T3>;
composeP<V0, V1, T1, T2, T3>(fn2: (x: T2) => Promise<T3>|T3, fn1: (x: T1) => Promise<T2>|T2, fn0: (x0: V0, x1: V1) => Promise<T1>): (x0: V0, x1: V1) => Promise<T3>;
composeP<V0, V1, V2, T1, T2, T3>(fn2: (x: T2) => Promise<T3>|T3, fn1: (x: T1) => Promise<T2>|T2, fn0: (x0: V0, x1: V1, x2: V2) => Promise<T1>): (x0: V0, x1: V1, x2: V2) => Promise<T3>;
composeP<V0, V1, V2, V3, T1, T2, T3>(fn2: (x: T2) => Promise<T3>|T3, fn1: (x: T1) => Promise<T2>|T2, fn0: (x0: V0, x1: V1, x2: V2, x3: V3) => Promise<T1>): (x0: V0, x1: V1, x2: V2, x3: V3) => Promise<T3>;
composeP<V0, T1, T2, T3, T4>(fn3: (x: T3) => Promise<T4>|T4, fn2: (x: T2) => Promise<T3>|T3, fn1: (x: T1) => Promise<T2>|T2, fn0: (x0: V0) => Promise<T1>): (x0: V0) => Promise<T4>;
composeP<V0, V1, T1, T2, T3, T4>(fn3: (x: T3) => Promise<T4>|T4, fn2: (x: T2) => Promise<T3>|T3, fn1: (x: T1) => Promise<T2>|T2, fn0: (x0: V0, x1: V1) => Promise<T1>): (x0: V0, x1: V1) => Promise<T4>;
composeP<V0, V1, V2, T1, T2, T3, T4>(fn3: (x: T3) => Promise<T4>|T4, fn2: (x: T2) => Promise<T3>|T3, fn1: (x: T1) => Promise<T2>|T2, fn0: (x0: V0, x1: V1, x2: V2) => Promise<T1>): (x0: V0, x1: V1, x2: V2) => Promise<T4>;
composeP<V0, V1, V2, V3, T1, T2, T3, T4>(fn3: (x: T3) => Promise<T4>|T4, fn2: (x: T2) => Promise<T3>|T3, fn1: (x: T1) => Promise<T2>|T2, fn0: (x0: V0, x1: V1, x2: V2, x3: V3) => Promise<T1>): (x0: V0, x1: V1, x2: V2, x3: V3) => Promise<T4>;
composeP<V0, T1, T2, T3, T4, T5>(fn4: (x: T4) => Promise<T5>|T5, fn3: (x: T3) => Promise<T4>|T4, fn2: (x: T2) => Promise<T3>|T3, fn1: (x: T1) => Promise<T2>|T2, fn0: (x0: V0) => Promise<T1>): (x0: V0) => Promise<T5>;
composeP<V0, V1, T1, T2, T3, T4, T5>(fn4: (x: T4) => Promise<T5>|T5, fn3: (x: T3) => Promise<T4>|T4, fn2: (x: T2) => Promise<T3>|T3, fn1: (x: T1) => Promise<T2>|T2, fn0: (x0: V0, x1: V1) => Promise<T1>): (x0: V0, x1: V1) => Promise<T5>;
composeP<V0, V1, V2, T1, T2, T3, T4, T5>(fn4: (x: T4) => Promise<T5>|T5, fn3: (x: T3) => Promise<T4>|T4, fn2: (x: T2) => Promise<T3>|T3, fn1: (x: T1) => Promise<T2>|T2, fn0: (x0: V0, x1: V1, x2: V2) => Promise<T1>): (x0: V0, x1: V1, x2: V2) => Promise<T5>;
composeP<V0, V1, V2, V3, T1, T2, T3, T4, T5>(fn4: (x: T4) => Promise<T5>|T5, fn3: (x: T3) => Promise<T4>|T4, fn2: (x: T2) => Promise<T3>|T3, fn1: (x: T1) => Promise<T2>|T2, fn0: (x0: V0, x1: V1, x2: V2, x3: V3) => Promise<T1>): (x0: V0, x1: V1, x2: V2, x3: V3) => Promise<T5>;
composeP<V0, T1, T2, T3, T4, T5, T6>(fn5: (x: T5) => Promise<T6>|T6, fn4: (x: T4) => Promise<T5>|T5, fn3: (x: T3) => Promise<T4>|T4, fn2: (x: T2) => Promise<T3>|T3, fn1: (x: T1) => Promise<T2>|T2, fn0: (x0: V0) => Promise<T1>): (x0: V0) => Promise<T6>;
composeP<V0, V1, T1, T2, T3, T4, T5, T6>(fn5: (x: T5) => Promise<T6>|T6, fn4: (x: T4) => Promise<T5>|T5, fn3: (x: T3) => Promise<T4>|T4, fn2: (x: T2) => Promise<T3>|T3, fn1: (x: T1) => Promise<T2>|T2, fn0: (x0: V0, x1: V1) => Promise<T1>): (x0: V0, x1: V1) => Promise<T6>;
composeP<V0, V1, V2, T1, T2, T3, T4, T5, T6>(fn5: (x: T5) => Promise<T6>|T6, fn4: (x: T4) => Promise<T5>|T5, fn3: (x: T3) => Promise<T4>|T4, fn2: (x: T2) => Promise<T3>|T3, fn1: (x: T1) => Promise<T2>|T2, fn0: (x0: V0, x1: V1, x2: V2) => Promise<T1>): (x0: V0, x1: V1, x2: V2) => Promise<T6>;
composeP<V0, V1, V2, V3, T1, T2, T3, T4, T5, T6>(fn5: (x: T5) => Promise<T6>|T6, fn4: (x: T4) => Promise<T5>|T5, fn3: (x: T3) => Promise<T4>|T4, fn2: (x: T2) => Promise<T3>|T3, fn1: (x: T1) => Promise<T2>|T2, fn0: (x0: V0, x1: V1, x2: V2, x3: V3) => Promise<T1>): (x0: V0, x1: V1, x2: V2, x3: V3) => Promise<T6>;
composeP<V0, T1, T2, T3, T4, T5, T6, T7>(fn6: (x: T6) => Promise<T7>|T7, fn5: (x: T5) => Promise<T6>|T6, fn4: (x: T4) => Promise<T5>|T5, fn3: (x: T3) => Promise<T4>|T4, fn2: (x: T2) => Promise<T3>|T3, fn1: (x: T1) => Promise<T2>|T2, fn0: (x0: V0) => Promise<T1>): (x0: V0) => Promise<T7>;
composeP<V0, V1, T1, T2, T3, T4, T5, T6, T7>(fn6: (x: T6) => Promise<T7>|T7, fn5: (x: T5) => Promise<T6>|T6, fn4: (x: T4) => Promise<T5>|T5, fn3: (x: T3) => Promise<T4>|T4, fn2: (x: T2) => Promise<T3>|T3, fn1: (x: T1) => Promise<T2>|T2, fn0: (x0: V0, x1: V1) => Promise<T1>): (x0: V0, x1: V1) => Promise<T7>;
composeP<V0, V1, V2, T1, T2, T3, T4, T5, T6, T7>(fn6: (x: T6) => Promise<T7>|T7, fn5: (x: T5) => Promise<T6>|T6, fn4: (x: T4) => Promise<T5>|T5, fn3: (x: T3) => Promise<T4>|T4, fn2: (x: T2) => Promise<T3>|T3, fn1: (x: T1) => Promise<T2>|T2, fn0: (x0: V0, x1: V1, x2: V2) => Promise<T1>): (x0: V0, x1: V1, x2: V2) => Promise<T7>;
composeP<V0, V1, V2, V3, T1, T2, T3, T4, T5, T6, T7>(fn6: (x: T6) => Promise<T7>|T7, fn5: (x: T5) => Promise<T6>|T6, fn4: (x: T4) => Promise<T5>|T5, fn3: (x: T3) => Promise<T4>|T4, fn2: (x: T2) => Promise<T3>|T3, fn1: (x: T1) => Promise<T2>|T2, fn0: (x0: V0, x1: V1, x2: V2, x3: V3) => Promise<T1>): (x0: V0, x1: V1, x2: V2, x3: V3) => Promise<T7>;
composeP<V0, T1, T2, T3, T4, T5, T6, T7, T8>(fn7: (x: T7) => Promise<T8>|T8, fn6: (x: T6) => Promise<T7>|T7, fn5: (x: T5) => Promise<T6>|T6, fn4: (x: T4) => Promise<T5>|T5, fn3: (x: T3) => Promise<T4>|T4, fn2: (x: T2) => Promise<T3>|T3, fn1: (x: T1) => Promise<T2>|T2, fn0: (x0: V0) => Promise<T1>): (x0: V0) => Promise<T8>;
composeP<V0, V1, T1, T2, T3, T4, T5, T6, T7, T8>(fn7: (x: T7) => Promise<T8>|T8, fn6: (x: T6) => Promise<T7>|T7, fn5: (x: T5) => Promise<T6>|T6, fn4: (x: T4) => Promise<T5>|T5, fn3: (x: T3) => Promise<T4>|T4, fn2: (x: T2) => Promise<T3>|T3, fn1: (x: T1) => Promise<T2>|T2, fn0: (x0: V0, x1: V1) => Promise<T1>): (x0: V0, x1: V1) => Promise<T8>;
composeP<V0, V1, V2, T1, T2, T3, T4, T5, T6, T7, T8>(fn7: (x: T7) => Promise<T8>|T8, fn6: (x: T6) => Promise<T7>|T7, fn5: (x: T5) => Promise<T6>|T6, fn4: (x: T4) => Promise<T5>|T5, fn3: (x: T3) => Promise<T4>|T4, fn2: (x: T2) => Promise<T3>|T3, fn1: (x: T1) => Promise<T2>|T2, fn0: (x0: V0, x1: V1, x2: V2) => Promise<T1>): (x0: V0, x1: V1, x2: V2) => Promise<T8>;
composeP<V0, V1, V2, V3, T1, T2, T3, T4, T5, T6, T7, T8>(fn7: (x: T7) => Promise<T8>|T8, fn6: (x: T6) => Promise<T7>|T7, fn5: (x: T5) => Promise<T6>|T6, fn4: (x: T4) => Promise<T5>|T5, fn3: (x: T3) => Promise<T4>|T4, fn2: (x: T2) => Promise<T3>|T3, fn1: (x: T1) => Promise<T2>|T2, fn0: (x0: V0, x1: V1, x2: V2, x3: V3) => Promise<T1>): (x0: V0, x1: V1, x2: V2, x3: V3) => Promise<T8>;
composeP<V0, T1, T2, T3, T4, T5, T6, T7, T8, T9>(fn8: (x: T8) => Promise<T9>|T9, fn7: (x: T7) => Promise<T8>|T8, fn6: (x: T6) => Promise<T7>|T7, fn5: (x: T5) => Promise<T6>|T6, fn4: (x: T4) => Promise<T5>|T5, fn3: (x: T3) => Promise<T4>|T4, fn2: (x: T2) => Promise<T3>|T3, fn1: (x: T1) => Promise<T2>|T2, fn0: (x0: V0) => Promise<T1>): (x0: V0) => Promise<T9>;
composeP<V0, V1, T1, T2, T3, T4, T5, T6, T7, T8, T9>(fn8: (x: T8) => Promise<T9>|T9, fn7: (x: T7) => Promise<T8>|T8, fn6: (x: T6) => Promise<T7>|T7, fn5: (x: T5) => Promise<T6>|T6, fn4: (x: T4) => Promise<T5>|T5, fn3: (x: T3) => Promise<T4>|T4, fn2: (x: T2) => Promise<T3>|T3, fn1: (x: T1) => Promise<T2>|T2, fn0: (x0: V0, x1: V1) => Promise<T1>): (x0: V0, x1: V1) => Promise<T9>;
composeP<V0, V1, V2, T1, T2, T3, T4, T5, T6, T7, T8, T9>(fn8: (x: T8) => Promise<T9>|T9, fn7: (x: T7) => Promise<T8>|T8, fn6: (x: T6) => Promise<T7>|T7, fn5: (x: T5) => Promise<T6>|T6, fn4: (x: T4) => Promise<T5>|T5, fn3: (x: T3) => Promise<T4>|T4, fn2: (x: T2) => Promise<T3>|T3, fn1: (x: T1) => Promise<T2>|T2, fn0: (x0: V0, x1: V1, x2: V2) => Promise<T1>): (x0: V0, x1: V1, x2: V2) => Promise<T9>;
composeP<V0, V1, V2, V3, T1, T2, T3, T4, T5, T6, T7, T8, T9>(fn8: (x: T8) => Promise<T9>|T9, fn7: (x: T7) => Promise<T8>|T8, fn6: (x: T6) => Promise<T7>|T7, fn5: (x: T5) => Promise<T6>|T6, fn4: (x: T4) => Promise<T5>|T5, fn3: (x: T3) => Promise<T4>|T4, fn2: (x: T2) => Promise<T3>|T3, fn1: (x: T1) => Promise<T2>|T2, fn0: (x0: V0, x1: V1, x2: V2, x3: V3) => Promise<T1>): (x0: V0, x1: V1, x2: V2, x3: V3) => Promise<T9>;
/**
 * Returns a new list consisting of the elements of the first list followed by the elements
 * of the second.
 */
concat<T extends List<any>>(list1: T, list2: T): T;
concat<T extends List<any>>(list1: T): (list2: T) => T;
// concat<T extends List<any>>: CurriedFunction2<T, T, T>;
/**
 * Returns a function, fn, which encapsulates if/else-if/else logic. R.cond takes a list of [predicate, transform] pairs.
 * All of the arguments to fn are applied to each of the predicates in turn until one returns a "truthy" value, at which
 * point fn returns the result of applying its arguments to the corresponding transformer. If none of the predicates
 * matches, fn returns undefined.
 * NOTE(review): the declared return type is U even though the doc allows undefined (pre-strictNullChecks style) — confirm before tightening.
 */
cond<T, U>(fns: [Pred<T>, (v: T) => U][]): (v: T) => U;
/**
 * Wraps a constructor function inside a curried function that can be called with the same arguments and returns the same type.
 */
construct(fn: Function): Function;
/**
 * Wraps a constructor function inside a curried function that can be called with the same arguments and returns the same type.
 * The arity of the function returned is specified to allow using variadic constructor functions.
 */
constructN(n: number, fn: Function): Function;
// constructN: CurriedFunction2<number, Function, Function>;
/**
 * Returns `true` if the specified item is somewhere in the list, `false` otherwise.
 * Equivalent to `indexOf(a)(list) > -1`. Uses strict (`===`) equality checking.
 */
// string version
contains(a: string, list: string): boolean;
contains(a: string): (list: string) => boolean;
// contains: CurriedFunction2<string, string, boolean>;
// list version
contains<T, R extends List<T>>(a: T, list: R): boolean;
contains<T, R extends List<T>>(a: T): (list: R) => boolean;
// contains<T, R extends List<T>>: CurriedFunction2<T, R, boolean>;
/**
 * Accepts a converging function and a list of branching functions and returns a new
 * function. When invoked, this new function is applied to some arguments, each branching
 * function is applied to those same arguments. The results of each branching function
 * are passed as arguments to the converging function to produce the return value.
 */
converge<T>(after: Variadic<T>, fns: List<Variadic<any>>): Variadic<T>;
// converge<T>: CurriedFunction2<Variadic<T>, List<Variadic<any>>, Variadic<T>>;
/**
 * Counts the elements of a list according to how many match each value
 * of a key generated by the supplied function. Returns an object
 * mapping the keys produced by `fn` to the number of occurrences in
 * the list. Note that all keys are coerced to strings because of how
 * JavaScript objects work.
 */
countBy<T>(fn: (a: T) => Prop, list: List<T>): Obj<number>;
countBy<T>(fn: (a: T) => Prop): (list: List<T>) => Obj<number>;
// countBy<T>: CurriedFunction2<(a: T) => Prop, List<T>, Obj<number>>;
/**
 * Returns a curried equivalent of the provided function.
 */
// Generated overloads for functions of arity 1-9.
curry<T1, TResult>(fn: (a: T1) => TResult): CurriedFunction1<T1, TResult>;
curry<T1, T2, TResult>(fn: (a: T1, b: T2) => TResult): CurriedFunction2<T1, T2, TResult>;
curry<T1, T2, T3, TResult>(fn: (a: T1, b: T2, c: T3) => TResult): CurriedFunction3<T1, T2, T3, TResult>;
curry<T1, T2, T3, T4, TResult>(fn: (a: T1, b: T2, c: T3, d: T4) => TResult): CurriedFunction4<T1, T2, T3, T4, TResult>;
curry<T1, T2, T3, T4, T5, TResult>(fn: (a: T1, b: T2, c: T3, d: T4, e: T5) => TResult): CurriedFunction5<T1, T2, T3, T4, T5, TResult>;
curry<T1, T2, T3, T4, T5, T6, TResult>(fn: (a: T1, b: T2, c: T3, d: T4, e: T5, f: T6) => TResult): CurriedFunction6<T1, T2, T3, T4, T5, T6, TResult>;
curry<T1, T2, T3, T4, T5, T6, T7, TResult>(fn: (a: T1, b: T2, c: T3, d: T4, e: T5, f: T6, g: T7) => TResult): CurriedFunction7<T1, T2, T3, T4, T5, T6, T7, TResult>;
curry<T1, T2, T3, T4, T5, T6, T7, T8, TResult>(fn: (a: T1, b: T2, c: T3, d: T4, e: T5, f: T6, g: T7, h: T8) => TResult): CurriedFunction8<T1, T2, T3, T4, T5, T6, T7, T8, TResult>;
curry<T1, T2, T3, T4, T5, T6, T7, T8, T9, TResult>(fn: (a: T1, b: T2, c: T3, d: T4, e: T5, f: T6, g: T7, h: T8, i: T9) => TResult): CurriedFunction9<T1, T2, T3, T4, T5, T6, T7, T8, T9, TResult>;
// curry(fn: Function): Function
/**
 * Returns a curried equivalent of the provided function, with the specified arity.
 */
curryN<T>(length: number, fn: Variadic<T>): Variadic<T>;
// curryN<T>: CurriedFunction2<number, Variadic<T>, Variadic<T>>;
/**
 * Decrements its argument.
 */
dec(n: number): number;
/**
 * Returns the second argument if it is not null or undefined. If it is null or undefined, the
 * first (default) argument is returned.
 * NOTE(review): the Ramda runtime also treats NaN as missing — confirm before relying on this type.
 */
defaultTo<T,U>(a: T, b: U): T|U;
defaultTo<T>(a: T): <U>(b: U) => T|U;
// defaultTo<T,U>: CurriedFunction2<T, U, T|U>;
/**
 * Makes a descending comparator function out of a function that returns a value that can be compared with `<` and `>`.
 */
descend<T, V extends Ord>(comparator: (val: T) => V, a: T, b: T): number;
descend<T, V extends Ord>(comparator: (val: T) => V, a: T): (b: T) => number;
descend<T, V extends Ord>(comparator: (val: T) => V): CurriedFunction2<T, T, number>;
// descend<T, V extends Ord>: CurriedFunction3<(val: T) => V, T, T, number>;
/**
 * Finds the set (i.e. no duplicates) of all elements in the first list not contained in the second list.
 */
difference<T>(list1: List<T>, list2: List<T>): T[];
difference<T>(list1: List<T>): (list2: List<T>) => T[];
// difference<T>: CurriedFunction2<List<T>, List<T>, T[]>;
/**
 * Finds the set (i.e. no duplicates) of all elements in the first list not contained in the second list.
 * Duplication is determined according to the value returned by applying the supplied predicate to two list
 * elements.
 */
// differenceWith<T>(pred: (a: T, b: T) => boolean, list1: List<T>, list2: List<T>): T[];
// differenceWith<T>(pred: (a: T, b: T) => boolean, list1: List<T>): (list2: List<T>) => T[];
// differenceWith<T>(pred: (a: T, b: T) => boolean): CurriedFunction2<List<T>,List<T>,T>;
// differenceWith<T>: CurriedFunction3<(a: T, b: T) => boolean, List<T>, List<T>, T[]>;
// base (uncurried) form plus manually-curried object forms below
differenceWith<T>(pred: (a: T, b: T) => boolean, list1: List<T>, list2: List<T>): T[];
differenceWith<T>(pred: (a: T, b: T) => boolean, list1: List<T>):{
(list2: List<T>): T[];
};
differenceWith<T>(pred: (a: T, b: T) => boolean):{
(list1: List<T>, list2: List<T>): T[];
(list1: List<T>):{
(list2: List<T>): T[];
};
};
/**
 * Returns a new object that does not contain a prop property.
 */
// TODO: fix, can't yet calculate type of object minus this key
// dissoc<T,U>(prop: Prop, obj: T & { [prop: Prop]: any }): T; // wanna say the original object is the same with the extra key, but can't bind it to prop
// dissoc<T,U extends Struct<any>>(prop: keyof U, obj: U): T;
// simplified but inferrable: leave the key in
// dissoc<T>(prop: keyof T, obj: T): T;
dissoc(prop: Prop): <T>(obj: T) => T; // mix
// dissoc<T>: CurriedFunction2<keyof T, T, T>;
// dissoc<T>: CurriedFunction2<Prop, T, T>;
// It seems impossible to infer the return type, so this may have to be specified explicitly
dissoc<T>(prop: Prop, obj: Struct<any>): T;
// dissoc(prop: Prop): <T>(obj: Struct<any>) => T; // mix
// dissoc<T>: CurriedFunction2<Prop, Struct<any>, T>;
// dissoc<U extends Struct<any>>(prop: keyof U): <T>(obj: U) => T; // can't do this, don't know U in time
// mixed curry:
dissoc(prop: Prop): {
<T>(obj: T): T; // infer
<T>(obj: Struct<any>): T; // manual
};
/**
 * Makes a shallow clone of an object, omitting the property at the given path.
 */
dissocPath<T>(path: Path, obj: Struct<any>): T;
dissocPath(path: Path): <T>(obj: Struct<any>) => T;
// dissocPath<T>: CurriedFunction2<Path, Struct<any>, T>;
/**
 * Divides two numbers. Equivalent to a / b.
 */
divide(a: number, b: number): number;
divide(a: number): (b: number) => number;
// divide: CurriedFunction2<number, number, number>;
/**
 * Returns a new list containing all but the first n elements of the given list.
 */
drop<T extends List<any>>(n: number, xs: T): T;
drop<T extends List<any>>(n: number): (xs: T) => T;
// drop<T extends List<any>>: CurriedFunction2<number, T, T>;
/**
 * Returns a list containing all but the last n elements of the given list.
 */
// = drop
dropLast<T extends List<any>>(n: number, xs: T): T;
dropLast<T extends List<any>>(n: number): (xs: T) => T;
// dropLast<T extends List<any>>: CurriedFunction2<number, T, T>;
/**
 * Returns a new list excluding all the tailing elements of a given list which satisfy the supplied
 * predicate function. Each value is passed from the right to the predicate, skipping elements while
 * the predicate returns true.
 */
// = dropWhile
dropLastWhile<T, R extends List<T>>(pred: Pred<T>, list: R): T[];
dropLastWhile<T, R extends List<T>>(pred: Pred<T>): (list: R) => T[];
// dropLastWhile<T, R extends List<T>>: CurriedFunction2<Pred<T>, R, T[]>;
/**
 * Returns a new list excluding the leading elements of a given list which satisfy the supplied
 * predicate function: each value is passed to the predicate and skipped while it returns true.
 */
dropWhile<T, R extends List<T>>(pred: Pred<T>, list: R): T[];
dropWhile<T, R extends List<T>>(pred: Pred<T>): (list: R) => T[];
// dropWhile<T, R extends List<T>>: CurriedFunction2<Pred<T>, R, T[]>;
/**
 * A function wrapping calls to the two functions in an || operation, returning the result of the first
 * function if it is truth-y and the result of the second function otherwise. Note that this is
 * short-circuited, meaning that the second function will not be invoked if the first returns a truth-y value.
 */
either<T>(pred1: Pred<T>, pred2: Pred<T>): Pred<T>;
either<T>(pred1: Pred<T>): (pred2: Pred<T>) => Pred<T>;
// either<T>: CurriedFunction2<Pred<T>, Pred<T>, Pred<T>>;
/**
 * Returns the empty value of its argument's type. Ramda defines the empty value of Array ([]), Object ({}),
 * String (''), and Arguments. Other types are supported if they define <Type>.empty and/or <Type>.prototype.empty.
 * Dispatches to the empty method of the first argument, if present.
 */
empty<T>(x: T): T;
/**
 * Takes a function and two values in its domain and returns true if the values map to the same value in the
 * codomain; false otherwise.
 */
// eqBy<T>(fn: (a: T) => T, a: T, b: T): boolean;
// eqBy<T>(fn: (a: T) => T, a: T): (b: T) => boolean;
// eqBy<T>(fn: (a: T) => T): CurriedFunction2<T,T,boolean>;
// eqBy<T>: CurriedFunction3<(a: T) => T, T, T, boolean>;
// base (uncurried) form plus manually-curried object forms below
eqBy<T>(fn: (a: T) => T, a: T, b: T): boolean;
eqBy<T>(fn: (a: T) => T, a: T):{
(b: T): boolean;
};
eqBy<T>(fn: (a: T) => T):{
(a: T, b: T): boolean;
(a: T):{
(b: T): boolean;
};
};
/**
 * Reports whether two objects have the same value for the specified property.
 */
// hard to mix cuz different initial generics?
// more generics
// eqProps<T,U>(prop: Prop, obj1: T, obj2: U): boolean;
// eqProps<T>(prop: Prop, obj1: T): <U>(obj2: U) => boolean;
// eqProps<T,U>(prop: Prop): CurriedFunction2<T,U,boolean>;
// eqProps(prop: Prop): <T,U>(obj1: T, obj2: U) => boolean;
// eqProps<T,U>: CurriedFunction3<Prop, T, U, boolean>;
// less generics
// eqProps(prop: Prop, obj1: any, obj2: any): boolean;
// eqProps(prop: Prop, obj1: any): (obj2: any) => boolean;
// eqProps(prop: Prop): CurriedFunction2<any, any, boolean>;
// eqProps(prop: Prop): (obj1: any, obj2: any) => boolean;
// eqProps(prop: Prop): (obj1: any) => (obj2: any) => boolean;
// eqProps: CurriedFunction3<Prop, any, any, boolean>;
// base
eqProps<T, U>(prop: Prop, obj1: T, obj2: U): boolean;
eqProps<T>(prop: Prop, obj1: T):{
<U>(obj2: U): boolean;
};
eqProps(prop: Prop):{
<T, U>(obj1: T, obj2: U): boolean;
<T>(obj1: T):{
<U>(obj2: U): boolean;
};
};
// less generics
eqProps(prop: Prop, obj1: any, obj2: any): boolean;
eqProps(prop: Prop, obj1: any):{
(obj2: any): boolean;
};
eqProps(prop: Prop):{
(obj1: any, obj2: any): boolean;
(obj1: any):{
(obj2: any): boolean;
};
};
/**
 * Returns true if its arguments are equivalent, false otherwise. Dispatches to an equals method if present.
 * Handles cyclical data structures.
 */
equals<T>(a: T, b: T): boolean;
equals<T>(a: T): (b: T) => boolean;
// equals<T>: CurriedFunction2<T, T, boolean>;
/**
 * Creates a new object by evolving a shallow copy of object, according to the transformation functions.
 */
// hard to mix cuz different generics
// NestedObj
evolve<V>(transformations: NestedObj<(v: any) => any>, obj: V): V;
evolve<V>(transformations: NestedObj<(v: any) => any>): (obj: V) => V;
// evolve<V>: CurriedFunction2<NestedObj<(v: any) => any>, V, V>;
// no inference, manually supply result type
evolve<T>(transformations: Obj<Function>, obj: any): T;
evolve(transformations: Obj<Function>): <T>(obj: any) => T;
// evolve<T>: CurriedFunction2<Obj<Function>, any, T>;
/**
 * A function that always returns false. Any passed in parameters are ignored.
 */
F(): false;
/**
 * Returns a new list containing only those items that match a given predicate function. The predicate function is passed one argument: (value).
 */
// array version
filter<T>(pred: Pred<T>, list: List<T>): T[];
// filter<T>(pred: Pred<T>): (list: List<T>) => T[]; // should disable for mixing, but this somehow makes #73 fail
// filter<T>: CurriedFunction2<Pred<T>, List<T>, T[]>;
// functor to functor
filter<T>(pred: Pred<T>, list: Functor<T>): Functor<T>;
// filter<T>(pred: Pred<T>): (list: Functor<T>) => Functor<T>; // mix
// filter<T>: CurriedFunction2<(value: T) => boolean, Functor<T>, Functor<T>>;
// functor to array
filter<T>(pred: Pred<T>, list: Functor<T>): T[];
// filter<T>(pred: Pred<T>): (list: Functor<T>) => T[]; // mix
// filter<T>: CurriedFunction2<(value: T) => boolean, Functor<T>, T[]>;
// object
// filter<T,U extends Obj<T>>(pred: Pred<T>, obj: U) : Partial<U>;
filter<T>(pred: Pred<T>): <U extends Obj<T>>(obj: U) => U; // mix
// filter<T,U extends Obj<T>>: CurriedFunction2<(value: T) => boolean, U, Partial<U>>;
// mixed curried form covering all of the above variants
filter<T>(pred: Pred<T>): {
(list: List<T>): T[];
(list: Functor<T>): Functor<T>;
(list: Functor<T>): T[];
<U extends Obj<T>>(obj: U): U;
};
/**
* Returns the first element of the list which matches the predicate, or `undefined` if no
* element matches.
*/
find<T>(fn: (a: T) => boolean, list: List<T>): T;
find<T>(fn: (a: T) => boolean): (list: List<T>) => T;
// find<T>: CurriedFunction2<(a: T) => boolean, List<T>, T>;
/**
* Returns the index of the first element of the list which matches the predicate, or `-1`
* if no element matches.
*/
findIndex<T>(fn: (a: T) => boolean, list: List<T>): number;
findIndex<T>(fn: (a: T) => boolean): (list: List<T>) => number;
// findIndex<T>: CurriedFunction2<(a: T) => boolean, List<T>, number>;
/**
* Returns the last element of the list which matches the predicate, or `undefined` if no
* element matches.
*/
findLast<T>(fn: (a: T) => boolean, list: List<T>): T;
findLast<T>(fn: (a: T) => boolean): (list: List<T>) => T;
// findLast<T>: CurriedFunction2<(a: T) => boolean, List<T>, T>;
/**
* Returns the index of the last element of the list which matches the predicate, or
* `-1` if no element matches.
*/
findLastIndex<T>(fn: (a: T) => boolean, list: List<T>): number;
findLastIndex<T>(fn: (a: T) => boolean): (list: List<T>) => number;
// findLastIndex<T>: CurriedFunction2<(a: T) => boolean, List<T>, number>;
/**
* Returns a new list by pulling every item out of it (and all its sub-arrays) and putting
* them in a new array, depth-first.
*/
// flatten<T>(x: ArrayLike<ArrayLike<ArrayLike<ArrayLike<ArrayLike<ArrayLike<ArrayLike<T>>>>>>>): T[];
// flatten<T>(x: ArrayLike<ArrayLike<ArrayLike<ArrayLike<ArrayLike<ArrayLike<T>>>>>>): T[];
// flatten<T>(x: ArrayLike<ArrayLike<ArrayLike<ArrayLike<ArrayLike<T>>>>>): T[];
// flatten<T>(x: ArrayLike<ArrayLike<ArrayLike<ArrayLike<T>>>>): T[];
// flatten<T>(x: ArrayLike<ArrayLike<ArrayLike<T>>>): T[];
// flatten<T>(x: ArrayLike<ArrayLike<T>>): T[];
// flatten<T>(x: ArrayLike<T>): T[];
// TODO: figure out how to handle arrays using different levels of nesting
// flatten<T>(x: ListOfRecursiveArraysOrValues<T>): T[];
flatten<T>(x: NestedArray<T>): T[];
/**
 * Returns a new function much like the supplied one, except that the first two arguments'
 * order is reversed.
 */
// NOTE(review): `arg0?` below makes the flipped function's second argument optional,
// which the runtime does not support (it curries instead) — confirm intended.
flip<T,U,TResult>(fn: (arg0: T, arg1: U) => TResult): (arg1: U, arg0?: T) => TResult;
flip<T,U,Rest,TResult>(fn: (arg0: T, arg1: U, ...args: Rest[]) => TResult): (arg1: U, arg0?: T, ...args: Rest[]) => TResult;
/**
 * Iterate over an input list, calling a provided function fn for each element in the list.
 * Returns the original list.
 */
forEach<T>(fn: (x: T) => void, list: List<T>): T[];
forEach<T>(fn: (x: T) => void): (list: List<T>) => T[];
// forEach<T>: CurriedFunction2<(x: T) => void, List<T>, T[]>;
/**
 * Iterate over an input object, calling a provided function fn for each key and value in the object.
 * Returns the original object.
 */
forEachObjIndexed<T, Inp extends Struct<T>>(fn: (val: T, key: string, obj?: Inp) => void, o: Inp): Inp;
forEachObjIndexed<T, Inp extends Struct<T>>(fn: (val: T, key: string, obj?: Inp) => void): (o: Inp) => Inp;
// forEachObjIndexed<T, Inp extends Struct<T>>: CurriedFunction2<(val: T, key: string, obj?: Inp) => void, Inp, Inp>;
/**
 * Creates a new object out of a list of key-value pairs.
 */
fromPairs<V>(pairs: List<KeyValuePair<Prop, V>>): Obj<V>;
/**
 * Splits a list into sublists stored in an object, based on the result of
 * calling a String-returning function
 * on each element, and grouping the results according to values returned.
 */
groupBy<T>(fn: (a: T) => Prop, list: List<T>): Obj<T[]>;
groupBy<T>(fn: (a: T) => Prop): (list: List<T>) => Obj<T[]>;
// groupBy<T>: CurriedFunction2<(a: T) => Prop, List<T>, Obj<T[]>>;
/**
 * Takes a list and returns a list of lists where each sublist's elements are all "equal" according to the provided equality function
 */
groupWith<T, R extends List<T>>(fn: (x: T, y: T) => boolean, list: R): R[];
groupWith<T, R extends List<T>>(fn: (x: T, y: T) => boolean): (list: R) => R[];
// groupWith<T, R extends List<T>>: CurriedFunction2<(x: T, y: T) => boolean, R, R[]>;
/**
 * Returns true if the first parameter is greater than the second.
 */
gt(a: number, b: number): boolean;
gt(a: number): (b: number) => boolean;
// gt: CurriedFunction2<number, number, boolean>;
/**
 * Returns true if the first parameter is greater than or equal to the second.
 */
gte(a: number, b: number): boolean;
gte(a: number): (b: number) => boolean;
// gte: CurriedFunction2<number, number, boolean>;
/**
 * Returns whether or not an object has an own property with the specified name.
 */
// no generics
has(s: Prop, obj: Struct<any>): boolean;
has(s: Prop): (obj: Struct<any>) => boolean;
// has: CurriedFunction2<Prop, Struct<any>, boolean>;
// // bound generic, hopefully gives a hint as to what goes into obj
// has<T extends Struct<any>>(s: Prop, obj: T): boolean;
// // has(s: Prop): <T extends Struct<any>>(obj: T) => boolean; // mix
// // has<T extends Struct<any>>: CurriedFunction2<Prop, T, boolean>;
// // free generic, helps make a few tests pass. TODO: kill this workaround?
// has<T>(s: Prop, obj: T): boolean;
// // has(s: Prop): <T>(obj: T) => boolean; // mix
// // has<T>: CurriedFunction2<Prop, T, boolean>;
// // mixed
// has(s: Prop): {
// <T extends Struct<any>>(obj: T): boolean;
// <T>(obj: T): boolean;
// }
/**
 * Returns whether or not an object or its prototype chain has a property with the specified name
 */
// = has (same signatures; checks the prototype chain as well)
// no generics
hasIn(s: Prop, obj: Struct<any>): boolean;
hasIn(s: Prop): (obj: Struct<any>) => boolean;
// hasIn: CurriedFunction2<Prop, Struct<any>, boolean>;
// // bound generic, hopefully gives a hint as to what goes into obj
// hasIn<T extends Struct<any>>(s: Prop, obj: T): boolean;
// // hasIn(s: Prop): <T extends Struct<any>>(obj: T) => boolean; // mix
// // hasIn<T extends Struct<any>>: CurriedFunction2<Prop, T, boolean>;
// // free generic, helps make a few tests pass. TODO: kill this workaround?
// hasIn<T>(s: Prop, obj: T): boolean;
// // hasIn(s: Prop): <T>(obj: T) => boolean; // mix
// // hasIn<T>: CurriedFunction2<Prop, T, boolean>;
// // mixed
// hasIn(s: Prop): {
// <T extends Struct<any>>(obj: T): boolean;
// <T>(obj: T): boolean;
// }
/**
 * Returns the first element in a list.
 * In some libraries this function is named `first`.
 * NOTE: the runtime returns `undefined` for an empty list; the overloads below
 * follow this file's convention of omitting `undefined` from return types (see `find`).
 */
// head<T extends List<any>>(list: T): T[0];
// tuple attempts; it doesn't like these.
head<T>(list: [T]): T;
head<T0, T1>(list: [T0, T1]): T0;
head<T0, T1, T2>(list: [T0, T1, T2]): T0;
// General case: plain lists (arrays, array-likes). Placed after the tuple
// overloads so tuples still resolve to their precise element type.
head<T>(list: List<T>): T;
/**
 * Returns true if its arguments are identical, false otherwise. Values are identical if they reference the
 * same memory. NaN is identical to NaN; 0 and -0 are not identical.
 */
identical<T>(a: T, b: T): boolean;
identical<T>(a: T): (b: T) => boolean;
// identical<T>: CurriedFunction2<T, T, boolean>;
/**
 * A function that does nothing but return the parameter supplied to it. Good as a default
 * or placeholder function.
 */
identity<T>(a: T): T;
/**
 * Creates a function that will process either the onTrue or the onFalse function depending upon the result
 * of the condition predicate.
 */
ifElse<T,U,V>(fn: Pred<T>, onTrue: (v: T) => U, onFalse: (v: T) => V): (v: T) => U|V;
// ifElse<T,U,V>: CurriedFunction3<Pred<T>, (v: T) => U, (v: T) => V, (v: T) => U|V>;
/**
 * Increments its argument.
 */
inc(n: number): number;
/**
 * Given a function that generates a key, turns a list of objects into an object indexing the objects
 * by the given key.
 */
indexBy<T>(fn: (a: T) => Prop, list: List<T>): Obj<T>;
indexBy<T>(fn: (a: T) => Prop): (list: List<T>) => Obj<T>;
// indexBy<T>: CurriedFunction2<(a: T) => Prop, List<T>, Obj<T>>;
/**
 * Returns the position of the first occurrence of an item in an array
 * (by strict equality),
 * or -1 if the item is not included in the array.
 */
indexOf<T>(target: T, list: List<T>): number;
indexOf<T>(target: T): (list: List<T>) => number;
// indexOf<T>: CurriedFunction2<T, List<T>, number>;
/**
 * Returns all but the last element of a list.
 * (The return type keeps the input list type T; tuple narrowing is not modeled.)
 */
init<T extends List<any>>(list: T): T;
/**
 * Inserts the supplied element into the list, at index index. Note that
 * this is not destructive: it returns a copy of the list with the changes.
 */
// homogeneous list
// insert<T>(index: number, elt: T, list: List<T>): T[];
// insert<T>(index: number, elt: T): (list: List<T>) => T[];
// insert<T>(index: number): CurriedFunction2<T, List<T>, T[]>;
// insert(index: number): <T>(elt: T, list: List<T>) => T[];
// insert(index: number): <T>(elt: T) => (list: List<T>) => T[];
// insert<T>: CurriedFunction3<number, T, List<T>, T[]>;
// base (fully applied, then each curried prefix)
insert<T>(index: number, elt: T, list: List<T>): T[];
insert<T>(index: number, elt: T):{
(list: List<T>): T[];
};
insert(index: number):{
<T>(elt: T, list: List<T>): T[];
<T>(elt: T):{
(list: List<T>): T[];
};
};
// TODO: tuples?
/**
 * Inserts the sub-list into the list, at index `index`. _Note that this
 * is not destructive_: it returns a copy of the list with the changes.
 */
// homogeneous lists (different types)
// insertAll<T,U>(index: number, elts: List<T>, list: List<U>): Array<T|U>;
// insertAll<T,U>(index: number, elts: List<T>): (list: List<U>) => Array<T|U>;
// insertAll<T,U>(index: number): CurriedFunction2<List<T>, List<U>, Array<T|U>>;
// insertAll(index: number): <T,U>(elts: List<T>, list: List<U>) => Array<T|U>;
// insertAll(index: number): <T>(elts: List<T>) => <U>(list: List<U>) => Array<T|U>;
// insertAll<T>: CurriedFunction3<number, List<T>, List<U>, Array<T|U>>;
// homogeneous lists (same type)
// insertAll<T extends List<any>>(index: number): CurriedFunction2<T, T, T>;
// TODO: allowing either or both arrays to be tuples?
// base (fully applied, then each curried prefix)
insertAll<T, U>(index: number, elts: List<T>, list: List<U>): Array<T|U>;
insertAll<T>(index: number, elts: List<T>):{
<U>(list: List<U>): Array<T|U>;
};
insertAll(index: number):{
<T, U>(elts: List<T>, list: List<U>): Array<T|U>;
<T>(elts: List<T>):{
<U>(list: List<U>): Array<T|U>;
};
};
/**
 * Combines two lists into a set (i.e. no duplicates) composed of those elements common to both lists.
 */
intersection<T,U>(list1: List<T>, list2: List<U>): Array<T|U>;
intersection<T>(list1: List<T>): <U>(list2: List<U>) => Array<T|U>;
// intersection<T,U>: CurriedFunction2<List<T>, List<U>, Array<T|U>>;
/**
 * Combines two lists into a set (i.e. no duplicates) composed of those
 * elements common to both lists. Duplication is determined according
 * to the value returned by applying the supplied predicate to two list
 * elements.
 */
// intersectionWith<T>(pred: (a: T, b: T) => boolean, list1: List<T>, list2: List<T>): T[];
// intersectionWith<T>(pred: (a: T, b: T) => boolean): CurriedFunction2<List<T>, List<T>, T[]>;
// intersectionWith<T>: CurriedFunction3<(a: T, b: T) => boolean, List<T>, List<T>, T[]>;
// base (fully applied, then each curried prefix)
intersectionWith<T>(pred: (a: T, b: T) => boolean, list1: List<T>, list2: List<T>): T[];
intersectionWith<T>(pred: (a: T, b: T) => boolean, list1: List<T>):{
(list2: List<T>): T[];
};
intersectionWith<T>(pred: (a: T, b: T) => boolean):{
(list1: List<T>, list2: List<T>): T[];
(list1: List<T>):{
(list2: List<T>): T[];
};
};
/**
 * Creates a new list with the separator interposed between elements.
 */
intersperse<T>(separator: T, list: List<T>): T[];
intersperse<T>(separator: T): (list: List<T>) => T[];
// intersperse<T>: CurriedFunction2<T, List<T>, T[]>;
/**
 * Transforms the items of the list with the transducer and appends the transformed items to the accumulator
 * using an appropriate iterator function based on the accumulator type.
 */
// into<T,U,V extends AccOpts<T,U>>(acc: V, xf: (list: List<T>) => U, list: List<T>): U;
// into<T,U,V extends AccOpts<T,U>>(acc: V, xf: (list: List<T>) => U): (list: List<T>) => U;
// into<T,U,V extends AccOpts<T,U>>(acc: V): CurriedFunction2<(list: List<T>) => U, List<T>, U>;
// into<T,U,V extends AccOpts<T,U>>(acc: V): (xf: (list: List<T>) => U, list: List<T>) => U;
// into<T,U,V extends AccOpts<T,U>>(acc: V): (xf: (list: List<T>) => U) => (list: List<T>) => U;
// into<T,U,V extends AccOpts<T,U>>: CurriedFunction3<V, (list: List<T>) => U, List<T>, U>;
// base
into<V extends AccOpts<T, U>, T, U>(acc: V, xf: (list: List<T>) => U, list: List<T>): U;
into<V extends AccOpts<T, U>, T, U>(acc: V, xf: (list: List<T>) => U):{
(list: List<T>): U;
};
// NOTE(review): the inner <T, U> below shadow the outer type parameters of the
// same name — presumably deliberate so they are inferred at the later call; confirm.
into<V extends AccOpts<T, U>, T, U>(acc: V):{
<T, U>(xf: (list: List<T>) => U, list: List<T>): U;
<T, U>(xf: (list: List<T>) => U):{
(list: List<T>): U;
};
};
/**
 * Same as R.invertObj, however this accounts for objects with duplicate values by putting the values into an array.
 */
invert(obj: Struct<Prop>): Obj<List<string>>;
/**
 * Returns a new object with the keys of the given object as values, and the values of the given object as keys.
 */
invertObj(obj: Struct<Prop>): Obj<string>;
/**
 * Turns a named method of an object (or object prototype) into a function that can be
 * called directly. Passing the optional `len` parameter restricts the returned function to
 * the initial `len` parameters of the method.
 *
 * The returned function is curried and accepts `len + 1` parameters (or `method.length + 1`
 * when `len` is not specified), and the final parameter is the target object.
 */
// with keyof -- currently can't seem do to function application like this yet
// invoker<T, K extends keyof T, R> (len: number /* = 0 */, name: K, obj: T): obj[K]();
// // invoker<T, K extends keyof T, R>: CurriedFunction3<number /* = 0 */, K, T, obj[K]()>;
// invoker<T, K extends keyof T, P1, R>(len: number /* = 1 */, name: K, x1: P1, obj: T): obj[K](x1);
// // invoker<T, K extends keyof T, P1, R>: CurriedFunction4<number /* = 0 */, K, P1, T, obj[K](P1)>;
// invoker<T, K extends keyof T, P1, P2, R>(len: number /* = 2 */, name: K, x1: P1, x2: P2, obj: T): obj[K](x1, x2);
// // invoker<T, K extends keyof T, P1, P2, R>: CurriedFunction5<number /* = 0 */, K, P1, P2, T, obj[K](P1, P2)>;
// invoker<T, K extends keyof T, P1, P2, P3, R>(len: number /* = 3 */, name: K, x1: P1, x2: P2, x3: P3, obj: T): obj[K](x1, x2, x3);
// // invoker<T, K extends keyof T, P1, P2, P3, R>: CurriedFunction6<number /* = 0 */, K, P1, P2, P3, T, obj[K](P1, P2, P3)>;
// invoker<T, K extends keyof T, P1, P2, P3, P4, R>(len: number /* = 4 */, name: K, x1: P1, x2: P2, x3: P3, x4: P4, obj: T): obj[K](x1, x2, x3, x4);
// // invoker<T, K extends keyof T, P1, P2, P3, P4, R>: CurriedFunction7<number /* = 0 */, K, P1, P2, P3, P4, T, obj[K](P1, P2, P3, P4)>;
// invoker<T, K extends keyof T, P1, P2, P3, P4, P5, R>(len: number /* = 5 */, name: K, x1: P1, x2: P2, x3: P3, x4: P4, x5: P5, obj: T): obj[K](x1, x2, x3, x4, x5);
// // invoker<T, K extends keyof T, P1, P2, P3, P4, P5, R>: CurriedFunction8<number /* = 0 */, K, P1, P2, P3, P4, P5, T, obj[K](P1, P2, P3, P4, P5)>;
// manually type results (caller supplies R since the method's return type can't be derived)
invoker<T, R> (len: number /* = 0 */, name: Prop, obj: T): R;
invoker<T, R> (len: number/* = 0 */, name: Prop): (obj: T) => R;
invoker<T, P1, R>(len: number /* = 1 */, name: Prop, x1: P1, obj: T): R;
invoker<T, P1, R>(len: number /* = 1 */, name: Prop): CurriedFunction2<P1, T, R>;
invoker<T, P1, P2, R>(len: number /* = 2 */, name: Prop, x1: P1, x2: P2, obj: T): R;
invoker<T, P1, P2, R>(len: number /* = 2 */, name: Prop): CurriedFunction3<P1, P2, T, R>;
invoker<T, P1, P2, P3, R>(len: number /* = 3 */, name: Prop, x1: P1, x2: P2, x3: P3, obj: T): R;
invoker<T, P1, P2, P3, R>(len: number /* = 3 */, name: Prop): CurriedFunction4<P1, P2, P3, T, R>;
invoker<T, P1, P2, P3, P4, R>(len: number /* = 4 */, name: Prop, x1: P1, x2: P2, x3: P3, x4: P4, obj: T): R;
invoker<T, P1, P2, P3, P4, R>(len: number /* = 4 */, name: Prop): CurriedFunction5<P1, P2, P3, P4, T, R>;
invoker<T, P1, P2, P3, P4, P5, R>(len: number /* = 5 */, name: Prop, x1: P1, x2: P2, x3: P3, x4: P4, x5: P5, obj: T): R;
invoker<T, P1, P2, P3, P4, P5, R>(len: number /* = 5 */, name: Prop): CurriedFunction6<P1, P2, P3, P4, P5, T, R>;
/**
 * See if an object (`val`) is an instance of the supplied constructor.
 * This function will check up the inheritance chain, if any.
 */
is<T>(ctor: Type<T>, val: any): val is T;
is<T>(ctor: Type<T>): (val: any) => val is T;
// is<T>: CurriedFunction2<T, any, val is T>; // um, val undefined
/**
 * Tests whether or not an object is similar to an array.
 * @deprecated: 0.23.0
 */
isArrayLike(val: any): val is List<any>;
// isArrayLike(val: any): boolean;
/**
 * Reports whether the list has zero elements.
 */
isEmpty(value: any): boolean;
/**
 * Returns true if the input value is NaN.
 */
isNaN(x: any): boolean;
/**
 * Checks if the input value is null or undefined.
 */
isNil(value: any): boolean;
/**
 * Returns a string made by inserting the `separator` between each
 * element and concatenating all the elements into a single string.
 */
join(x: Prop, xs: Array<any>): string;
join(x: Prop): (xs: Array<any>) => string;
// join: CurriedFunction2<Prop, Array<any>, string>;
/**
 * Applies a list of functions to a list of values.
 */
juxt<T,U>(fns: {(...args: T[]): U}[]): (...args: T[]) => U[];
/**
 * Returns a list containing the names of all the enumerable own
 * properties of the supplied object.
 */
keys(x: Struct<any>): string[];
/**
 * Returns a list containing the names of all the
 * properties of the supplied object, including prototype properties.
 */
keysIn(obj: Struct<any>): string[];
/**
 * Returns the last element from a list.
 */
last<T, R extends List<T>>(list: R): T;
/**
 * Returns the position of the last occurrence of an item (by strict equality) in
 * an array, or -1 if the item is not included in the array.
 */
// = indexOf (same signatures, searches from the end)
lastIndexOf<T>(target: T, list: List<T>): number;
lastIndexOf<T>(target: T): (list: List<T>) => number;
// lastIndexOf<T>: CurriedFunction2<T, List<T>, number>;
/**
 * Returns the number of elements in the array by returning list.length.
 */
length(list: List<any>): number;
/**
 * Returns a lens for the given getter and setter functions. The getter
 * "gets" the value of the focus; the setter "sets" the value of the focus.
 * The setter should not mutate the data structure.
 */
// hard to mix cuz different generics
// assume setter doesn't change the type
lens<V, U extends Struct<any>>(getter: (s: U) => V, setter: (a: V, s: U) => U): ManualLens<V>;
lens<V, U extends Struct<any>>(getter: (s: U) => V): (setter: (a: V, s: U) => U) => ManualLens<V>;
lens<V>(getter: (s: Struct<any>) => V): <U extends Struct<any>>(setter: (a: V, s: U) => U) => ManualLens<V>;
// ^ ignore getter param being `U` so I can get away with 1 manual generic rather than having to add the inferred `U`. Useful if the getter doesn't have an explicit return type.
// lens<V, U extends Struct<any>>: CurriedFunction2<(s: U) => V, (a: V, s: U) => U, ManualLens<V>>;
// allows setter to change value type
lens<T,U,V>(getter: (s: T) => U, setter: (a: U, s: T) => V): Lens<T,U>;
lens<T,U,V>(getter: (s: T) => U): (setter: (a: U, s: T) => V) => Lens<T,U>;
// lens<T,U,V>: CurriedFunction2<(s: T) => U, (a: U, s: T) => V, Lens<T,U>>;
/**
 * Creates a lens that will focus on index n of the source array.
 */
// lensIndex<T, K extends keyof T>(n: K): KeyLens<T, K>;
lensIndex<T>(n: number): ManualLens<T>;
lensIndex(n: number): UnknownLens;
/**
 * Returns a lens whose focus is the specified path.
 * See also view, set, over.
 */
lensPath<T>(path: Path): ManualLens<T>;
lensPath(path: Path): UnknownLens;
/**
 * lensProp creates a lens that will focus on property k of the source object.
 */
// lensProp<T, K extends keyof T>(n: K): KeyLens<T, K>;
lensProp<T>(prop: Prop): ManualLens<T>;
lensProp(prop: Prop): UnknownLens;
/**
 * "lifts" a function of arity > 1 so that it may "map over" a list, Function or other object that satisfies
 * the FantasyLand Apply spec.
 * One overload per arity up to 8, plus a variadic catch-all.
 */
lift<TResult>(fn: () => TResult): () => TResult[];
lift<T1, TResult>(fn: (v1: T1) => TResult): (v1: List<T1>) => TResult[];
lift<T1, T2, TResult>(fn: (v1: T1, v2: T2) => TResult): (v1: List<T1>, v2: List<T2>) => TResult[];
lift<T1, T2, T3, TResult>(fn: (v1: T1, v2: T2, v3: T3) => TResult): (v1: List<T1>, v2: List<T2>, v3: List<T3>) => TResult[];
lift<T1, T2, T3, T4, TResult>(fn: (v1: T1, v2: T2, v3: T3, v4: T4) => TResult): (v1: List<T1>, v2: List<T2>, v3: List<T3>, v4: List<T4>) => TResult[];
lift<T1, T2, T3, T4, T5, TResult>(fn: (v1: T1, v2: T2, v3: T3, v4: T4, v5: T5) => TResult): (v1: List<T1>, v2: List<T2>, v3: List<T3>, v4: List<T4>, v5: List<T5>) => TResult[];
lift<T1, T2, T3, T4, T5, T6, TResult>(fn: (v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6) => TResult): (v1: List<T1>, v2: List<T2>, v3: List<T3>, v4: List<T4>, v5: List<T5>, v6: List<T6>) => TResult[];
lift<T1, T2, T3, T4, T5, T6, T7, TResult>(fn: (v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7) => TResult): (v1: List<T1>, v2: List<T2>, v3: List<T3>, v4: List<T4>, v5: List<T5>, v6: List<T6>, v7: List<T7>) => TResult[];
lift<T1, T2, T3, T4, T5, T6, T7, T8, TResult>(fn: (v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8) => TResult): (v1: List<T1>, v2: List<T2>, v3: List<T3>, v4: List<T4>, v5: List<T5>, v6: List<T6>, v7: List<T7>, v8: List<T8>) => TResult[];
lift<T>(fn: Variadic<T>): (...argLists: any[][]) => T[];
/**
 * "lifts" a function to be the specified arity, so that it may "map over" that many lists, Functions or other
 * objects that satisfy the FantasyLand Apply spec.
 * Same overload scheme as `lift`, with the arity `n` as first argument; the
 * bare `liftN(n)` form returns the full overload set for later application.
 */
liftN<TResult>(n: number, fn: () => TResult): () => TResult[];
liftN<T1, TResult>(n: number, fn: (v1: T1) => TResult): (v1: List<T1>) => TResult[];
liftN<T1, T2, TResult>(n: number, fn: (v1: T1, v2: T2) => TResult): (v1: List<T1>, v2: List<T2>) => TResult[];
liftN<T1, T2, T3, TResult>(n: number, fn: (v1: T1, v2: T2, v3: T3) => TResult): (v1: List<T1>, v2: List<T2>, v3: List<T3>) => TResult[];
liftN<T1, T2, T3, T4, TResult>(n: number, fn: (v1: T1, v2: T2, v3: T3, v4: T4) => TResult): (v1: List<T1>, v2: List<T2>, v3: List<T3>, v4: List<T4>) => TResult[];
liftN<T1, T2, T3, T4, T5, TResult>(n: number, fn: (v1: T1, v2: T2, v3: T3, v4: T4, v5: T5) => TResult): (v1: List<T1>, v2: List<T2>, v3: List<T3>, v4: List<T4>, v5: List<T5>) => TResult[];
liftN<T1, T2, T3, T4, T5, T6, TResult>(n: number, fn: (v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6) => TResult): (v1: List<T1>, v2: List<T2>, v3: List<T3>, v4: List<T4>, v5: List<T5>, v6: List<T6>) => TResult[];
liftN<T1, T2, T3, T4, T5, T6, T7, TResult>(n: number, fn: (v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7) => TResult): (v1: List<T1>, v2: List<T2>, v3: List<T3>, v4: List<T4>, v5: List<T5>, v6: List<T6>, v7: List<T7>) => TResult[];
liftN<T1, T2, T3, T4, T5, T6, T7, T8, TResult>(n: number, fn: (v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8) => TResult): (v1: List<T1>, v2: List<T2>, v3: List<T3>, v4: List<T4>, v5: List<T5>, v6: List<T6>, v7: List<T7>, v8: List<T8>) => TResult[];
liftN(n: number): {
<TResult>(fn: () => TResult): () => TResult[];
<T1, TResult>(fn: (v1: T1) => TResult): (v1: List<T1>) => TResult[];
<T1, T2, TResult>(fn: (v1: T1, v2: T2) => TResult): (v1: List<T1>, v2: List<T2>) => TResult[];
<T1, T2, T3, TResult>(fn: (v1: T1, v2: T2, v3: T3) => TResult): (v1: List<T1>, v2: List<T2>, v3: List<T3>) => TResult[];
<T1, T2, T3, T4, TResult>(fn: (v1: T1, v2: T2, v3: T3, v4: T4) => TResult): (v1: List<T1>, v2: List<T2>, v3: List<T3>, v4: List<T4>) => TResult[];
<T1, T2, T3, T4, T5, TResult>(fn: (v1: T1, v2: T2, v3: T3, v4: T4, v5: T5) => TResult): (v1: List<T1>, v2: List<T2>, v3: List<T3>, v4: List<T4>, v5: List<T5>) => TResult[];
<T1, T2, T3, T4, T5, T6, TResult>(fn: (v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6) => TResult): (v1: List<T1>, v2: List<T2>, v3: List<T3>, v4: List<T4>, v5: List<T5>, v6: List<T6>) => TResult[];
<T1, T2, T3, T4, T5, T6, T7, TResult>(fn: (v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7) => TResult): (v1: List<T1>, v2: List<T2>, v3: List<T3>, v4: List<T4>, v5: List<T5>, v6: List<T6>, v7: List<T7>) => TResult[];
<T1, T2, T3, T4, T5, T6, T7, T8, TResult>(fn: (v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8) => TResult): (v1: List<T1>, v2: List<T2>, v3: List<T3>, v4: List<T4>, v5: List<T5>, v6: List<T6>, v7: List<T7>, v8: List<T8>) => TResult[];
<T>(fn: Variadic<T>): (...argLists: any[][]) => T[];
};
liftN<T>(n: number, fn: Variadic<T>): (...argLists: any[][]) => T[];
// liftN<T>: CurriedFunction2<number, Variadic<T>, (...argLists: any[][]) => T[]>;
/**
 * Returns true if the first parameter is less than the second.
 */
lt(a: number, b: number): boolean;
lt(a: number): (b: number) => boolean;
// lt: CurriedFunction2<number, number, boolean>;
/**
 * Returns true if the first parameter is less than or equal to the second.
 */
lte(a: number, b: number): boolean;
lte(a: number): (b: number) => boolean;
// lte: CurriedFunction2<number, number, boolean>;
/**
 * Returns a new list, constructed by applying the supplied function to every element of the supplied list.
 */
// homogeneous:
// array-like
map<T, U>(fn: (x: T) => U, list: List<T>): U[];
// map<T, U>(fn: (x: T) => U): (list: List<T>) => U[]; // disabling for mix breaks a few tests?
// map<T, U>: CurriedFunction2<(x: T) => U, List<T>, U[]>;
// object: keyof version (duplicate overload removed — this line was declared twice)
map<T, U, M extends Obj<T>>(fn: (value: T) => U, obj: M): Obj<U>;
// map<T, U>(fn: (value: T) => U): <M extends Obj<T>>(obj: M) => {[K in keyof M]: U}; // mix
// map<T, U, M extends Obj<T>>: CurriedFunction2<(value: T) => U, M, {[K in keyof M]: U}>;
// object: Record version
map<T, U, K extends string>(f: (x: T) => U, obj:Obj<T>): Obj<U>;
// map<T, U>(f: (x: T) => U): <K extends string>(obj: Record<K, T>) => Record<K, U>; // mix
// map<T, U, K extends string>: CurriedFunction2<(x: T) => U, Record<K, T>), Record<K, U>>;
// functor
map<T, U>(fn: (x: T) => U, obj: Functor<T>): Functor<U>;
// map<T, U>(fn: (x: T) => U): (obj: Functor<T>) => Functor<U>; // mix
// map<T, U>: CurriedFunction2<(x: T) => U, Functor<T>, Functor<U>>;
// separating values: https://github.com/Microsoft/TypeScript/issues/12342
// map<A,B,T,U>(fn: (a: A) => B, tpl: [T,U]): [ typeof fn(T), typeof fn(U) ];
// obj. version?
// TODO: heterogeneous versions
// array-like
// map<T, U, T1, T2>(fn: (x: T) => U, list: [T1, T2]): [fn(T1), fn(T1)];
// map<F extends Function, T1, T2>(fn: F, list: [T1, T2]): [F(T1), F(T1)];
// map<T, U, T1, T2>(fn: (x: T) => U, list: [T1, T2]): [typeof fn(T1), typeof fn(T1)];
// map<F extends Function, T1, T2>(fn: F, list: [T1, T2]): [typeof F(T1), typeof F(T1)];
// <T1, T2>(list: [T1, T2]): [fn(T1), fn(T1)];
// object
// mixed:
map<T, U>(fn: (x: T) => U): {
<M extends Obj<T>>(obj: M): Obj<U>;
<K extends string>(obj: Obj<T>): Obj<U>;
(obj: Functor<T>): Functor<U>;
(list: List<T>): U[];
};
/**
 * The mapAccum function behaves like a combination of map and reduce.
 */
// mapAccum<T, U, TResult>(fn: (acc: U, value: T) => [U, TResult], acc: U, list: List<T>): [U, TResult[]];
// mapAccum<T, U, TResult>(fn: (acc: U, value: T) => [U, TResult], acc: U): (list: List<T>) => [U, TResult[]];
// mapAccum<T, U, TResult>(fn: (acc: U, value: T) => [U, TResult]): CurriedFunction2<U,List<T>,[U, TResult[]]>;
// mapAccum<T, U, TResult>: CurriedFunction3<(acc: U, value: T) => [U, TResult], U, List<T>, [U, TResult[]]>;
// base (fully applied, then each curried prefix)
mapAccum<T, U, TResult>(fn: (acc: U, value: T) => [U, TResult], acc: U, list: List<T>): [U, TResult[]];
mapAccum<T, U, TResult>(fn: (acc: U, value: T) => [U, TResult], acc: U):{
(list: List<T>): [U, TResult[]];
};
mapAccum<T, U, TResult>(fn: (acc: U, value: T) => [U, TResult]):{
(acc: U, list: List<T>): [U, TResult[]];
(acc: U):{
(list: List<T>): [U, TResult[]];
};
};
/**
 * The mapAccumRight function behaves like a combination of map and reduce.
 */
// mapAccumRight<T, U, TResult>(fn: (value: T, acc: U) => [TResult, U], acc: U, list: List<T>): [TResult[], U];
// mapAccumRight<T, U, TResult>(fn: (value: T, acc: U) => [TResult, U], acc: U): (list: List<T>) => [TResult[], U];
// mapAccumRight<T, U, TResult>(fn: (value: T, acc: U) => [TResult, U]): CurriedFunction2<U, List<T>, [TResult[], U]>;
// mapAccumRight<T, U, TResult>: CurriedFunction3<(value: T, acc: U) => [TResult, U], U, List<T>, [TResult[], U]>;
// base (fully applied, then each curried prefix)
mapAccumRight<T, U, TResult>(fn: (value: T, acc: U) => [TResult, U], acc: U, list: List<T>): [TResult[], U];
mapAccumRight<T, U, TResult>(fn: (value: T, acc: U) => [TResult, U], acc: U):{
(list: List<T>): [TResult[], U];
};
mapAccumRight<T, U, TResult>(fn: (value: T, acc: U) => [TResult, U]):{
(acc: U, list: List<T>): [TResult[], U];
(acc: U):{
(list: List<T>): [TResult[], U];
};
};
/**
 * Like map, but passes additional parameters to the mapping function.
 */
mapIndexed<T, U, V extends List<T>>(fn: (val: T, key: number, list: V) => U, list: V): U[];
mapIndexed<T, U, V extends List<T>>(fn: (val: T, key: number, list: V) => U): (list: V) => U[];
// mapIndexed<T, U, V extends List<T>>: CurriedFunction2<(val: T, key: number, list: V) => U, V, U[]>;
/**
 * Like mapObj, but passes additional arguments to the mapping function.
 */
// hard to mix cuz different generics
// keyof
mapObjIndexed<T, V, M extends Obj<T>>(fn: (value: T, key: string, obj?: M) => V, obj: M): Obj<V>;
mapObjIndexed<T, V, M extends Obj<T>>(fn: (value: T, key: string, obj?: M) => V): (obj: M) => Obj<V>;
// mapObjIndexed<T, V, M extends Obj<T>>: CurriedFunction2<(value: T, key: string, obj?: M) => V, M, {[K in keyof M]: V}>;
// Record
mapObjIndexed<T, U, K extends string>(f: (value: T, key: string, obj?: Record<K, T>) => U, obj: Obj<T>): Obj<U>;
mapObjIndexed<T, U, K extends string>(f: (value: T, key: string, obj?: Record<K, T>) => U): <K extends string>(obj: Obj<T>) => Obj<U>; // potentially overwriting K but whatever
// mapObjIndexed<T, U, K extends string>: CurriedFunction2<(value: T, key: string, obj?: Record<K, T>) => U, Record<K, T>), Record<K, U>>;
/**
* Tests a regular expression agains a String
*/
match(regexp: RegExp, str: string): string[];
match(regexp: RegExp): (str: string) => string[];
// match: CurriedFunction2<RegExp, string, string[]>;
/**
* mathMod behaves like the modulo operator should mathematically, unlike the `%`
* operator (and by extension, R.modulo). So while "-17 % 5" is -2,
* mathMod(-17, 5) is 3. mathMod requires Integer arguments, and returns NaN
* when the modulus is zero or negative.
*/
// |NaN? what's its type?
mathMod(a: number, b: number): number;
mathMod(a: number): (b: number) => number;
// mathMod: CurriedFunction2<number, number, number>;
/**
* Returns the larger of its two arguments.
*/
max<T extends Ord>(a: T, b: T): T;
max<T extends Ord>(a: T): (b: T) => T;
// max<T extends Ord>: CurriedFunction2<T, T, T>;
/**
* Takes a function and two values, and returns whichever value produces
* the larger result when passed to the provided function.
*/
// maxBy<T>(keyFn: (a: T) => Ord, a: T, b: T): T;
// maxBy<T>(keyFn: (a: T) => Ord): CurriedFunction2<T, T, T>;
// maxBy<T>: CurriedFunction3<(a: T) => Ord, T, T, T>;
// base
maxBy<T>(keyFn: (a: T) => Ord, a: T, b: T): T;
maxBy<T>(keyFn: (a: T) => Ord, a: T):{
(b: T): T;
};
maxBy<T>(keyFn: (a: T) => Ord):{
(a: T, b: T): T;
(a: T):{
(b: T): T;
};
};
/**
* Returns the mean of the given list of numbers.
*/
mean(list: List<number>): number;
/**
* Returns the median of the given list of numbers.
*/
median(list: List<number>): number;
/**
* Creates a new function that, when invoked, caches the result of calling fn for a given argument set and
* returns the result. Subsequent calls to the memoized fn with the same argument set will not result in an
* additional call to fn; instead, the cached result for that set of arguments will be returned.
*/
memoize<T>(fn: Variadic<T>): Variadic<T>;
/**
 * Create a new object with the own properties of a
 * merged with the own properties of object b.
 * This function will *not* mutate passed-in objects.
 */
merge<T1 extends Struct<any>, T2 extends Struct<any>>(a: T1, b: T2): T1 & T2;
// curried: T2 is deferred to the inner call so the second object's type is inferred at application time
merge<T1 extends Struct<any>>(a: T1): <T2 extends Struct<any>>(b: T2) => T1 & T2;
// merge<T1 extends Struct<any>, T2 extends Struct<any>>: CurriedFunction2<T1, T2, T1 & T2>;
/**
 * Merges a list of objects together into one object.
 */
// NOTE(review): the result type T is purely caller-asserted here; nothing ties it to the
// `List<any>` contents, so callers must supply the expected shape themselves.
mergeAll<T>(list: List<any>): T;
/**
 * Creates a new object with the own properties of the two provided objects. If a key exists in both objects,
 * the provided function is applied to the values associated with the key in each object, with the result being used as
 * the value associated with the key in the returned object. The key will be excluded from the returned object if the
 * resulting value is undefined.
 */
// mergeWith<U,V>(fn: (x: any, z: any) => any, a: U, b: V): U & V;
// mergeWith<U>(fn: (x: any, z: any) => any, a: U): <V>(b: V) => U & V;
// // mergeWith(fn: (x: any, z: any) => any): <U,V>(a: U, b: V) => U & V;
// mergeWith<U,V>(fn: (x: any, z: any) => any): CurriedFunction2<U,V,U&V>;
// // mergeWith<U,V>: CurriedFunction3<(x: any, z: any) => any, U, V, U & V>;
// base
// fully applied
mergeWith<U, V>(fn: (x: any, z: any) => any, a: U, b: V): U & V;
// fn and first object supplied; V is deferred to the inner call
mergeWith<U>(fn: (x: any, z: any) => any, a: U):{
<V>(b: V): U & V;
};
// fn only: the returned object is curried over the two operands
mergeWith(fn: (x: any, z: any) => any):{
<U, V>(a: U, b: V): U & V;
<U>(a: U):{
<V>(b: V): U & V;
};
};
/**
 * Creates a new object with the own properties of the two provided objects. If a key exists in both objects,
 * the provided function is applied to the key and the values associated with the key in each object, with the
 * result being used as the value associated with the key in the returned object. The key will be excluded from
 * the returned object if the resulting value is undefined.
 */
// mergeWithKey<U,V>(fn: (str: string, x: any, z: any) => any, a: U, b: V): U & V;
// mergeWithKey<U>(fn: (str: string, x: any, z: any) => any, a: U): <V>(b: V) => U & V;
// // mergeWithKey(fn: (str: string, x: any, z: any) => any): <U,V>(a: U, b: V) => U & V;
// mergeWithKey<U,V>(fn: (str: string, x: any, z: any) => any): CurriedFunction2<U,V,U&V>;
// // mergeWithKey<U,V>: CurriedFunction3<(str: string, x: any, z: any) => any, U, V, U & V>;
// mergeWithKey
// same currying structure as mergeWith, but fn additionally receives the conflicting key
mergeWithKey<U, V>(fn: (str: string, x: any, z: any) => any, a: U, b: V): U & V;
mergeWithKey<U>(fn: (str: string, x: any, z: any) => any, a: U):{
<V>(b: V): U & V;
};
mergeWithKey(fn: (str: string, x: any, z: any) => any):{
<U, V>(a: U, b: V): U & V;
<U>(a: U):{
<V>(b: V): U & V;
};
};
/**
 * Returns the smaller of its two arguments.
 */
min<T extends Ord>(a: T, b: T): T;
// curried form
min<T extends Ord>(a: T): (b: T) => T;
// min<T extends Ord>: CurriedFunction2<T, T, T>;
/**
 * Takes a function and two values, and returns whichever value produces
 * the smaller result when passed to the provided function.
 */
// minBy<T>(keyFn: (a: T) => Ord, a: T, b: T): T;
// minBy<T>(keyFn: (a: T) => Ord): CurriedFunction2<T, T, T>;
// // minBy<T>: CurriedFunction3<(a: T) => Ord, T, T, T>;
// base
// fully applied
minBy<T>(keyFn: (a: T) => Ord, a: T, b: T): T;
// keyFn and first value supplied, awaiting the second value
minBy<T>(keyFn: (a: T) => Ord, a: T):{
(b: T): T;
};
// keyFn only: result is curried over the two values
minBy<T>(keyFn: (a: T) => Ord):{
(a: T, b: T): T;
(a: T):{
(b: T): T;
};
};
/**
 * Divides the first parameter by the second and returns the remainder
 * (i.e. `a % b`). The flipped version (`moduloBy`) may be more useful curried.
 * Note that this function preserves the JavaScript-style behavior for
 * modulo (the result takes the sign of the dividend). For mathematical
 * modulo see `mathMod`.
 */
modulo(a: number, b: number): number;
// curried: supply the dividend first
modulo(a: number): (b: number) => number;
// modulo: CurriedFunction2<number, number, number>;
/**
 * Multiplies two numbers. Equivalent to a * b but curried.
 */
multiply(a: number, b: number): number;
multiply(a: number): (b: number) => number;
// multiply: CurriedFunction2<number, number, number>;
/**
 * Wraps a function of any arity (including nullary) in a function that accepts exactly n parameters.
 * Any extraneous parameters will not be passed to the supplied function.
 */
nAry<T>(n: number, fn: Variadic<T>): Variadic<T>;
// curried: T is deferred to the inner call so the wrapped function's return type is inferred there
nAry(n: number): <T>(fn: Variadic<T>) => Variadic<T>;
// nAry<T>: CurriedFunction2<number, Variadic<T>, Variadic<T>>;
/**
 * Negates its argument.
 */
negate(n: number): number;
/**
 * Returns true if no elements of the list match the predicate, false otherwise.
 */
none<T>(fn: (a: T) => boolean, list: List<T>): boolean;
// curried form
none<T>(fn: (a: T) => boolean): (list: List<T>) => boolean;
// none<T>: CurriedFunction2<(a: T) => boolean, List<T>, boolean>;
/**
 * A function wrapping a call to the given function in a `!` operation. It will return `true` when the
 * underlying function would return a false-y value, and `false` when it would return a truth-y one.
 */
not(value: any): boolean;
/**
 * Returns the nth element in a list.
 */
// NOTE(review): the return is typed T, but an out-of-range index yields undefined at runtime.
nth<T>(n: number, list: List<T>): T;
nth(n: number): <T>(list: List<T>) => T;
// nth<T>: CurriedFunction2<number, List<T>, T>;
/**
 * Returns a function which returns its nth argument.
 */
nthArg(n: number): <T>(...a: T[]) => T;
/**
 * Creates an object containing a single key:value pair.
 */
// Record-based, key intact
// NOTE(review): the result type T is caller-asserted — `T extends Obj<V>` does not force the
// single key K into the result; callers relying on key tracking must supply T explicitly.
objOf<K extends string, V, T extends Obj<V>>(key: K, value: V): T;
objOf<K extends string>(key: K): <V, T extends Obj<V>>(value: V) => T;
// objOf<K extends string, V, T extends Record<K,V>>: CurriedFunction2<K, V, T>;
// // Obj-based, loses key
// objOf<T>(key: Prop, value: T): Obj<T>;
// objOf(key: Prop): <T>(value: T) => Obj<T>;
// // objOf<T>: CurriedFunction2<Prop, T, Obj<T>>;
/**
 * Returns a singleton array containing the value provided.
 */
of<T>(x: T): T[];
/**
 * Returns a partial copy of an object omitting the keys specified.
 */
// NOTE(review): the result is typed T even though the named keys are removed at runtime;
// a Pick/Omit-based signature would be more precise but would change the declared interface.
omit<T>(names: List<Prop>, obj: T): T;
omit(names: List<Prop>): <T>(obj: T) => T;
// omit<T>: CurriedFunction2<List<Prop>, T, T>;
/**
 * Accepts a function fn and returns a function that guards invocation of fn such that fn can only ever be
 * called once, no matter how many times the returned function is invoked. The first value calculated is
 * returned in subsequent invocations.
 */
once<T>(fn: Variadic<T>): Variadic<T>;
/**
 * A function that returns the first truthy of two arguments otherwise the last argument. Note that this is
 * NOT short-circuited, meaning that if expressions are passed they are both evaluated.
 * Dispatches to the or method of the first argument if applicable.
 */
// hard to mix cuz different generics
// values
// plain-value overloads: result is the union of both operand types
or<T, U>(a: T, b: U): T|U;
or<T>(a: T): <U>(b: U) => T|U;
// or<T, U>: CurriedFunction2<T, U, T|U>;
// dispatch to some `or` method:
// object-dispatch overloads: the first argument may carry its own `or` method
or<T extends {or?: (alt: U) => T|U;}, U>(fn1: T, val2: U): T|U;
or<T extends {or?: (alt: U) => T|U;}, U>(fn1: T): (val2: U) => T|U;
// or<T extends {or?: (alt: U) => T|U;}, U>: CurriedFunction2<T, U, T|U>;
/**
 * Returns the result of "setting" the portion of the given data structure
 * focused by the given lens to the given value.
 */
// hard to mix cuz different generics
// key lens:
// NOTE(review): the K type parameter is unused in these three key-lens overloads; it is kept
// as declared since removing it would break callers that pass explicit type arguments.
over<T, K extends string>(lens: UnknownLens, fn: (v: any) => any, value: T): T;
over<T, K extends string>(lens: UnknownLens, fn: (v: any) => any): (value: T) => T;
// over(lens: KeyLens<T,K>): <T, K extends keyof T>(fn: (v: T[K]) => T[K], value: T) => T;
over<T, K extends string>(lens: UnknownLens): CurriedFunction2<(v: any) => any, T, T>;
// over<T, K extends keyof T>: CurriedFunction3<KeyLens<T,K>, (v: T[K]) => T[K], T, T>;
// regular lenses:
// // Functor version:
// over<V, T extends Functor<V>>(lens: Lens<T,V>|ManualLens<V>|UnknownLens, fn: (v: V) => V, value: T): T;
// over<V>(lens: ManualLens<V>|UnknownLens, fn: (v: V) => V): <T extends Functor<V>>(value: T) => T;
// over<V, T extends Functor<V>>(lens: Lens<T,V>|ManualLens<V>|UnknownLens): CurriedFunction2<(v: V) => V, T, T>;
// // over<V, T extends Functor<V>>(lens: Lens<T,V>|ManualLens<V>|UnknownLens): (fn: (v: V) => V, value: T) => T;
// // over<V, T extends Functor<V>>: CurriedFunction3<Lens<T,V>, (v: V) => V, T, T>;
// // Functor version applied to array:
// over<V, T extends List<V>>(lens: Lens<T,V>|ManualLens<V>|UnknownLens, fn: (v: V) => V, value: T): V[];
// over<V, T extends List<V>>(lens: Lens<T,V>|ManualLens<V>|UnknownLens, fn: (v: V) => V): <T>(value: T) => V[];
// over<V, T extends List<V>>(lens: Lens<T,V>|ManualLens<V>|UnknownLens): CurriedFunction2<(v: V) => V, T, V[]>;
// // over<V, T extends List<V>>(lens: Lens<T,V>|ManualLens<V>|UnknownLens): <V>(fn: (v: V) => V, value: T) => V[];
// // over<V, T extends List<V>>: CurriedFunction3<Lens<T,V>|ManualLens<V>|UnknownLens, (v: V) => V, T, V[]>;
// // unbound value:
// over<T,V>(lens: Lens<T,V>|ManualLens<V>|UnknownLens, fn: (v: V) => V, value: T): T;
// over<V>(lens: ManualLens<V>|UnknownLens, fn: (v: V) => V): <T>(value: T) => T;
// // over(lens: UnknownLens): <T,V>(fn: (v: V) => V, value: T) => T;
// over<T,V>(lens: UnknownLens): CurriedFunction2<(v: V) => V, T, T>;
// // over<T,V>: CurriedFunction3<Lens<T,V>, (v: V) => V, T, T>;
// Functor version
// value is a Functor over V; the structure type T is preserved in the result
over<V, T extends Functor<V>>(lens: Lens<T, V>|ManualLens<V>|UnknownLens, fn: (v: V) => V, value: T): T;
over<V, T extends Functor<V>>(lens: Lens<T, V>|ManualLens<V>|UnknownLens, fn: (v: V) => V):{
(value: T): T;
};
over<V, T extends Functor<V>>(lens: Lens<T, V>|ManualLens<V>|UnknownLens):{
(fn: (v: V) => V, value: T): T;
(fn: (v: V) => V):{
(value: T): T;
};
};
// Functor version applied to array
// same shape but for List inputs: the result collapses to a plain V[]
over<V, T extends List<V>>(lens: Lens<T, V>|ManualLens<V>|UnknownLens, fn: (v: V) => V, value: T): V[];
over<V, T extends List<V>>(lens: Lens<T, V>|ManualLens<V>|UnknownLens, fn: (v: V) => V):{
(value: T): V[];
};
over<V, T extends List<V>>(lens: Lens<T, V>|ManualLens<V>|UnknownLens):{
(fn: (v: V) => V, value: T): V[];
(fn: (v: V) => V):{
(value: T): V[];
};
};
// unbound value
// fallback: no constraint between the container T and the focused value V
over<T, V>(lens: Lens<T, V>|ManualLens<V>|UnknownLens, fn: (v: V) => V, value: T): T;
over<T, V>(lens: Lens<T, V>|ManualLens<V>|UnknownLens, fn: (v: V) => V):{
(value: T): T;
};
over<T, V>(lens: Lens<T, V>|ManualLens<V>|UnknownLens):{
(fn: (v: V) => V, value: T): T;
(fn: (v: V) => V):{
(value: T): T;
};
};
/**
 * Takes two arguments, fst and snd, and returns [fst, snd].
 */
pair<F,S>(fst: F, snd: S): [F, S];
// curried: S is deferred to the inner call
pair<F>(fst: F): <S>(snd: S) => [F, S];
// pair<F,S>: CurriedFunction2<F, S, [F, S]>;
/**
 * Accepts as its arguments a function and any number of values and returns a function that,
 * when invoked, calls the original function with all of the values prepended to the
 * original function's arguments list. In some libraries this function is named `applyLeft`.
 */
// the prepended arguments are untyped (any[]); only the return type T is tracked
partial<T>(fn: Variadic<T>, args: any[]): Variadic<T>;
partial<T>(fn: Variadic<T>): (args: any[]) => Variadic<T>;
// partial<T>: CurriedFunction2<Variadic<T>, args: any[], Variadic<T>>;
// TODO: fixed-arity versions
/**
 * Accepts as its arguments a function and any number of values and returns a function that,
 * when invoked, calls the original function with all of the values appended to the original
 * function's arguments list.
 */
partialRight<T>(fn: Variadic<T>, args: any[]): Variadic<T>;
partialRight<T>(fn: Variadic<T>): (args: any[]) => Variadic<T>;
// partialRight<T>: CurriedFunction2<Variadic<T>, args: any[], Variadic<T>>;
// TODO: fixed-arity versions
/**
 * Takes a predicate and a list and returns the pair of lists of elements
 * which do and do not satisfy the predicate, respectively.
 */
// arrays
partition<T>(fn: (a: T) => boolean, list: List<T>): [T[], T[]];
partition<T>(fn: (a: T) => boolean): (list: List<T>) => [T[], T[]];
// partition<T>: CurriedFunction2<(a: T) => boolean, List<T>, [T[], T[]]>;
// objects
// NOTE(review): splitting T & U into [T, U] is caller-asserted; the predicate alone
// cannot prove which keys land in which partition.
partition<T extends Obj<V>,U extends Obj<V>,V>(fn: (a: V) => boolean, obj: T & U) : [T,U];
// partition<T extends Obj<V>,U extends Obj<V>,V>: CurriedFunction2<(a: T) => boolean, obj: T & U, [T,U]>;
// objects, alternative notation
partition<T, U extends Obj<T>>(fn: (a: T) => boolean, obj: U) : [Obj<T>, Obj<T>];
// partition<T, U extends Obj<T>>: CurriedFunction2<(a: T) => boolean, U, [Partial<U>,Partial<U>]>;
/**
 * Retrieve the value at a given path.
 */
// fixed-length versions
// simpler versions, able to deal only with objects, not arrays:
// in-based
// mapped-type ("in"-based) overloads: each path segment Ti is a string-literal key, with
// one overload per path length from 2 to 9 segments
path<T1 extends string, T2 extends string, TResult>(path: [T1, T2], obj: {[K1 in T1]: {[K2 in T2]: TResult}}): TResult;
path<T1 extends string, T2 extends string, T3 extends string, TResult>(path: [T1, T2, T3], obj: {[K1 in T1]: {[K2 in T2]: {[K3 in T3]: TResult}}}): TResult;
path<T1 extends string, T2 extends string, T3 extends string, T4 extends string, TResult>(path: [T1, T2, T3, T4], obj: {[K1 in T1]: {[K2 in T2]: {[K3 in T3]: {[K4 in T4]: TResult}}}}): TResult;
path<T1 extends string, T2 extends string, T3 extends string, T4 extends string, T5 extends string, TResult>(path: [T1, T2, T3, T4, T5], obj: {[K1 in T1]: {[K2 in T2]: {[K3 in T3]: {[K4 in T4]: {[K5 in T5]: TResult}}}}}): TResult;
path<T1 extends string, T2 extends string, T3 extends string, T4 extends string, T5 extends string, T6 extends string, TResult>(path: [T1, T2, T3, T4, T5, T6], obj: {[K1 in T1]: {[K2 in T2]: {[K3 in T3]: {[K4 in T4]: {[K5 in T5]: {[K6 in T6]: TResult}}}}}}): TResult;
path<T1 extends string, T2 extends string, T3 extends string, T4 extends string, T5 extends string, T6 extends string, T7 extends string, TResult>(path: [T1, T2, T3, T4, T5, T6, T7], obj: {[K1 in T1]: {[K2 in T2]: {[K3 in T3]: {[K4 in T4]: {[K5 in T5]: {[K6 in T6]: {[K7 in T7]: TResult}}}}}}}): TResult;
path<T1 extends string, T2 extends string, T3 extends string, T4 extends string, T5 extends string, T6 extends string, T7 extends string, T8 extends string, TResult>(path: [T1, T2, T3, T4, T5, T6, T7, T8], obj: {[K1 in T1]: {[K2 in T2]: {[K3 in T3]: {[K4 in T4]: {[K5 in T5]: {[K6 in T6]: {[K7 in T7]: {[K8 in T8]: TResult}}}}}}}}): TResult;
path<T1 extends string, T2 extends string, T3 extends string, T4 extends string, T5 extends string, T6 extends string, T7 extends string, T8 extends string, T9 extends string, TResult>(path: [T1, T2, T3, T4, T5, T6, T7, T8, T9], obj: {[K1 in T1]: {[K2 in T2]: {[K3 in T3]: {[K4 in T4]: {[K5 in T5]: {[K6 in T6]: {[K7 in T7]: {[K8 in T8]: {[K9 in T9]: TResult}}}}}}}}}): TResult;
// Record-based
// equivalent overloads expressed with nested Record<K, V> instead of mapped types
path<K1 extends string, K2 extends string, TResult>(path: [K1, K2], obj: Record<K1,Record<K2,TResult>>): TResult;
path<K1 extends string, K2 extends string, K3 extends string, TResult>(path: [K1, K2, K3], obj: Record<K1,Record<K2,Record<K3,TResult>>>): TResult;
path<K1 extends string, K2 extends string, K3 extends string, K4 extends string, TResult>(path: [K1, K2, K3, K4], obj: Record<K1,Record<K2,Record<K3,Record<K4,TResult>>>>): TResult;
path<K1 extends string, K2 extends string, K3 extends string, K4 extends string, K5 extends string, TResult>(path: [K1, K2, K3, K4, K5], obj: Record<K1,Record<K2,Record<K3,Record<K4,Record<K5,TResult>>>>>): TResult;
path<K1 extends string, K2 extends string, K3 extends string, K4 extends string, K5 extends string, K6 extends string, TResult>(path: [K1, K2, K3, K4, K5, K6], obj: Record<K1,Record<K2,Record<K3,Record<K4,Record<K5,Record<K6,TResult>>>>>>): TResult;
path<K1 extends string, K2 extends string, K3 extends string, K4 extends string, K5 extends string, K6 extends string, K7 extends string, TResult>(path: [K1, K2, K3, K4, K5, K6, K7], obj: Record<K1,Record<K2,Record<K3,Record<K4,Record<K5,Record<K6,Record<K7,TResult>>>>>>>): TResult;
path<K1 extends string, K2 extends string, K3 extends string, K4 extends string, K5 extends string, K6 extends string, K7 extends string, K8 extends string, TResult>(path: [K1, K2, K3, K4, K5, K6, K7, K8], obj: Record<K1,Record<K2,Record<K3,Record<K4,Record<K5,Record<K6,Record<K7,Record<K8,TResult>>>>>>>>): TResult;
path<K1 extends string, K2 extends string, K3 extends string, K4 extends string, K5 extends string, K6 extends string, K7 extends string, K8 extends string, K9 extends string, TResult>(path: [K1, K2, K3, K4, K5, K6, K7, K8, K9], obj: Record<K1,Record<K2,Record<K3,Record<K4,Record<K5,Record<K6,Record<K7,Record<K8,Record<K9,TResult>>>>>>>>>): TResult;
// for each path length list all combinations of objects and homogeneous arrays... tuples not supported yet.
// combinatorial overloads: a numeric segment indexes into an array level, a string
// segment indexes into an object level (all string/number mixes up to length 3 are active)
path<T1 extends string, TResult>(path: [T1], obj: {[K1 in T1]: TResult}): TResult;
path<T1 extends number, TResult>(path: [T1], obj: TResult[]): TResult;
path<T1 extends string, T2 extends string, TResult>(path: [T1, T2], obj: {[K1 in T1]: {[K2 in T2]: TResult}}): TResult;
path<T1 extends string, T2 extends number, TResult>(path: [T1, T2], obj: {[K1 in T1]: TResult[]}): TResult;
path<T1 extends number, T2 extends string, TResult>(path: [T1, T2], obj: {[K2 in T2]: TResult}[]): TResult;
path<T1 extends number, T2 extends number, TResult>(path: [T1, T2], obj: TResult[][]): TResult;
path<T1 extends string, T2 extends string, T3 extends string, TResult>(path: [T1, T2, T3], obj: {[K1 in T1]: {[K2 in T2]: {[K3 in T3]: TResult}}}): TResult;
path<T1 extends string, T2 extends string, T3 extends number, TResult>(path: [T1, T2, T3], obj: {[K1 in T1]: {[K2 in T2]: TResult[]}}): TResult;
path<T1 extends string, T2 extends number, T3 extends string, TResult>(path: [T1, T2, T3], obj: {[K1 in T1]: {[K3 in T3]: TResult}[]}): TResult;
path<T1 extends string, T2 extends number, T3 extends number, TResult>(path: [T1, T2, T3], obj: {[K1 in T1]: TResult[][]}): TResult;
path<T1 extends number, T2 extends string, T3 extends string, TResult>(path: [T1, T2, T3], obj: {[K2 in T2]: {[K3 in T3]: TResult}}[]): TResult;
path<T1 extends number, T2 extends string, T3 extends number, TResult>(path: [T1, T2, T3], obj: {[K2 in T2]: TResult[]}[]): TResult;
path<T1 extends number, T2 extends number, T3 extends string, TResult>(path: [T1, T2, T3], obj: {[K3 in T3]: TResult}[][]): TResult;
path<T1 extends number, T2 extends number, T3 extends number, TResult>(path: [T1, T2, T3], obj: TResult[][][]): TResult;
// path<T1 extends string, T2 extends string, T3 extends string, T4 extends string, TResult>(path: [T1, T2, T3, T4], obj: {[K1 in T1]: {[K2 in T2]: {[K3 in T3]: {[K4 in T4]: TResult}}}}): TResult;
// path<T1 extends string, T2 extends string, T3 extends string, T4 extends number, TResult>(path: [T1, T2, T3, T4], obj: {[K1 in T1]: {[K2 in T2]: {[K3 in T3]: TResult[]}}}): TResult;
// path<T1 extends string, T2 extends string, T3 extends number, T4 extends string, TResult>(path: [T1, T2, T3, T4], obj: {[K1 in T1]: {[K2 in T2]: {[K4 in T4]: TResult}[]}}): TResult;
// path<T1 extends string, T2 extends string, T3 extends number, T4 extends number, TResult>(path: [T1, T2, T3, T4], obj: {[K1 in T1]: {[K2 in T2]: TResult[][]}}): TResult;
// path<T1 extends string, T2 extends number, T3 extends string, T4 extends string, TResult>(path: [T1, T2, T3, T4], obj: {[K1 in T1]: {[K3 in T3]: {[K4 in T4]: TResult}}[]}): TResult;
// path<T1 extends string, T2 extends number, T3 extends string, T4 extends number, TResult>(path: [T1, T2, T3, T4], obj: {[K1 in T1]: {[K3 in T3]: TResult[]}[]}): TResult;
// path<T1 extends string, T2 extends number, T3 extends number, T4 extends string, TResult>(path: [T1, T2, T3, T4], obj: {[K1 in T1]: {[K4 in T4]: TResult}[][]}): TResult;
// path<T1 extends string, T2 extends number, T3 extends number, T4 extends number, TResult>(path: [T1, T2, T3, T4], obj: {[K1 in T1]: TResult[][][]}): TResult;
// path<T1 extends number, T2 extends string, T3 extends string, T4 extends string, TResult>(path: [T1, T2, T3, T4], obj: {[K2 in T2]: {[K3 in T3]: {[K4 in T4]: TResult}}}[]): TResult;
// path<T1 extends number, T2 extends string, T3 extends string, T4 extends number, TResult>(path: [T1, T2, T3, T4], obj: {[K2 in T2]: {[K3 in T3]: TResult[]}}[]): TResult;
// path<T1 extends number, T2 extends string, T3 extends number, T4 extends string, TResult>(path: [T1, T2, T3, T4], obj: {[K2 in T2]: {[K4 in T4]: TResult}[]}[]): TResult;
// path<T1 extends number, T2 extends string, T3 extends number, T4 extends number, TResult>(path: [T1, T2, T3, T4], obj: {[K2 in T2]: TResult[][]}[]): TResult;
// path<T1 extends number, T2 extends number, T3 extends string, T4 extends string, TResult>(path: [T1, T2, T3, T4], obj: {[K3 in T3]: {[K4 in T4]: TResult}}[][]): TResult;
// path<T1 extends number, T2 extends number, T3 extends string, T4 extends number, TResult>(path: [T1, T2, T3, T4], obj: {[K3 in T3]: TResult[]}[][]): TResult;
// path<T1 extends number, T2 extends number, T3 extends number, T4 extends string, TResult>(path: [T1, T2, T3, T4], obj: {[K4 in T4]: TResult}[][][]): TResult;
// path<T1 extends number, T2 extends number, T3 extends number, T4 extends number, TResult>(path: [T1, T2, T3, T4], obj: TResult[][][][]): TResult;
// path<T1 extends string, T2 extends string, T3 extends string, T4 extends string, T5 extends string, TResult>(path: [T1, T2, T3, T4, T5], obj: {[K1 in T1]: {[K2 in T2]: {[K3 in T3]: {[K4 in T4]: {[K5 in T5]: TResult}}}}}): TResult;
// path<T1 extends string, T2 extends string, T3 extends string, T4 extends string, T5 extends number, TResult>(path: [T1, T2, T3, T4, T5], obj: {[K1 in T1]: {[K2 in T2]: {[K3 in T3]: {[K4 in T4]: TResult[]}}}}): TResult;
// path<T1 extends string, T2 extends string, T3 extends string, T4 extends number, T5 extends string, TResult>(path: [T1, T2, T3, T4, T5], obj: {[K1 in T1]: {[K2 in T2]: {[K3 in T3]: {[K5 in T5]: TResult}[]}}}): TResult;
// path<T1 extends string, T2 extends string, T3 extends string, T4 extends number, T5 extends number, TResult>(path: [T1, T2, T3, T4, T5], obj: {[K1 in T1]: {[K2 in T2]: {[K3 in T3]: TResult[][]}}}): TResult;
// path<T1 extends string, T2 extends string, T3 extends number, T4 extends string, T5 extends string, TResult>(path: [T1, T2, T3, T4, T5], obj: {[K1 in T1]: {[K2 in T2]: {[K4 in T4]: {[K5 in T5]: TResult}}[]}}): TResult;
// path<T1 extends string, T2 extends string, T3 extends number, T4 extends string, T5 extends number, TResult>(path: [T1, T2, T3, T4, T5], obj: {[K1 in T1]: {[K2 in T2]: {[K4 in T4]: TResult[]}[]}}): TResult;
// path<T1 extends string, T2 extends string, T3 extends number, T4 extends number, T5 extends string, TResult>(path: [T1, T2, T3, T4, T5], obj: {[K1 in T1]: {[K2 in T2]: {[K5 in T5]: TResult}[][]}}): TResult;
// path<T1 extends string, T2 extends string, T3 extends number, T4 extends number, T5 extends number, TResult>(path: [T1, T2, T3, T4, T5], obj: {[K1 in T1]: {[K2 in T2]: TResult[][][]}}): TResult;
// path<T1 extends string, T2 extends number, T3 extends string, T4 extends string, T5 extends string, TResult>(path: [T1, T2, T3, T4, T5], obj: {[K1 in T1]: {[K3 in T3]: {[K4 in T4]: {[K5 in T5]: TResult}}}[]}): TResult;
// path<T1 extends string, T2 extends number, T3 extends string, T4 extends string, T5 extends number, TResult>(path: [T1, T2, T3, T4, T5], obj: {[K1 in T1]: {[K3 in T3]: {[K4 in T4]: TResult[]}}[]}): TResult;
// path<T1 extends string, T2 extends number, T3 extends string, T4 extends number, T5 extends string, TResult>(path: [T1, T2, T3, T4, T5], obj: {[K1 in T1]: {[K3 in T3]: {[K5 in T5]: TResult}[]}[]}): TResult;
// path<T1 extends string, T2 extends number, T3 extends string, T4 extends number, T5 extends number, TResult>(path: [T1, T2, T3, T4, T5], obj: {[K1 in T1]: {[K3 in T3]: TResult[][]}[]}): TResult;
// path<T1 extends string, T2 extends number, T3 extends number, T4 extends string, T5 extends string, TResult>(path: [T1, T2, T3, T4, T5], obj: {[K1 in T1]: {[K4 in T4]: {[K5 in T5]: TResult}}[][]}): TResult;
// path<T1 extends string, T2 extends number, T3 extends number, T4 extends string, T5 extends number, TResult>(path: [T1, T2, T3, T4, T5], obj: {[K1 in T1]: {[K4 in T4]: TResult[]}[][]}): TResult;
// path<T1 extends string, T2 extends number, T3 extends number, T4 extends number, T5 extends string, TResult>(path: [T1, T2, T3, T4, T5], obj: {[K1 in T1]: {[K5 in T5]: TResult}[][][]}): TResult;
// path<T1 extends string, T2 extends number, T3 extends number, T4 extends number, T5 extends number, TResult>(path: [T1, T2, T3, T4, T5], obj: {[K1 in T1]: TResult[][][][]}): TResult;
// path<T1 extends number, T2 extends string, T3 extends string, T4 extends string, T5 extends string, TResult>(path: [T1, T2, T3, T4, T5], obj: {[K2 in T2]: {[K3 in T3]: {[K4 in T4]: {[K5 in T5]: TResult}}}}[]): TResult;
// path<T1 extends number, T2 extends string, T3 extends string, T4 extends string, T5 extends number, TResult>(path: [T1, T2, T3, T4, T5], obj: {[K2 in T2]: {[K3 in T3]: {[K4 in T4]: TResult[]}}}[]): TResult;
// path<T1 extends number, T2 extends string, T3 extends string, T4 extends number, T5 extends string, TResult>(path: [T1, T2, T3, T4, T5], obj: {[K2 in T2]: {[K3 in T3]: {[K5 in T5]: TResult}[]}}[]): TResult;
// path<T1 extends number, T2 extends string, T3 extends string, T4 extends number, T5 extends number, TResult>(path: [T1, T2, T3, T4, T5], obj: {[K2 in T2]: {[K3 in T3]: TResult[][]}}[]): TResult;
// path<T1 extends number, T2 extends string, T3 extends number, T4 extends string, T5 extends string, TResult>(path: [T1, T2, T3, T4, T5], obj: {[K2 in T2]: {[K4 in T4]: {[K5 in T5]: TResult}}[]}[]): TResult;
// path<T1 extends number, T2 extends string, T3 extends number, T4 extends string, T5 extends number, TResult>(path: [T1, T2, T3, T4, T5], obj: {[K2 in T2]: {[K4 in T4]: TResult[]}[]}[]): TResult;
// path<T1 extends number, T2 extends string, T3 extends number, T4 extends number, T5 extends string, TResult>(path: [T1, T2, T3, T4, T5], obj: {[K2 in T2]: {[K5 in T5]: TResult}[][]}[]): TResult;
// path<T1 extends number, T2 extends string, T3 extends number, T4 extends number, T5 extends number, TResult>(path: [T1, T2, T3, T4, T5], obj: {[K2 in T2]: TResult[][][]}[]): TResult;
// path<T1 extends number, T2 extends number, T3 extends string, T4 extends string, T5 extends string, TResult>(path: [T1, T2, T3, T4, T5], obj: {[K3 in T3]: {[K4 in T4]: {[K5 in T5]: TResult}}}[][]): TResult;
// path<T1 extends number, T2 extends number, T3 extends string, T4 extends string, T5 extends number, TResult>(path: [T1, T2, T3, T4, T5], obj: {[K3 in T3]: {[K4 in T4]: TResult[]}}[][]): TResult;
// path<T1 extends number, T2 extends number, T3 extends string, T4 extends number, T5 extends string, TResult>(path: [T1, T2, T3, T4, T5], obj: {[K3 in T3]: {[K5 in T5]: TResult}[]}[][]): TResult;
// path<T1 extends number, T2 extends number, T3 extends string, T4 extends number, T5 extends number, TResult>(path: [T1, T2, T3, T4, T5], obj: {[K3 in T3]: TResult[][]}[][]): TResult;
// path<T1 extends number, T2 extends number, T3 extends number, T4 extends string, T5 extends string, TResult>(path: [T1, T2, T3, T4, T5], obj: {[K4 in T4]: {[K5 in T5]: TResult}}[][][]): TResult;
// path<T1 extends number, T2 extends number, T3 extends number, T4 extends string, T5 extends number, TResult>(path: [T1, T2, T3, T4, T5], obj: {[K4 in T4]: TResult[]}[][][]): TResult;
// path<T1 extends number, T2 extends number, T3 extends number, T4 extends number, T5 extends string, TResult>(path: [T1, T2, T3, T4, T5], obj: {[K5 in T5]: TResult}[][][][]): TResult;
// path<T1 extends number, T2 extends number, T3 extends number, T4 extends number, T5 extends number, TResult>(path: [T1, T2, T3, T4, T5], obj: TResult[][][][][]): TResult;
// path<T1 extends string, T2 extends string, T3 extends string, T4 extends string, T5 extends string, T6 extends string, TResult>(path: [T1, T2, T3, T4, T5, T6], obj: {[K1 in T1]: {[K2 in T2]: {[K3 in T3]: {[K4 in T4]: {[K5 in T5]: {[K6 in T6]: TResult}}}}}}): TResult;
// path<T1 extends string, T2 extends string, T3 extends string, T4 extends string, T5 extends string, T6 extends number, TResult>(path: [T1, T2, T3, T4, T5, T6], obj: {[K1 in T1]: {[K2 in T2]: {[K3 in T3]: {[K4 in T4]: {[K5 in T5]: TResult[]}}}}}): TResult;
// path<T1 extends string, T2 extends string, T3 extends string, T4 extends string, T5 extends number, T6 extends string, TResult>(path: [T1, T2, T3, T4, T5, T6], obj: {[K1 in T1]: {[K2 in T2]: {[K3 in T3]: {[K4 in T4]: {[K6 in T6]: TResult}[]}}}}): TResult;
// path<T1 extends string, T2 extends string, T3 extends string, T4 extends string, T5 extends number, T6 extends number, TResult>(path: [T1, T2, T3, T4, T5, T6], obj: {[K1 in T1]: {[K2 in T2]: {[K3 in T3]: {[K4 in T4]: TResult[][]}}}}): TResult;
// path<T1 extends string, T2 extends string, T3 extends string, T4 extends number, T5 extends string, T6 extends string, TResult>(path: [T1, T2, T3, T4, T5, T6], obj: {[K1 in T1]: {[K2 in T2]: {[K3 in T3]: {[K5 in T5]: {[K6 in T6]: TResult}}[]}}}): TResult;
// path<T1 extends string, T2 extends string, T3 extends string, T4 extends number, T5 extends string, T6 extends number, TResult>(path: [T1, T2, T3, T4, T5, T6], obj: {[K1 in T1]: {[K2 in T2]: {[K3 in T3]: {[K5 in T5]: TResult[]}[]}}}): TResult;
// path<T1 extends string, T2 extends string, T3 extends string, T4 extends number, T5 extends number, T6 extends string, TResult>(path: [T1, T2, T3, T4, T5, T6], obj: {[K1 in T1]: {[K2 in T2]: {[K3 in T3]: {[K6 in T6]: TResult}[][]}}}): TResult;
// path<T1 extends string, T2 extends string, T3 extends string, T4 extends number, T5 extends number, T6 extends number, TResult>(path: [T1, T2, T3, T4, T5, T6], obj: {[K1 in T1]: {[K2 in T2]: {[K3 in T3]: TResult[][][]}}}): TResult;
// path<T1 extends string, T2 extends string, T3 extends number, T4 extends string, T5 extends string, T6 extends string, TResult>(path: [T1, T2, T3, T4, T5, T6], obj: {[K1 in T1]: {[K2 in T2]: {[K4 in T4]: {[K5 in T5]: {[K6 in T6]: TResult}}}[]}}): TResult;
// path<T1 extends string, T2 extends string, T3 extends number, T4 extends string, T5 extends string, T6 extends number, TResult>(path: [T1, T2, T3, T4, T5, T6], obj: {[K1 in T1]: {[K2 in T2]: {[K4 in T4]: {[K5 in T5]: TResult[]}}[]}}): TResult;
// path<T1 extends string, T2 extends string, T3 extends number, T4 extends string, T5 extends number, T6 extends string, TResult>(path: [T1, T2, T3, T4, T5, T6], obj: {[K1 in T1]: {[K2 in T2]: {[K4 in T4]: {[K6 in T6]: TResult}[]}[]}}): TResult;
// path<T1 extends string, T2 extends string, T3 extends number, T4 extends string, T5 extends number, T6 extends number, TResult>(path: [T1, T2, T3, T4, T5, T6], obj: {[K1 in T1]: {[K2 in T2]: {[K4 in T4]: TResult[][]}[]}}): TResult;
// path<T1 extends string, T2 extends string, T3 extends number, T4 extends number, T5 extends string, T6 extends string, TResult>(path: [T1, T2, T3, T4, T5, T6], obj: {[K1 in T1]: {[K2 in T2]: {[K5 in T5]: {[K6 in T6]: TResult}}[][]}}): TResult;
// path<T1 extends string, T2 extends string, T3 extends number, T4 extends number, T5 extends string, T6 extends number, TResult>(path: [T1, T2, T3, T4, T5, T6], obj: {[K1 in T1]: {[K2 in T2]: {[K5 in T5]: TResult[]}[][]}}): TResult;
// path<T1 extends string, T2 extends string, T3 extends number, T4 extends number, T5 extends number, T6 extends string, TResult>(path: [T1, T2, T3, T4, T5, T6], obj: {[K1 in T1]: {[K2 in T2]: {[K6 in T6]: TResult}[][][]}}): TResult;
// path<T1 extends string, T2 extends string, T3 extends number, T4 extends number, T5 extends number, T6 extends number, TResult>(path: [T1, T2, T3, T4, T5, T6], obj: {[K1 in T1]: {[K2 in T2]: TResult[][][][]}}): TResult;
// path<T1 extends string, T2 extends number, T3 extends string, T4 extends string, T5 extends string, T6 extends string, TResult>(path: [T1, T2, T3, T4, T5, T6], obj: {[K1 in T1]: {[K3 in T3]: {[K4 in T4]: {[K5 in T5]: {[K6 in T6]: TResult}}}}[]}): TResult;
// path<T1 extends string, T2 extends number, T3 extends string, T4 extends string, T5 extends string, T6 extends number, TResult>(path: [T1, T2, T3, T4, T5, T6], obj: {[K1 in T1]: {[K3 in T3]: {[K4 in T4]: {[K5 in T5]: TResult[]}}}[]}): TResult;
// path<T1 extends string, T2 extends number, T3 extends string, T4 extends string, T5 extends number, T6 extends string, TResult>(path: [T1, T2, T3, T4, T5, T6], obj: {[K1 in T1]: {[K3 in T3]: {[K4 in T4]: {[K6 in T6]: TResult}[]}}[]}): TResult;
// path<T1 extends string, T2 extends number, T3 extends string, T4 extends string, T5 extends number, T6 extends number, TResult>(path: [T1, T2, T3, T4, T5, T6], obj: {[K1 in T1]: {[K3 in T3]: {[K4 in T4]: TResult[][]}}[]}): TResult;
// path<T1 extends string, T2 extends number, T3 extends string, T4 extends number, T5 extends string, T6 extends string, TResult>(path: [T1, T2, T3, T4, T5, T6], obj: {[K1 in T1]: {[K3 in T3]: {[K5 in T5]: {[K6 in T6]: TResult}}[]}[]}): TResult;
// path<T1 extends string, T2 extends number, T3 extends string, T4 extends number, T5 extends string, T6 extends number, TResult>(path: [T1, T2, T3, T4, T5, T6], obj: {[K1 in T1]: {[K3 in T3]: {[K5 in T5]: TResult[]}[]}[]}): TResult;
// path<T1 extends string, T2 extends number, T3 extends string, T4 extends number, T5 extends number, T6 extends string, TResult>(path: [T1, T2, T3, T4, T5, T6], obj: {[K1 in T1]: {[K3 in T3]: {[K6 in T6]: TResult}[][]}[]}): TResult;
// path<T1 extends string, T2 extends number, T3 extends string, T4 extends number, T5 extends number, T6 extends number, TResult>(path: [T1, T2, T3, T4, T5, T6], obj: {[K1 in T1]: {[K3 in T3]: TResult[][][]}[]}): TResult;
// path<T1 extends string, T2 extends number, T3 extends number, T4 extends string, T5 extends string, T6 extends string, TResult>(path: [T1, T2, T3, T4, T5, T6], obj: {[K1 in T1]: {[K4 in T4]: {[K5 in T5]: {[K6 in T6]: TResult}}}[][]}): TResult;
// path<T1 extends string, T2 extends number, T3 extends number, T4 extends string, T5 extends string, T6 extends number, TResult>(path: [T1, T2, T3, T4, T5, T6], obj: {[K1 in T1]: {[K4 in T4]: {[K5 in T5]: TResult[]}}[][]}): TResult;
// path<T1 extends string, T2 extends number, T3 extends number, T4 extends string, T5 extends number, T6 extends string, TResult>(path: [T1, T2, T3, T4, T5, T6], obj: {[K1 in T1]: {[K4 in T4]: {[K6 in T6]: TResult}[]}[][]}): TResult;
// path<T1 extends string, T2 extends number, T3 extends number, T4 extends string, T5 extends number, T6 extends number, TResult>(path: [T1, T2, T3, T4, T5, T6], obj: {[K1 in T1]: {[K4 in T4]: TResult[][]}[][]}): TResult;
// path<T1 extends string, T2 extends number, T3 extends number, T4 extends number, T5 extends string, T6 extends string, TResult>(path: [T1, T2, T3, T4, T5, T6], obj: {[K1 in T1]: {[K5 in T5]: {[K6 in T6]: TResult}}[][][]}): TResult;
// path<T1 extends string, T2 extends number, T3 extends number, T4 extends number, T5 extends string, T6 extends number, TResult>(path: [T1, T2, T3, T4, T5, T6], obj: {[K1 in T1]: {[K5 in T5]: TResult[]}[][][]}): TResult;
// path<T1 extends string, T2 extends number, T3 extends number, T4 extends number, T5 extends number, T6 extends string, TResult>(path: [T1, T2, T3, T4, T5, T6], obj: {[K1 in T1]: {[K6 in T6]: TResult}[][][][]}): TResult;
// path<T1 extends string, T2 extends number, T3 extends number, T4 extends number, T5 extends number, T6 extends number, TResult>(path: [T1, T2, T3, T4, T5, T6], obj: {[K1 in T1]: TResult[][][][][]}): TResult;
// path<T1 extends number, T2 extends string, T3 extends string, T4 extends string, T5 extends string, T6 extends string, TResult>(path: [T1, T2, T3, T4, T5, T6], obj: {[K2 in T2]: {[K3 in T3]: {[K4 in T4]: {[K5 in T5]: {[K6 in T6]: TResult}}}}}[]): TResult;
// path<T1 extends number, T2 extends string, T3 extends string, T4 extends string, T5 extends string, T6 extends number, TResult>(path: [T1, T2, T3, T4, T5, T6], obj: {[K2 in T2]: {[K3 in T3]: {[K4 in T4]: {[K5 in T5]: TResult[]}}}}[]): TResult;
// path<T1 extends number, T2 extends string, T3 extends string, T4 extends string, T5 extends number, T6 extends string, TResult>(path: [T1, T2, T3, T4, T5, T6], obj: {[K2 in T2]: {[K3 in T3]: {[K4 in T4]: {[K6 in T6]: TResult}[]}}}[]): TResult;
// path<T1 extends number, T2 extends string, T3 extends string, T4 extends string, T5 extends number, T6 extends number, TResult>(path: [T1, T2, T3, T4, T5, T6], obj: {[K2 in T2]: {[K3 in T3]: {[K4 in T4]: TResult[][]}}}[]): TResult;
// path<T1 extends number, T2 extends string, T3 extends string, T4 extends number, T5 extends string, T6 extends string, TResult>(path: [T1, T2, T3, T4, T5, T6], obj: {[K2 in T2]: {[K3 in T3]: {[K5 in T5]: {[K6 in T6]: TResult}}[]}}[]): TResult;
// path<T1 extends number, T2 extends string, T3 extends string, T4 extends number, T5 extends string, T6 extends number, TResult>(path: [T1, T2, T3, T4, T5, T6], obj: {[K2 in T2]: {[K3 in T3]: {[K5 in T5]: TResult[]}[]}}[]): TResult;
// path<T1 extends number, T2 extends string, T3 extends string, T4 extends number, T5 extends number, T6 extends string, TResult>(path: [T1, T2, T3, T4, T5, T6], obj: {[K2 in T2]: {[K3 in T3]: {[K6 in T6]: TResult}[][]}}[]): TResult;
// path<T1 extends number, T2 extends string, T3 extends string, T4 extends number, T5 extends number, T6 extends number, TResult>(path: [T1, T2, T3, T4, T5, T6], obj: {[K2 in T2]: {[K3 in T3]: TResult[][][]}}[]): TResult;
// path<T1 extends number, T2 extends string, T3 extends number, T4 extends string, T5 extends string, T6 extends string, TResult>(path: [T1, T2, T3, T4, T5, T6], obj: {[K2 in T2]: {[K4 in T4]: {[K5 in T5]: {[K6 in T6]: TResult}}}[]}[]): TResult;
// path<T1 extends number, T2 extends string, T3 extends number, T4 extends string, T5 extends string, T6 extends number, TResult>(path: [T1, T2, T3, T4, T5, T6], obj: {[K2 in T2]: {[K4 in T4]: {[K5 in T5]: TResult[]}}[]}[]): TResult;
// path<T1 extends number, T2 extends string, T3 extends number, T4 extends string, T5 extends number, T6 extends string, TResult>(path: [T1, T2, T3, T4, T5, T6], obj: {[K2 in T2]: {[K4 in T4]: {[K6 in T6]: TResult}[]}[]}[]): TResult;
// path<T1 extends number, T2 extends string, T3 extends number, T4 extends string, T5 extends number, T6 extends number, TResult>(path: [T1, T2, T3, T4, T5, T6], obj: {[K2 in T2]: {[K4 in T4]: TResult[][]}[]}[]): TResult;
// path<T1 extends number, T2 extends string, T3 extends number, T4 extends number, T5 extends string, T6 extends string, TResult>(path: [T1, T2, T3, T4, T5, T6], obj: {[K2 in T2]: {[K5 in T5]: {[K6 in T6]: TResult}}[][]}[]): TResult;
// path<T1 extends number, T2 extends string, T3 extends number, T4 extends number, T5 extends string, T6 extends number, TResult>(path: [T1, T2, T3, T4, T5, T6], obj: {[K2 in T2]: {[K5 in T5]: TResult[]}[][]}[]): TResult;
// path<T1 extends number, T2 extends string, T3 extends number, T4 extends number, T5 extends number, T6 extends string, TResult>(path: [T1, T2, T3, T4, T5, T6], obj: {[K2 in T2]: {[K6 in T6]: TResult}[][][]}[]): TResult;
// path<T1 extends number, T2 extends string, T3 extends number, T4 extends number, T5 extends number, T6 extends number, TResult>(path: [T1, T2, T3, T4, T5, T6], obj: {[K2 in T2]: TResult[][][][]}[]): TResult;
// path<T1 extends number, T2 extends number, T3 extends string, T4 extends string, T5 extends string, T6 extends string, TResult>(path: [T1, T2, T3, T4, T5, T6], obj: {[K3 in T3]: {[K4 in T4]: {[K5 in T5]: {[K6 in T6]: TResult}}}}[][]): TResult;
// path<T1 extends number, T2 extends number, T3 extends string, T4 extends string, T5 extends string, T6 extends number, TResult>(path: [T1, T2, T3, T4, T5, T6], obj: {[K3 in T3]: {[K4 in T4]: {[K5 in T5]: TResult[]}}}[][]): TResult;
// path<T1 extends number, T2 extends number, T3 extends string, T4 extends string, T5 extends number, T6 extends string, TResult>(path: [T1, T2, T3, T4, T5, T6], obj: {[K3 in T3]: {[K4 in T4]: {[K6 in T6]: TResult}[]}}[][]): TResult;
// path<T1 extends number, T2 extends number, T3 extends string, T4 extends string, T5 extends number, T6 extends number, TResult>(path: [T1, T2, T3, T4, T5, T6], obj: {[K3 in T3]: {[K4 in T4]: TResult[][]}}[][]): TResult;
// path<T1 extends number, T2 extends number, T3 extends string, T4 extends number, T5 extends string, T6 extends string, TResult>(path: [T1, T2, T3, T4, T5, T6], obj: {[K3 in T3]: {[K5 in T5]: {[K6 in T6]: TResult}}[]}[][]): TResult;
// path<T1 extends number, T2 extends number, T3 extends string, T4 extends number, T5 extends string, T6 extends number, TResult>(path: [T1, T2, T3, T4, T5, T6], obj: {[K3 in T3]: {[K5 in T5]: TResult[]}[]}[][]): TResult;
// path<T1 extends number, T2 extends number, T3 extends string, T4 extends number, T5 extends number, T6 extends string, TResult>(path: [T1, T2, T3, T4, T5, T6], obj: {[K3 in T3]: {[K6 in T6]: TResult}[][]}[][]): TResult;
// path<T1 extends number, T2 extends number, T3 extends string, T4 extends number, T5 extends number, T6 extends number, TResult>(path: [T1, T2, T3, T4, T5, T6], obj: {[K3 in T3]: TResult[][][]}[][]): TResult;
// path<T1 extends number, T2 extends number, T3 extends number, T4 extends string, T5 extends string, T6 extends string, TResult>(path: [T1, T2, T3, T4, T5, T6], obj: {[K4 in T4]: {[K5 in T5]: {[K6 in T6]: TResult}}}[][][]): TResult;
// path<T1 extends number, T2 extends number, T3 extends number, T4 extends string, T5 extends string, T6 extends number, TResult>(path: [T1, T2, T3, T4, T5, T6], obj: {[K4 in T4]: {[K5 in T5]: TResult[]}}[][][]): TResult;
// path<T1 extends number, T2 extends number, T3 extends number, T4 extends string, T5 extends number, T6 extends string, TResult>(path: [T1, T2, T3, T4, T5, T6], obj: {[K4 in T4]: {[K6 in T6]: TResult}[]}[][][]): TResult;
// path<T1 extends number, T2 extends number, T3 extends number, T4 extends string, T5 extends number, T6 extends number, TResult>(path: [T1, T2, T3, T4, T5, T6], obj: {[K4 in T4]: TResult[][]}[][][]): TResult;
// path<T1 extends number, T2 extends number, T3 extends number, T4 extends number, T5 extends string, T6 extends string, TResult>(path: [T1, T2, T3, T4, T5, T6], obj: {[K5 in T5]: {[K6 in T6]: TResult}}[][][][]): TResult;
// path<T1 extends number, T2 extends number, T3 extends number, T4 extends number, T5 extends string, T6 extends number, TResult>(path: [T1, T2, T3, T4, T5, T6], obj: {[K5 in T5]: TResult[]}[][][][]): TResult;
// path<T1 extends number, T2 extends number, T3 extends number, T4 extends number, T5 extends number, T6 extends string, TResult>(path: [T1, T2, T3, T4, T5, T6], obj: {[K6 in T6]: TResult}[][][][][]): TResult;
// path<T1 extends number, T2 extends number, T3 extends number, T4 extends number, T5 extends number, T6 extends number, TResult>(path: [T1, T2, T3, T4, T5, T6], obj: TResult[][][][][][]): TResult;
// fallback, prevents errors but lacks inference; expected result must be supplied manually.
path<T>(path: Path, obj: Struct<any>): T;
path(path: Path): <T>(obj: Struct<any>) => T;
// path<T>: CurriedFunction2<Path, Struct<any>, T>;
// failed attempt at proper typing, see https://github.com/Microsoft/TypeScript/issues/12393 :
// path<U, K1 extends keyof T, K2 extends keyof T[K1], T extends { [K1]: { [K2]: U } }>(keys: [K1, K2], obj: T): U;
// path<K1 extends keyof T, K2 extends keyof T[K1], T extends {}>(keys: [K1, K2], obj: T): T[K1][K2];
/**
* Determines whether a nested path on an object has a specific value,
* in `R.equals` terms. Most likely used to filter a list.
*/
// pathEq(path: Path, val: any, obj: Struct<any>): boolean;
// pathEq(path: Path, val: any): (obj: Struct<any>) => boolean;
// pathEq(path: Path): CurriedFunction2<any, Struct<any>, boolean>;
// // pathEq: CurriedFunction3<Path, any, Struct<any>, boolean>;
// base
// Fully applied: path, expected value, and target object all supplied at once.
pathEq(p: Path, v: any, o: any): boolean;
// Curried: path and expected value supplied; returns a predicate awaiting the object.
pathEq(p: Path, v: any): {
(o: any): boolean;
};
// Curried: only the path supplied; the remaining two arguments may arrive
// together or one at a time (manual spelling of the curried forms, since
// CurriedFunction3 could not express this — see commented attempts above).
pathEq(p: Path):{
(v: any, o: any): boolean;
(v: any):{
(o: any): boolean;
};
};
/**
* If the given, non-null object has a value at the given path, returns the value at that path.
* Otherwise returns the provided default value.
*/
// pathOr<T>(d: T, p: Path, obj: Struct<any>): T|any;
// pathOr<T>(d: T, p: Path): (obj: Struct<any>) => T|any;
// pathOr<T>(d: T): CurriedFunction2<Path, Struct<any>, T|any>;
// // pathOr<T>(d: T, p: Path): (obj: Struct<any>) => T|any;
// // pathOr<T>(d: T): (p: Path, obj: Struct<any>) => T|any;
// // pathOr<T>: CurriedFunction3<T, Path, Struct<any>, T|any>;
// base
// T is inferred from the default value; the result is `T|any` because the
// value found at the path cannot be related to T by the type system here.
pathOr<T>(d: T, p: Path, obj: Struct<any>): T|any;
// Curried: default and path supplied; returns a function awaiting the object.
pathOr<T>(d: T, p: Path):{
(obj: Struct<any>): T|any;
};
// Curried: only the default supplied; path and object may arrive together or one at a time.
pathOr<T>(d: T):{
(p: Path, obj: Struct<any>): T|any;
(p: Path):{
(obj: Struct<any>): T|any;
};
};
/**
* Returns `true` if the specified object property at given path satisfies the given predicate; `false`
* otherwise.
*/
// pathSatisfies<T>(fn: Pred<T>, p: Path, obj: any): boolean;
// pathSatisfies<T>(fn: Pred<T>, p: Path): (obj: any) => boolean;
// pathSatisfies<T>(fn: Pred<T>): CurriedFunction2<Path, any, boolean>;
// // pathSatisfies<T>: CurriedFunction3<Pred<T>, Path, any, boolean>;
// base
// Fully applied: predicate, path, and object all supplied at once.
pathSatisfies<T>(fn: Pred<T>, p: Path, obj: any): boolean;
// Curried: predicate and path supplied; returns a test awaiting the object.
pathSatisfies<T>(fn: Pred<T>, p: Path):{
(obj: any): boolean;
};
// Curried: only the predicate supplied; path and object may arrive together or one at a time.
pathSatisfies<T>(fn: Pred<T>):{
(p: Path, obj: any): boolean;
(p: Path):{
(obj: any): boolean;
};
};
/**
* Returns a partial copy of an object containing only the keys specified. If the key does not exist, the
* property is ignored.
*/
// NOTE(review): the declared return type `T` overstates the result — pick
// returns only a subset of T's properties. The commented variants below show
// the more precise `Pick<T, K>` / `Partial<T>` typings that were attempted;
// tightening the active signatures would break existing callers, so the loose
// form is kept. TODO confirm before changing.
pick<T, K extends Prop>(names: List<K>, obj: T): T;
pick<T, K extends Prop>(names: List<K>): (obj: T) => T;
// pick<T, K extends keyof T>: CurriedFunction2<List<K>, T, Pick<T, K>>;
// pick<T>(names: List<Prop>, obj: T): Partial<T>;
// pick<T>(names: List<Prop>): (obj: T) => Partial<T>;
// // pick<T>: CurriedFunction2<List<Prop>, T, Partial<T>>;
// /**
// * Similar to `pick` except that this one includes a `key: undefined` pair for properties that don't exist.
// */
// pickAll<T, K /*extends keyof T*/>(names: List<K>, obj: T): Partial<T>;
// pickAll<T, K /*extends keyof T*/>(names: List<K>): (obj: T) => Partial<T>;
// // pickAll<T, K /*extends keyof T*/>: CurriedFunction2<List<K>, T, Partial<T>>;
// /**
// * Returns a partial copy of an object containing only the keys that satisfy the supplied predicate.
// */
// pickBy<T>(pred: ObjPred<any>, obj: T): Partial<T>;
// pickBy(pred: ObjPred<any>): <T>(obj: T) => Partial<T>;
// // pickBy<T>: CurriedFunction2<ObjPred<any>, T, Partial<T>>;
/**
* Performs left-to-right function composition.
* The leftmost function may have any arity; the remaining functions must be unary.
* In some libraries this function is named sequence.
* Note: The result of pipe is not automatically curried.
*/
// Overloads enumerate chains of 1 to 9 functions; within each chain length,
// the leftmost function's arity ranges over 1 to 4 arguments (V0..V3).
pipe<V0, T1>(fn0: (x0: V0) => T1): (x0: V0) => T1;
pipe<V0, V1, T1>(fn0: (x0: V0, x1: V1) => T1): (x0: V0, x1: V1) => T1;
pipe<V0, V1, V2, T1>(fn0: (x0: V0, x1: V1, x2: V2) => T1): (x0: V0, x1: V1, x2: V2) => T1;
pipe<V0, V1, V2, V3, T1>(fn0: (x0: V0, x1: V1, x2: V2, x3: V3) => T1): (x0: V0, x1: V1, x2: V2, x3: V3) => T1;
pipe<V0, T1, T2>(fn0: (x0: V0) => T1, fn1: (x: T1) => T2): (x0: V0) => T2;
pipe<V0, V1, T1, T2>(fn0: (x0: V0, x1: V1) => T1, fn1: (x: T1) => T2): (x0: V0, x1: V1) => T2;
pipe<V0, V1, V2, T1, T2>(fn0: (x0: V0, x1: V1, x2: V2) => T1, fn1: (x: T1) => T2): (x0: V0, x1: V1, x2: V2) => T2;
pipe<V0, V1, V2, V3, T1, T2>(fn0: (x0: V0, x1: V1, x2: V2, x3: V3) => T1, fn1: (x: T1) => T2): (x0: V0, x1: V1, x2: V2, x3: V3) => T2;
pipe<V0, T1, T2, T3>(fn0: (x0: V0) => T1, fn1: (x: T1) => T2, fn2: (x: T2) => T3): (x0: V0) => T3;
pipe<V0, V1, T1, T2, T3>(fn0: (x0: V0, x1: V1) => T1, fn1: (x: T1) => T2, fn2: (x: T2) => T3): (x0: V0, x1: V1) => T3;
pipe<V0, V1, V2, T1, T2, T3>(fn0: (x0: V0, x1: V1, x2: V2) => T1, fn1: (x: T1) => T2, fn2: (x: T2) => T3): (x0: V0, x1: V1, x2: V2) => T3;
pipe<V0, V1, V2, V3, T1, T2, T3>(fn0: (x0: V0, x1: V1, x2: V2, x3: V3) => T1, fn1: (x: T1) => T2, fn2: (x: T2) => T3): (x0: V0, x1: V1, x2: V2, x3: V3) => T3;
pipe<V0, T1, T2, T3, T4>(fn0: (x0: V0) => T1, fn1: (x: T1) => T2, fn2: (x: T2) => T3, fn3: (x: T3) => T4): (x0: V0) => T4;
pipe<V0, V1, T1, T2, T3, T4>(fn0: (x0: V0, x1: V1) => T1, fn1: (x: T1) => T2, fn2: (x: T2) => T3, fn3: (x: T3) => T4): (x0: V0, x1: V1) => T4;
pipe<V0, V1, V2, T1, T2, T3, T4>(fn0: (x0: V0, x1: V1, x2: V2) => T1, fn1: (x: T1) => T2, fn2: (x: T2) => T3, fn3: (x: T3) => T4): (x0: V0, x1: V1, x2: V2) => T4;
pipe<V0, V1, V2, V3, T1, T2, T3, T4>(fn0: (x0: V0, x1: V1, x2: V2, x3: V3) => T1, fn1: (x: T1) => T2, fn2: (x: T2) => T3, fn3: (x: T3) => T4): (x0: V0, x1: V1, x2: V2, x3: V3) => T4;
pipe<V0, T1, T2, T3, T4, T5>(fn0: (x0: V0) => T1, fn1: (x: T1) => T2, fn2: (x: T2) => T3, fn3: (x: T3) => T4, fn4: (x: T4) => T5): (x0: V0) => T5;
pipe<V0, V1, T1, T2, T3, T4, T5>(fn0: (x0: V0, x1: V1) => T1, fn1: (x: T1) => T2, fn2: (x: T2) => T3, fn3: (x: T3) => T4, fn4: (x: T4) => T5): (x0: V0, x1: V1) => T5;
pipe<V0, V1, V2, T1, T2, T3, T4, T5>(fn0: (x0: V0, x1: V1, x2: V2) => T1, fn1: (x: T1) => T2, fn2: (x: T2) => T3, fn3: (x: T3) => T4, fn4: (x: T4) => T5): (x0: V0, x1: V1, x2: V2) => T5;
pipe<V0, V1, V2, V3, T1, T2, T3, T4, T5>(fn0: (x0: V0, x1: V1, x2: V2, x3: V3) => T1, fn1: (x: T1) => T2, fn2: (x: T2) => T3, fn3: (x: T3) => T4, fn4: (x: T4) => T5): (x0: V0, x1: V1, x2: V2, x3: V3) => T5;
pipe<V0, T1, T2, T3, T4, T5, T6>(fn0: (x0: V0) => T1, fn1: (x: T1) => T2, fn2: (x: T2) => T3, fn3: (x: T3) => T4, fn4: (x: T4) => T5, fn5: (x: T5) => T6): (x0: V0) => T6;
pipe<V0, V1, T1, T2, T3, T4, T5, T6>(fn0: (x0: V0, x1: V1) => T1, fn1: (x: T1) => T2, fn2: (x: T2) => T3, fn3: (x: T3) => T4, fn4: (x: T4) => T5, fn5: (x: T5) => T6): (x0: V0, x1: V1) => T6;
pipe<V0, V1, V2, T1, T2, T3, T4, T5, T6>(fn0: (x0: V0, x1: V1, x2: V2) => T1, fn1: (x: T1) => T2, fn2: (x: T2) => T3, fn3: (x: T3) => T4, fn4: (x: T4) => T5, fn5: (x: T5) => T6): (x0: V0, x1: V1, x2: V2) => T6;
pipe<V0, V1, V2, V3, T1, T2, T3, T4, T5, T6>(fn0: (x0: V0, x1: V1, x2: V2, x3: V3) => T1, fn1: (x: T1) => T2, fn2: (x: T2) => T3, fn3: (x: T3) => T4, fn4: (x: T4) => T5, fn5: (x: T5) => T6): (x0: V0, x1: V1, x2: V2, x3: V3) => T6;
pipe<V0, T1, T2, T3, T4, T5, T6, T7>(fn0: (x0: V0) => T1, fn1: (x: T1) => T2, fn2: (x: T2) => T3, fn3: (x: T3) => T4, fn4: (x: T4) => T5, fn5: (x: T5) => T6, fn6: (x: T6) => T7): (x0: V0) => T7;
pipe<V0, V1, T1, T2, T3, T4, T5, T6, T7>(fn0: (x0: V0, x1: V1) => T1, fn1: (x: T1) => T2, fn2: (x: T2) => T3, fn3: (x: T3) => T4, fn4: (x: T4) => T5, fn5: (x: T5) => T6, fn6: (x: T6) => T7): (x0: V0, x1: V1) => T7;
pipe<V0, V1, V2, T1, T2, T3, T4, T5, T6, T7>(fn0: (x0: V0, x1: V1, x2: V2) => T1, fn1: (x: T1) => T2, fn2: (x: T2) => T3, fn3: (x: T3) => T4, fn4: (x: T4) => T5, fn5: (x: T5) => T6, fn6: (x: T6) => T7): (x0: V0, x1: V1, x2: V2) => T7;
pipe<V0, V1, V2, V3, T1, T2, T3, T4, T5, T6, T7>(fn0: (x0: V0, x1: V1, x2: V2, x3: V3) => T1, fn1: (x: T1) => T2, fn2: (x: T2) => T3, fn3: (x: T3) => T4, fn4: (x: T4) => T5, fn5: (x: T5) => T6, fn6: (x: T6) => T7): (x0: V0, x1: V1, x2: V2, x3: V3) => T7;
pipe<V0, T1, T2, T3, T4, T5, T6, T7, T8>(fn0: (x0: V0) => T1, fn1: (x: T1) => T2, fn2: (x: T2) => T3, fn3: (x: T3) => T4, fn4: (x: T4) => T5, fn5: (x: T5) => T6, fn6: (x: T6) => T7, fn7: (x: T7) => T8): (x0: V0) => T8;
pipe<V0, V1, T1, T2, T3, T4, T5, T6, T7, T8>(fn0: (x0: V0, x1: V1) => T1, fn1: (x: T1) => T2, fn2: (x: T2) => T3, fn3: (x: T3) => T4, fn4: (x: T4) => T5, fn5: (x: T5) => T6, fn6: (x: T6) => T7, fn7: (x: T7) => T8): (x0: V0, x1: V1) => T8;
pipe<V0, V1, V2, T1, T2, T3, T4, T5, T6, T7, T8>(fn0: (x0: V0, x1: V1, x2: V2) => T1, fn1: (x: T1) => T2, fn2: (x: T2) => T3, fn3: (x: T3) => T4, fn4: (x: T4) => T5, fn5: (x: T5) => T6, fn6: (x: T6) => T7, fn7: (x: T7) => T8): (x0: V0, x1: V1, x2: V2) => T8;
pipe<V0, V1, V2, V3, T1, T2, T3, T4, T5, T6, T7, T8>(fn0: (x0: V0, x1: V1, x2: V2, x3: V3) => T1, fn1: (x: T1) => T2, fn2: (x: T2) => T3, fn3: (x: T3) => T4, fn4: (x: T4) => T5, fn5: (x: T5) => T6, fn6: (x: T6) => T7, fn7: (x: T7) => T8): (x0: V0, x1: V1, x2: V2, x3: V3) => T8;
pipe<V0, T1, T2, T3, T4, T5, T6, T7, T8, T9>(fn0: (x0: V0) => T1, fn1: (x: T1) => T2, fn2: (x: T2) => T3, fn3: (x: T3) => T4, fn4: (x: T4) => T5, fn5: (x: T5) => T6, fn6: (x: T6) => T7, fn7: (x: T7) => T8, fn8: (x: T8) => T9): (x0: V0) => T9;
pipe<V0, V1, T1, T2, T3, T4, T5, T6, T7, T8, T9>(fn0: (x0: V0, x1: V1) => T1, fn1: (x: T1) => T2, fn2: (x: T2) => T3, fn3: (x: T3) => T4, fn4: (x: T4) => T5, fn5: (x: T5) => T6, fn6: (x: T6) => T7, fn7: (x: T7) => T8, fn8: (x: T8) => T9): (x0: V0, x1: V1) => T9;
pipe<V0, V1, V2, T1, T2, T3, T4, T5, T6, T7, T8, T9>(fn0: (x0: V0, x1: V1, x2: V2) => T1, fn1: (x: T1) => T2, fn2: (x: T2) => T3, fn3: (x: T3) => T4, fn4: (x: T4) => T5, fn5: (x: T5) => T6, fn6: (x: T6) => T7, fn7: (x: T7) => T8, fn8: (x: T8) => T9): (x0: V0, x1: V1, x2: V2) => T9;
pipe<V0, V1, V2, V3, T1, T2, T3, T4, T5, T6, T7, T8, T9>(fn0: (x0: V0, x1: V1, x2: V2, x3: V3) => T1, fn1: (x: T1) => T2, fn2: (x: T2) => T3, fn3: (x: T3) => T4, fn4: (x: T4) => T5, fn5: (x: T5) => T6, fn6: (x: T6) => T7, fn7: (x: T7) => T8, fn8: (x: T8) => T9): (x0: V0, x1: V1, x2: V2, x3: V3) => T9;
/**
* Performs left-to-right composition of one or more Promise-returning functions. The leftmost function may have any arity; the remaining functions must be unary.
*/
// Same overload scheme as `pipe` (1-9 functions, leftmost arity 1-4), but the
// first function must return a Promise and each later step may return either
// `Promise<T>` or a plain `T`; the composed function always returns a Promise.
pipeP<V0, T1>(fn0: (x0: V0) => Promise<T1>): (x0: V0) => Promise<T1>;
pipeP<V0, V1, T1>(fn0: (x0: V0, x1: V1) => Promise<T1>): (x0: V0, x1: V1) => Promise<T1>;
pipeP<V0, V1, V2, T1>(fn0: (x0: V0, x1: V1, x2: V2) => Promise<T1>): (x0: V0, x1: V1, x2: V2) => Promise<T1>;
pipeP<V0, V1, V2, V3, T1>(fn0: (x0: V0, x1: V1, x2: V2, x3: V3) => Promise<T1>): (x0: V0, x1: V1, x2: V2, x3: V3) => Promise<T1>;
pipeP<V0, T1, T2>(fn0: (x0: V0) => Promise<T1>, fn1: (x: T1) => Promise<T2>|T2): (x0: V0) => Promise<T2>;
pipeP<V0, V1, T1, T2>(fn0: (x0: V0, x1: V1) => Promise<T1>, fn1: (x: T1) => Promise<T2>|T2): (x0: V0, x1: V1) => Promise<T2>;
pipeP<V0, V1, V2, T1, T2>(fn0: (x0: V0, x1: V1, x2: V2) => Promise<T1>, fn1: (x: T1) => Promise<T2>|T2): (x0: V0, x1: V1, x2: V2) => Promise<T2>;
pipeP<V0, V1, V2, V3, T1, T2>(fn0: (x0: V0, x1: V1, x2: V2, x3: V3) => Promise<T1>, fn1: (x: T1) => Promise<T2>|T2): (x0: V0, x1: V1, x2: V2, x3: V3) => Promise<T2>;
pipeP<V0, T1, T2, T3>(fn0: (x0: V0) => Promise<T1>, fn1: (x: T1) => Promise<T2>|T2, fn2: (x: T2) => Promise<T3>|T3): (x0: V0) => Promise<T3>;
pipeP<V0, V1, T1, T2, T3>(fn0: (x0: V0, x1: V1) => Promise<T1>, fn1: (x: T1) => Promise<T2>|T2, fn2: (x: T2) => Promise<T3>|T3): (x0: V0, x1: V1) => Promise<T3>;
pipeP<V0, V1, V2, T1, T2, T3>(fn0: (x0: V0, x1: V1, x2: V2) => Promise<T1>, fn1: (x: T1) => Promise<T2>|T2, fn2: (x: T2) => Promise<T3>|T3): (x0: V0, x1: V1, x2: V2) => Promise<T3>;
pipeP<V0, V1, V2, V3, T1, T2, T3>(fn0: (x0: V0, x1: V1, x2: V2, x3: V3) => Promise<T1>, fn1: (x: T1) => Promise<T2>|T2, fn2: (x: T2) => Promise<T3>|T3): (x0: V0, x1: V1, x2: V2, x3: V3) => Promise<T3>;
pipeP<V0, T1, T2, T3, T4>(fn0: (x0: V0) => Promise<T1>, fn1: (x: T1) => Promise<T2>|T2, fn2: (x: T2) => Promise<T3>|T3, fn3: (x: T3) => Promise<T4>|T4): (x0: V0) => Promise<T4>;
pipeP<V0, V1, T1, T2, T3, T4>(fn0: (x0: V0, x1: V1) => Promise<T1>, fn1: (x: T1) => Promise<T2>|T2, fn2: (x: T2) => Promise<T3>|T3, fn3: (x: T3) => Promise<T4>|T4): (x0: V0, x1: V1) => Promise<T4>;
pipeP<V0, V1, V2, T1, T2, T3, T4>(fn0: (x0: V0, x1: V1, x2: V2) => Promise<T1>, fn1: (x: T1) => Promise<T2>|T2, fn2: (x: T2) => Promise<T3>|T3, fn3: (x: T3) => Promise<T4>|T4): (x0: V0, x1: V1, x2: V2) => Promise<T4>;
pipeP<V0, V1, V2, V3, T1, T2, T3, T4>(fn0: (x0: V0, x1: V1, x2: V2, x3: V3) => Promise<T1>, fn1: (x: T1) => Promise<T2>|T2, fn2: (x: T2) => Promise<T3>|T3, fn3: (x: T3) => Promise<T4>|T4): (x0: V0, x1: V1, x2: V2, x3: V3) => Promise<T4>;
pipeP<V0, T1, T2, T3, T4, T5>(fn0: (x0: V0) => Promise<T1>, fn1: (x: T1) => Promise<T2>|T2, fn2: (x: T2) => Promise<T3>|T3, fn3: (x: T3) => Promise<T4>|T4, fn4: (x: T4) => Promise<T5>|T5): (x0: V0) => Promise<T5>;
pipeP<V0, V1, T1, T2, T3, T4, T5>(fn0: (x0: V0, x1: V1) => Promise<T1>, fn1: (x: T1) => Promise<T2>|T2, fn2: (x: T2) => Promise<T3>|T3, fn3: (x: T3) => Promise<T4>|T4, fn4: (x: T4) => Promise<T5>|T5): (x0: V0, x1: V1) => Promise<T5>;
pipeP<V0, V1, V2, T1, T2, T3, T4, T5>(fn0: (x0: V0, x1: V1, x2: V2) => Promise<T1>, fn1: (x: T1) => Promise<T2>|T2, fn2: (x: T2) => Promise<T3>|T3, fn3: (x: T3) => Promise<T4>|T4, fn4: (x: T4) => Promise<T5>|T5): (x0: V0, x1: V1, x2: V2) => Promise<T5>;
pipeP<V0, V1, V2, V3, T1, T2, T3, T4, T5>(fn0: (x0: V0, x1: V1, x2: V2, x3: V3) => Promise<T1>, fn1: (x: T1) => Promise<T2>|T2, fn2: (x: T2) => Promise<T3>|T3, fn3: (x: T3) => Promise<T4>|T4, fn4: (x: T4) => Promise<T5>|T5): (x0: V0, x1: V1, x2: V2, x3: V3) => Promise<T5>;
pipeP<V0, T1, T2, T3, T4, T5, T6>(fn0: (x0: V0) => Promise<T1>, fn1: (x: T1) => Promise<T2>|T2, fn2: (x: T2) => Promise<T3>|T3, fn3: (x: T3) => Promise<T4>|T4, fn4: (x: T4) => Promise<T5>|T5, fn5: (x: T5) => Promise<T6>|T6): (x0: V0) => Promise<T6>;
pipeP<V0, V1, T1, T2, T3, T4, T5, T6>(fn0: (x0: V0, x1: V1) => Promise<T1>, fn1: (x: T1) => Promise<T2>|T2, fn2: (x: T2) => Promise<T3>|T3, fn3: (x: T3) => Promise<T4>|T4, fn4: (x: T4) => Promise<T5>|T5, fn5: (x: T5) => Promise<T6>|T6): (x0: V0, x1: V1) => Promise<T6>;
pipeP<V0, V1, V2, T1, T2, T3, T4, T5, T6>(fn0: (x0: V0, x1: V1, x2: V2) => Promise<T1>, fn1: (x: T1) => Promise<T2>|T2, fn2: (x: T2) => Promise<T3>|T3, fn3: (x: T3) => Promise<T4>|T4, fn4: (x: T4) => Promise<T5>|T5, fn5: (x: T5) => Promise<T6>|T6): (x0: V0, x1: V1, x2: V2) => Promise<T6>;
pipeP<V0, V1, V2, V3, T1, T2, T3, T4, T5, T6>(fn0: (x0: V0, x1: V1, x2: V2, x3: V3) => Promise<T1>, fn1: (x: T1) => Promise<T2>|T2, fn2: (x: T2) => Promise<T3>|T3, fn3: (x: T3) => Promise<T4>|T4, fn4: (x: T4) => Promise<T5>|T5, fn5: (x: T5) => Promise<T6>|T6): (x0: V0, x1: V1, x2: V2, x3: V3) => Promise<T6>;
pipeP<V0, T1, T2, T3, T4, T5, T6, T7>(fn0: (x0: V0) => Promise<T1>, fn1: (x: T1) => Promise<T2>|T2, fn2: (x: T2) => Promise<T3>|T3, fn3: (x: T3) => Promise<T4>|T4, fn4: (x: T4) => Promise<T5>|T5, fn5: (x: T5) => Promise<T6>|T6, fn6: (x: T6) => Promise<T7>|T7): (x0: V0) => Promise<T7>;
pipeP<V0, V1, T1, T2, T3, T4, T5, T6, T7>(fn0: (x0: V0, x1: V1) => Promise<T1>, fn1: (x: T1) => Promise<T2>|T2, fn2: (x: T2) => Promise<T3>|T3, fn3: (x: T3) => Promise<T4>|T4, fn4: (x: T4) => Promise<T5>|T5, fn5: (x: T5) => Promise<T6>|T6, fn6: (x: T6) => Promise<T7>|T7): (x0: V0, x1: V1) => Promise<T7>;
pipeP<V0, V1, V2, T1, T2, T3, T4, T5, T6, T7>(fn0: (x0: V0, x1: V1, x2: V2) => Promise<T1>, fn1: (x: T1) => Promise<T2>|T2, fn2: (x: T2) => Promise<T3>|T3, fn3: (x: T3) => Promise<T4>|T4, fn4: (x: T4) => Promise<T5>|T5, fn5: (x: T5) => Promise<T6>|T6, fn6: (x: T6) => Promise<T7>|T7): (x0: V0, x1: V1, x2: V2) => Promise<T7>;
pipeP<V0, V1, V2, V3, T1, T2, T3, T4, T5, T6, T7>(fn0: (x0: V0, x1: V1, x2: V2, x3: V3) => Promise<T1>, fn1: (x: T1) => Promise<T2>|T2, fn2: (x: T2) => Promise<T3>|T3, fn3: (x: T3) => Promise<T4>|T4, fn4: (x: T4) => Promise<T5>|T5, fn5: (x: T5) => Promise<T6>|T6, fn6: (x: T6) => Promise<T7>|T7): (x0: V0, x1: V1, x2: V2, x3: V3) => Promise<T7>;
pipeP<V0, T1, T2, T3, T4, T5, T6, T7, T8>(fn0: (x0: V0) => Promise<T1>, fn1: (x: T1) => Promise<T2>|T2, fn2: (x: T2) => Promise<T3>|T3, fn3: (x: T3) => Promise<T4>|T4, fn4: (x: T4) => Promise<T5>|T5, fn5: (x: T5) => Promise<T6>|T6, fn6: (x: T6) => Promise<T7>|T7, fn7: (x: T7) => Promise<T8>|T8): (x0: V0) => Promise<T8>;
pipeP<V0, V1, T1, T2, T3, T4, T5, T6, T7, T8>(fn0: (x0: V0, x1: V1) => Promise<T1>, fn1: (x: T1) => Promise<T2>|T2, fn2: (x: T2) => Promise<T3>|T3, fn3: (x: T3) => Promise<T4>|T4, fn4: (x: T4) => Promise<T5>|T5, fn5: (x: T5) => Promise<T6>|T6, fn6: (x: T6) => Promise<T7>|T7, fn7: (x: T7) => Promise<T8>|T8): (x0: V0, x1: V1) => Promise<T8>;
pipeP<V0, V1, V2, T1, T2, T3, T4, T5, T6, T7, T8>(fn0: (x0: V0, x1: V1, x2: V2) => Promise<T1>, fn1: (x: T1) => Promise<T2>|T2, fn2: (x: T2) => Promise<T3>|T3, fn3: (x: T3) => Promise<T4>|T4, fn4: (x: T4) => Promise<T5>|T5, fn5: (x: T5) => Promise<T6>|T6, fn6: (x: T6) => Promise<T7>|T7, fn7: (x: T7) => Promise<T8>|T8): (x0: V0, x1: V1, x2: V2) => Promise<T8>;
pipeP<V0, V1, V2, V3, T1, T2, T3, T4, T5, T6, T7, T8>(fn0: (x0: V0, x1: V1, x2: V2, x3: V3) => Promise<T1>, fn1: (x: T1) => Promise<T2>|T2, fn2: (x: T2) => Promise<T3>|T3, fn3: (x: T3) => Promise<T4>|T4, fn4: (x: T4) => Promise<T5>|T5, fn5: (x: T5) => Promise<T6>|T6, fn6: (x: T6) => Promise<T7>|T7, fn7: (x: T7) => Promise<T8>|T8): (x0: V0, x1: V1, x2: V2, x3: V3) => Promise<T8>;
pipeP<V0, T1, T2, T3, T4, T5, T6, T7, T8, T9>(fn0: (x0: V0) => Promise<T1>, fn1: (x: T1) => Promise<T2>|T2, fn2: (x: T2) => Promise<T3>|T3, fn3: (x: T3) => Promise<T4>|T4, fn4: (x: T4) => Promise<T5>|T5, fn5: (x: T5) => Promise<T6>|T6, fn6: (x: T6) => Promise<T7>|T7, fn7: (x: T7) => Promise<T8>|T8, fn8: (x: T8) => Promise<T9>|T9): (x0: V0) => Promise<T9>;
pipeP<V0, V1, T1, T2, T3, T4, T5, T6, T7, T8, T9>(fn0: (x0: V0, x1: V1) => Promise<T1>, fn1: (x: T1) => Promise<T2>|T2, fn2: (x: T2) => Promise<T3>|T3, fn3: (x: T3) => Promise<T4>|T4, fn4: (x: T4) => Promise<T5>|T5, fn5: (x: T5) => Promise<T6>|T6, fn6: (x: T6) => Promise<T7>|T7, fn7: (x: T7) => Promise<T8>|T8, fn8: (x: T8) => Promise<T9>|T9): (x0: V0, x1: V1) => Promise<T9>;
pipeP<V0, V1, V2, T1, T2, T3, T4, T5, T6, T7, T8, T9>(fn0: (x0: V0, x1: V1, x2: V2) => Promise<T1>, fn1: (x: T1) => Promise<T2>|T2, fn2: (x: T2) => Promise<T3>|T3, fn3: (x: T3) => Promise<T4>|T4, fn4: (x: T4) => Promise<T5>|T5, fn5: (x: T5) => Promise<T6>|T6, fn6: (x: T6) => Promise<T7>|T7, fn7: (x: T7) => Promise<T8>|T8, fn8: (x: T8) => Promise<T9>|T9): (x0: V0, x1: V1, x2: V2) => Promise<T9>;
pipeP<V0, V1, V2, V3, T1, T2, T3, T4, T5, T6, T7, T8, T9>(fn0: (x0: V0, x1: V1, x2: V2, x3: V3) => Promise<T1>, fn1: (x: T1) => Promise<T2>|T2, fn2: (x: T2) => Promise<T3>|T3, fn3: (x: T3) => Promise<T4>|T4, fn4: (x: T4) => Promise<T5>|T5, fn5: (x: T5) => Promise<T6>|T6, fn6: (x: T6) => Promise<T7>|T7, fn7: (x: T7) => Promise<T8>|T8, fn8: (x: T8) => Promise<T9>|T9): (x0: V0, x1: V1, x2: V2, x3: V3) => Promise<T9>;
/**
* Returns the left-to-right Kleisli composition of the provided functions, each of which must return a value of a type supported by chain.
*/
// skipped extra params on fn0 -- not mentioned in the docs!
// Overloads enumerate chains of 1-9 Chain-returning functions; unlike `pipe`,
// the leftmost function here is unary (see note above).
pipeK<V, T1>(fn0: (v: Chain<V>) => Chain<T1>): (v: V) => Chain<T1>;
pipeK<V, T1, T2>(fn0: (v: Chain<V>) => Chain<T1>, fn1: (x: T1) => Chain<T2>): (v: V) => Chain<T2>;
pipeK<V, T1, T2, T3>(fn0: (v: Chain<V>) => Chain<T1>, fn1: (x: T1) => Chain<T2>, fn2: (x: T2) => Chain<T3>): (v: V) => Chain<T3>;
pipeK<V, T1, T2, T3, T4>(fn0: (v: Chain<V>) => Chain<T1>, fn1: (x: T1) => Chain<T2>, fn2: (x: T2) => Chain<T3>, fn3: (x: T3) => Chain<T4>): (v: V) => Chain<T4>;
pipeK<V, T1, T2, T3, T4, T5>(fn0: (v: Chain<V>) => Chain<T1>, fn1: (x: T1) => Chain<T2>, fn2: (x: T2) => Chain<T3>, fn3: (x: T3) => Chain<T4>, fn4: (x: T4) => Chain<T5>): (v: V) => Chain<T5>;
pipeK<V, T1, T2, T3, T4, T5, T6>(fn0: (v: Chain<V>) => Chain<T1>, fn1: (x: T1) => Chain<T2>, fn2: (x: T2) => Chain<T3>, fn3: (x: T3) => Chain<T4>, fn4: (x: T4) => Chain<T5>, fn5: (x: T5) => Chain<T6>): (v: V) => Chain<T6>;
pipeK<V, T1, T2, T3, T4, T5, T6, T7>(fn0: (v: Chain<V>) => Chain<T1>, fn1: (x: T1) => Chain<T2>, fn2: (x: T2) => Chain<T3>, fn3: (x: T3) => Chain<T4>, fn4: (x: T4) => Chain<T5>, fn5: (x: T5) => Chain<T6>, fn6: (x: T6) => Chain<T7>): (v: V) => Chain<T7>;
pipeK<V, T1, T2, T3, T4, T5, T6, T7, T8>(fn0: (v: Chain<V>) => Chain<T1>, fn1: (x: T1) => Chain<T2>, fn2: (x: T2) => Chain<T3>, fn3: (x: T3) => Chain<T4>, fn4: (x: T4) => Chain<T5>, fn5: (x: T5) => Chain<T6>, fn6: (x: T6) => Chain<T7>, fn7: (x: T7) => Chain<T8>): (v: V) => Chain<T8>;
pipeK<V, T1, T2, T3, T4, T5, T6, T7, T8, T9>(fn0: (v: Chain<V>) => Chain<T1>, fn1: (x: T1) => Chain<T2>, fn2: (x: T2) => Chain<T3>, fn3: (x: T3) => Chain<T4>, fn4: (x: T4) => Chain<T5>, fn5: (x: T5) => Chain<T6>, fn6: (x: T6) => Chain<T7>, fn7: (x: T7) => Chain<T8>, fn8: (x: T8) => Chain<T9>): (v: V) => Chain<T9>;
/**
* Returns a new list by plucking the same named property off all objects in the list supplied.
*/
// hard to mix cuz different generics
// infer
// Inferring overloads: element type U comes from the Struct<U> constraint.
pluck<U, T extends Struct<U>, K extends keyof T>(p: K, list: List<T>): U[]; // fails on number keys
pluck<U, T extends Struct<U>, K extends keyof T>(p: K): (list: List<T>) => U[]; // doesn't work, T info late
// pluck<T extends Struct<any>, K extends keyof T>: CurriedFunction2<K, List<T>, T[K][]>;
// supply return object type manually when unable to infer it...
// Fallback overloads: caller states the element type T explicitly.
pluck<T>(p: Prop, list: Struct<any>[]): T[];
pluck(p: Prop): <T>(list: Struct<any>[]) => T[];
// pluck<T>: CurriedFunction2<Prop, Struct<any>[], T[]>;
/**
* Returns a new list with the given element at the front, followed by the contents of the
* list.
*/
// Uncurried and curried forms; the input list is not mutated (a new T[] is returned).
prepend<T>(el: T, list: List<T>): T[];
prepend<T>(el: T): (list: List<T>) => T[];
// prepend<T>: CurriedFunction2<T, List<T>, T[]>;
/**
* Multiplies together all the elements of a list.
*/
product(list: List<number>): number;
/**
* Reasonable analog to SQL `select` statement.
*/
// hard to mix cuz different generics
// infer
// NOTE(review): returning T[] overstates the result -- only the listed props
// survive; the commented Pick<T, K>[] variant below is the precise form but
// was not adopted. TODO confirm before tightening.
project<T, K extends Prop>(props: List<K>, objs: List<T>): T[];
project<T, K extends Prop>(props: List<K>): (objs: List<T>) => T[]; // T info probably too late
// project<T, K extends keyof T>: CurriedFunction2<List<K>, List<T>, Pick<T, K>[]>;
// supply return object type manually when unable to infer it...
// Fallback overloads: caller states the projected row type U explicitly.
project<T,U>(props: List<Prop>, objs: List<T>): U[];
project(props: List<Prop>): <T,U>(objs: List<T>) => U[];
// project<T,U>: CurriedFunction2<List<Prop>, List<T>, U[]>;
/**
 * Returns a function that when supplied an object returns the indicated property of that object, if it exists.
 */
// keyof version (uncurried): ties the key K to the object so the exact value type T[K]
// is returned. (Previously this overload was `prop<T>(p: Prop, obj: T): T`, which wrongly
// declared the *whole object* type as the result and shadowed the Record overload below.)
prop<T, K extends keyof T>(p: K, obj: T): T[K];
// prop<T, K extends keyof T>(p: K): (obj: T) => T[K]; // T info late
// prop<T, K extends keyof T>: CurriedFunction2<K, T, T[K]>;
// prop<K extends Prop>(p: K): <T, K extends keyof T>(obj: T) => T[K]; // K redefined, fails
// prop<T, K extends Prop>: CurriedFunction2<K, T, T[K]>;
// Record version, more curry-friendly
prop<K extends string, V, T extends Obj<V>>(p: K, obj: T): V; // uncurried adds value only for {} from e.g. degeneration
prop<K extends string>(p: K): <V, T extends Obj<V>>(obj: T) => V;
// prop<K extends string, V, T extends Record<K,V>>: CurriedFunction2<K, T, V>;
/**
 * Determines whether the given property of an object has a specific
 * value according to strict equality (`===`). Most likely used to
 * filter a list.
 */
// propEq<T extends Struct<any>>(name: Prop, val: any, obj: T): boolean;
// propEq<T extends Struct<any>>(name: Prop, val: any): (obj: T) => boolean;
// propEq<T extends Struct<any>>(name: Prop): CurriedFunction2<any, T, boolean>;
// // propEq<T extends Struct<any>>(name: Prop): (val: any, obj: T) => boolean;
// // propEq<T extends Struct<any>>(name: Prop): (val: any) => (obj: T) => boolean;
// // propEq<T extends Struct<any>>: CurriedFunction3<Prop, any, T, boolean>;
// base: fully curried via object-literal overloads so each partial application
// remains callable at every remaining arity.
propEq<T extends Struct<any>>(name: Prop, val: any, obj: T): boolean;
propEq(name: Prop, val: any):{
<T extends Struct<any>>(obj: T): boolean;
};
propEq(name: Prop):{
<T extends Struct<any>>(val: any, obj: T): boolean;
(val: any):{
<T extends Struct<any>>(obj: T): boolean;
};
};
/**
 * Returns true if the specified object property is of the given type; false otherwise.
 * The first overload groups act as type guards, refining `obj` to carry the checked type.
 */
// Record
propIs<T extends Function, K extends string, V, U extends Obj<V>>(type: T, name: K, obj: U): obj is (U & Obj<T>);
propIs<T extends Function, K extends string>(type: T, name: K): <V, U extends Obj<V>>(obj: U) => obj is (U & Obj<T>);
// propIs<T extends Function>(type: T): {
// <K extends string, V, U extends Record<K,V>>(name: K, obj: U): obj is (U & Record<K, T>);
// <K extends string>(name: K): <V, U extends Record<K,V>>(obj: U) => obj is (U & Record<K, T>);
// }
// propIs<T extends Function, K extends string, V, U extends Record<K,V>>: CurriedFunction3<T, K, U, V is (V & Record<K, T>)>; // obj is? name unavailable...
// inference, fails if name and object are supplied separately
propIs<T extends Function, V>(type: T, name: Prop, obj: V): obj is (V & Obj<T>);
// propIs<T extends Function, V, K extends keyof V>(type: T, name: K): (obj: V) => obj is (V & Record<K, T>); // object info not available in time :(
// propIs<T extends Function>(type: T): {
// <V, K extends keyof V>(name: K, obj: V): obj is (V & Record<K, T>);
// <V, K extends keyof V>(name: K): (obj: V) => obj is (V & Record<K, T>); // object info not available in time :(
// }
// propIs<T extends Function, V, K extends keyof V>: CurriedFunction3<T, K, V, V is (V & Record<K, T>)>; // obj is? name unavailable...
// curry-friendlier fallback: plain boolean result, no narrowing.
propIs(type: Function, name: Prop, obj: Struct<any>): boolean;
propIs(type: Function, name: Prop): (obj: Struct<any>) => boolean;
propIs(type: Function): CurriedFunction2<Prop, Struct<any>, boolean>;
// propIs(type: Function): {
// (name: Prop, obj: Struct<any>): boolean;
// (name: Prop): (obj: Struct<any>) => boolean;
// }
// propIs: CurriedFunction3<Function, Prop, Struct<any>, boolean>;
// mixed:
propIs<T extends Function>(type: T): {
// record
<K extends string, V, U extends Obj<V>>(name: K, obj: U): obj is (U & Obj<T>);
<K extends string>(name: K): <V, U extends Obj<V>>(obj: U) => obj is (U & Obj<T>);
// keyof
<V>(name: Prop, obj: V): obj is (V & Obj<T>);
// <V, K extends keyof V>(name: K): (obj: V) => obj is (V & Record<K, T>); // object info not available in time :(
};
/**
 * If the given, non-null object has an own property with the specified name, returns the value of that property.
 * Otherwise returns the provided default value.
 */
// // infer with Record (curry-friendly) -- can't use here: it'd error whenever the default should trigger
// propOr<T, K extends string, V, U extends Record<K,V>>(val: T, p: K, obj: U): V|T;
// propOr<T, K extends string>(val: T, p: K): <V, U extends Record<K,V>>(obj: U) => V|T;
// propOr<T, K extends string, V, U extends Record<K,V>>(val: T): CurriedFunction2<K, U, V|T>;
// // propOr<T, K extends string, V, U extends Record<K,V>>: CurriedFunction3<T, K, U, V|T>;
// infer with keyof (not curry-friendly), allowing a default value with a type different from the actual one
// (fixed: the result is the property value U[K] or the default T; previously these were
// typed as `K|T` -- i.e. the *key* type -- and constrained obj as Obj<K> instead of U)
propOr<T,U,K extends keyof U>(val: T, p: K, obj: U): U[K]|T;
propOr<T,U,K extends keyof U>(val: T, p: K): (obj: U) => U[K]|T; // generics too early?
propOr<T,U,K extends keyof U>(val: T): CurriedFunction2<K, U, U[K]|T>; // generics too early?
// propOr<T>(val: T): <U,K extends keyof U>(p: K, obj: U) => U[K]|T;
// propOr<T>(val: T): <U,K extends keyof U>(p: K) => (obj: U) => U[K]|T; // U too early?
// propOr<T,U,K extends keyof U>: CurriedFunction3<T, K, U, U[K]|T>;
// presume the value at the given key matches the type of the default value, bad but less likely to fail with currying
propOr<T>(val: T, p: Prop, obj: Struct<any>): T; // adds value only to protect against {} from e.g. generic degeneration
// (fixed: the curried forms previously declared a bare `Struct<any>` result; they must
// return functions that ultimately yield T, matching the file's curried-object convention)
propOr<T>(val: T, p: Prop):{
(obj: Struct<any>): T;
};
propOr<T>(val: T):{
(p: Prop, obj: Struct<any>): T;
(p: Prop):{
(obj: Struct<any>): T;
};
};
// // useless unbound generics?
// propOr<T,U,V>(val: T, p: Prop, obj: U): V;
// propOr<T>(val: T, p: Prop): <U,V>(obj: U) => V;
// // propOr<T>(val: T): <U,V>(p: Prop, obj: U) => V;
// propOr<T,U,V>(val: T): CurriedFunction2<Prop, U, V>;
// // propOr<T,U,V>: CurriedFunction3<T, Prop, U, V>;
/**
 * Acts as multiple `prop`: array of keys in, array of values out. Preserves order.
 */
// generic version
props<T>(ps: List<Prop>, obj: Struct<T>): T[];
props(ps: List<Prop>): <T>(obj: Struct<T>) => T[];
// props<T>: CurriedFunction2<List<Prop>, Struct<T>, T[]>;
// TODO: heterogeneous version
// Record-based?
// props<K extends keyof T, T extends Struct<any>>(ps: List<K>, obj: Struct<T>): ???;
/**
 * Returns true if the specified object property satisfies the given predicate; false otherwise.
 */
// // Record (curry-friendly)
// propSatisfies<V, K extends string, U extends Record<K,V>>(pred: Pred<V>, name: K, obj: U): boolean;
// propSatisfies<V, K extends string>(pred: Pred<V>, name: K): <U extends Record<K,V>>(obj: U) => boolean;
// propSatisfies<V, K extends string, U extends Record<K,V>>(pred: Pred<V>): CurriedFunction2<K, U, boolean>;
// // propSatisfies<V, K extends string, U extends Record<K,V>>: CurriedFunction3<Pred<V>, K, U, boolean>;
// // keyof, info too late on currying
// propSatisfies<U extends Struct<any>, K extends keyof U>(pred: Pred<U[K]>, name: Prop, obj: U): boolean;
// propSatisfies<T,U>(pred: Pred<T>, name: Prop): (obj: U) => boolean;
// propSatisfies<T,U>(pred: Pred<T>): CurriedFunction2<Prop, U, boolean>;
// // propSatisfies<T,U>: CurriedFunction3<Pred<T>, Prop, U, boolean>;
// Record (curry-friendly)
propSatisfies<V, K extends string, U extends Obj<V>>(pred: Pred<V>, name: K, obj: U): boolean;
propSatisfies<V, K extends string>(pred: Pred<V>, name: K):{
<U extends Obj<V>>(obj: U): boolean;
};
propSatisfies<V>(pred: Pred<V>):{
// NOTE(review): this overload constrains with Record<K, V> while every sibling
// uses Obj<V> -- confirm whether the stricter constraint is intentional.
<K extends string, U extends Record<K, V>>(name: K, obj: U): boolean;
<K extends string>(name: K):{
<U extends Obj<V>>(obj: U): boolean;
};
};
// keyof, info too late on currying
propSatisfies<T, U>(pred: Pred<T>, name: Prop, obj: U): boolean;
propSatisfies<T>(pred: Pred<T>, name: Prop):{
<U>(obj: U): boolean;
};
propSatisfies<T>(pred: Pred<T>):{
<U>(name: Prop, obj: U): boolean;
(name: Prop):{
<U>(obj: U): boolean;
};
};
/**
 * Returns a list of numbers from `from` (inclusive) to `to`
 * (exclusive). In mathematical terms, `range(a, b)` is equivalent to
 * the half-open interval `[a, b)`.
 */
range(from: number, to: number): number[];
range(from: number): (to: number) => number[];
// range: CurriedFunction2<number, number, number[]>;
/**
 * Returns a single item by iterating through the list, successively calling the iterator
 * function and passing it an accumulator value and the current value from the array, and
 * then passing the result to the next call.
 * The iterator may return a `Reduced` sentinel (from R.reduced) to short-circuit.
 */
// reduce<T, TResult, R extends List<T>>(fn: (acc: TResult, elem: T, idx: Number, list: R) => TResult|Reduced, acc: TResult, list: R): TResult;
// reduce<T, TResult, R extends List<T>>(fn: (acc: TResult, elem: T, idx: Number, list: R) => TResult|Reduced, acc: TResult): (list: R) => TResult;
// reduce<T, TResult, R extends List<T>>(fn: (acc: TResult, elem: T, idx: Number, list: R) => TResult|Reduced): CurriedFunction2<TResult, R, TResult>;
// // reduce<T, TResult, R extends List<T>>(fn: (acc: TResult, elem: T, idx: Number, list: R) => TResult|Reduced): (acc: TResult, list: R) => TResult;
// // reduce<T, TResult, R extends List<T>>: CurriedFunction3<(acc: TResult, elem: T, idx: Number, list: R) => TResult|Reduced, TResult, R, TResult>;
// base
reduce<T, TResult, R extends List<T>>(fn: (acc: TResult, elem: T, idx: number, list: R) => TResult|Reduced, acc: TResult, list: R): TResult;
reduce<T, TResult, R extends List<T>>(fn: (acc: TResult, elem: T, idx: number, list: R) => TResult|Reduced, acc: TResult):{
(list: R): TResult;
};
reduce<T, TResult, R extends List<T>>(fn: (acc: TResult, elem: T, idx: number, list: R) => TResult|Reduced):{
(acc: TResult, list: R): TResult;
(acc: TResult):{
(list: R): TResult;
};
};
/**
 * Groups the elements of the list according to the result of calling the String-returning function keyFn on each
 * element and reduces the elements of each group to a single value via the reducer function valueFn.
 */
// // reason for 'any' on acc: somehow empty accumulators like '[]' won't work well when matching
// reduceBy<T, TResult, R extends List<T>>(valueFn: (acc: TResult, elem: T, idx: number, list: R) => TResult, acc: TResult|any, keyFn: (elem: T) => string, list: R): TResult;
// reduceBy<T, TResult, R extends List<T>>(valueFn: (acc: TResult, elem: T, idx: number, list: R) => TResult, acc: TResult|any, keyFn: (elem: T) => string): (list: R) => TResult;
// reduceBy<T, TResult, R extends List<T>>(valueFn: (acc: TResult, elem: T, idx: number, list: R) => TResult, acc: TResult|any): CurriedFunction2<(elem: T) => string, R, TResult>;
// reduceBy<T, TResult, R extends List<T>>(valueFn: (acc: TResult, elem: T, idx: number, list: R) => TResult): CurriedFunction3<TResult|any, (elem: T) => string, R, TResult>;
// // reduceBy<T, TResult, R extends List<T>>: CurriedFunction4<(acc: TResult, elem: T, idx: number, list: R) => TResult, TResult|any, (elem: T) => string, R, TResult>;
// base: 4-ary, so the curried object ladder below covers every partial-application arity.
reduceBy<T, TResult, R extends List<T>>(valueFn: (acc: TResult, elem: T, idx: number, list: R) => TResult, acc: TResult|any, keyFn: (elem: T) => string, list: R): TResult;
reduceBy<T, TResult, R extends List<T>>(valueFn: (acc: TResult, elem: T, idx: number, list: R) => TResult, acc: TResult|any, keyFn: (elem: T) => string):{
(list: R): TResult;
};
reduceBy<T, TResult, R extends List<T>>(valueFn: (acc: TResult, elem: T, idx: number, list: R) => TResult, acc: TResult|any):{
(keyFn: (elem: T) => string, list: R): TResult;
(keyFn: (elem: T) => string):{
(list: R): TResult;
};
};
reduceBy<T, TResult, R extends List<T>>(valueFn: (acc: TResult, elem: T, idx: number, list: R) => TResult):{
(acc: TResult|any, keyFn: (elem: T) => string, list: R): TResult;
(acc: TResult|any, keyFn: (elem: T) => string):{
(list: R): TResult;
};
(acc: TResult|any):{
(keyFn: (elem: T) => string, list: R): TResult;
(keyFn: (elem: T) => string):{
(list: R): TResult;
};
};
};
/**
 * Returns a value wrapped to indicate that it is the final value of the reduce and
 * transduce functions. The returned value should be considered a black box: the internal
 * structure is not guaranteed to be stable.
 */
reduced<T>(elem: T): Reduced;
/**
 * Returns a single item by iterating through the list, successively calling the iterator
 * function and passing it an accumulator value and the current value from the array, and
 * then passing the result to the next call.
 * Right-to-left variant: the iterator receives (elem, acc), mirroring foldr.
 */
// // reason for 'any' on acc: somehow empty accumulators like '[]' won't work well when matching
// reduceRight<T, TResult>(fn: (elem: T, acc: TResult) => TResult|Reduced, acc: TResult|any, list: List<T>): TResult;
// reduceRight<T, TResult>(fn: (elem: T, acc: TResult) => TResult|Reduced, acc: TResult|any): (list: List<T>) => TResult;
// reduceRight<T, TResult>(fn: (elem: T, acc: TResult) => TResult|Reduced): CurriedFunction2<TResult, List<T>, TResult>;
// // reduceRight<T, TResult>(fn: (elem: T, acc: TResult) => TResult|Reduced): (acc: TResult|any, list: List<T>) => TResult;
// // reduceRight<T, TResult>(fn: (elem: T, acc: TResult) => TResult|Reduced): (acc: TResult|any) => (list: List<T>) => TResult;
// // reduceRight<T, TResult>: CurriedFunction3<(elem: T, acc: TResult) => TResult|Reduced, TResult|any, List<T>, TResult>;
// base
reduceRight<T, TResult>(fn: (elem: T, acc: TResult) => TResult|Reduced, acc: TResult|any, list: List<T>): TResult;
reduceRight<T, TResult>(fn: (elem: T, acc: TResult) => TResult|Reduced, acc: TResult|any):{
(list: List<T>): TResult;
};
reduceRight<T, TResult>(fn: (elem: T, acc: TResult) => TResult|Reduced):{
(acc: TResult|any, list: List<T>): TResult;
(acc: TResult|any):{
(list: List<T>): TResult;
};
};
/**
 * Like reduce, reduceWhile returns a single item by iterating through the list, successively calling the iterator function.
 * reduceWhile also takes a predicate that is evaluated before each step. If the predicate returns false, it "short-circuits"
 * the iteration and returns the current value of the accumulator.
 */
// reduceWhile<T, TResult>(pred: (acc: TResult, elem: T) => boolean, fn: (acc: TResult, elem: T) => TResult|Reduced, acc: TResult, list: List<T>): TResult;
// reduceWhile<T, TResult>(pred: (acc: TResult, elem: T) => boolean, fn: (acc: TResult, elem: T) => TResult|Reduced, acc: TResult): (list: List<T>) => TResult;
// reduceWhile<T, TResult>(pred: (acc: TResult, elem: T) => boolean, fn: (acc: TResult, elem: T) => TResult|Reduced): CurriedFunction2<TResult, List<T>, TResult>;
// reduceWhile<T, TResult>(pred: (acc: TResult, elem: T) => boolean): CurriedFunction3<(acc: TResult, elem: T) => TResult|Reduced, TResult, List<T>, TResult>;
// // reduceWhile<T, TResult>: CurriedFunction4<(acc: TResult, elem: T) => boolean, (acc: TResult, elem: T) => TResult|Reduced, TResult, List<T>, TResult>;
// base
reduceWhile<T, TResult>(pred: (acc: TResult, elem: T) => boolean, fn: (acc: TResult, elem: T) => TResult|Reduced, acc: TResult, list: List<T>): TResult;
reduceWhile<T, TResult>(pred: (acc: TResult, elem: T) => boolean, fn: (acc: TResult, elem: T) => TResult|Reduced, acc: TResult):{
(list: List<T>): TResult;
};
reduceWhile<T, TResult>(pred: (acc: TResult, elem: T) => boolean, fn: (acc: TResult, elem: T) => TResult|Reduced):{
(acc: TResult, list: List<T>): TResult;
(acc: TResult):{
(list: List<T>): TResult;
};
};
reduceWhile<T, TResult>(pred: (acc: TResult, elem: T) => boolean):{
(fn: (acc: TResult, elem: T) => TResult|Reduced, acc: TResult, list: List<T>): TResult;
(fn: (acc: TResult, elem: T) => TResult|Reduced, acc: TResult):{
(list: List<T>): TResult;
};
(fn: (acc: TResult, elem: T) => TResult|Reduced):{
(acc: TResult, list: List<T>): TResult;
(acc: TResult):{
(list: List<T>): TResult;
};
};
};
/**
 * Similar to `filter`, except that it keeps only values for which the given predicate
 * function returns falsy.
 */
// = filter
// array
reject<T>(pred: Pred<T>, list: List<T>): T[];
// reject<T>(pred: Pred<T>): (list: List<T>) => T[]; // mix
// reject<T>: CurriedFunction2<Pred<T>, List<T>, T[]>;
// functor to functor
reject<T>(pred: Pred<T>, list: Functor<T>): Functor<T>;
// reject<T>(pred: Pred<T>): (list: Functor<T>) => Functor<T>; // mix
// reject<T>: CurriedFunction2<Pred<T>, Functor<T>, Functor<T>>;
// functor to array
reject<T>(pred: Pred<T>, list: Functor<T>): T[];
// reject<T>(pred: Pred<T>): (list: Functor<T>) => T[]; // mix
// reject<T>: CurriedFunction2<Pred<T>, Functor<T>, T[]>;
// object
// NOTE(review): the active object overloads declare the result as U, but rejecting
// entries logically yields Partial<U> (as in the commented variants) -- confirm intended.
reject<T,U extends Obj<T>>(pred: Pred<T>, obj: U) : U;
// reject<T>(pred: Pred<T>): <U extends Obj<T>>(obj: U) => Partial<U>; // mix
// reject<T,U extends Obj<T>>: CurriedFunction2<Pred<T>, U, Partial<U>>;
// mixed
reject<T>(pred: Pred<T>): {
(list: List<T>): T[];
(list: Functor<T>): Functor<T>;
(list: Functor<T>): T[];
<U extends Obj<T>>(obj: U): U;
};
/**
 * Removes the sub-list of `list` starting at index `start` and containing `count` elements.
 */
// remove<T>(start: number, count: number, list: List<T>): T[];
// remove<T>(start: number, count: number): (list: List<T>) => T[];
// remove<T>(start: number): CurriedFunction2<number,List<T>,T[]>;
// // remove<T>: CurriedFunction3<number, number, List<T>, T[]>;
// base: T is introduced as late as possible so currying with numbers alone still works.
remove<T>(start: number, count: number, list: List<T>): T[];
remove(start: number, count: number):{
<T>(list: List<T>): T[];
};
remove(start: number):{
<T>(count: number, list: List<T>): T[];
(count: number):{
<T>(list: List<T>): T[];
};
};
/**
 * Returns a fixed list of size n containing a specified identical value.
 */
repeat<T>(a: T, n: number): T[];
repeat<T>(a: T): (n: number) => T[];
// repeat<T>: CurriedFunction2<T, number, T[]>;
/**
 * Replace a substring or regex match in a string with a replacement.
 */
// replace(pattern: RegExp|Prop, replacement: Prop, str: string): string;
// replace(pattern: RegExp|Prop, replacement: Prop): (str: string) => string;
// replace(pattern: RegExp|Prop): CurriedFunction2<Prop, string, string>;
// // replace(pattern: RegExp|Prop): (replacement: Prop, str: string) => string;
// // replace(pattern: RegExp|Prop): (replacement: Prop) => (str: string) => string;
// // replace: CurriedFunction3<RegExp|Prop, Prop, string, string>;
// base
replace(pattern: RegExp|Prop, replacement: Prop, str: string): string;
replace(pattern: RegExp|Prop, replacement: Prop):{
(str: string): string;
};
replace(pattern: RegExp|Prop):{
(replacement: Prop, str: string): string;
(replacement: Prop):{
(str: string): string;
};
};
/**
 * Returns a new list with the same elements as the original list, just in the reverse order.
 */
reverse<T>(list: List<T>): T[];
/**
 * Scan is similar to reduce, but returns a list of successively reduced values from the left.
 */
// scan<T, TResult>(fn: (acc: TResult, elem: T) => TResult|Reduced, acc: TResult, list: List<T>): TResult[];
// scan<T, TResult>(fn: (acc: TResult, elem: T) => TResult|Reduced, acc: TResult): (list: List<T>) => TResult[];
// scan<T, TResult>(fn: (acc: TResult, elem: T) => TResult|Reduced): CurriedFunction2<TResult, List<T>, TResult[]>;
// // scan<T, TResult>: CurriedFunction3<(acc: TResult, elem: T) => TResult|Reduced, TResult, List<T>, TResult[]>;
// base
scan<T, TResult>(fn: (acc: TResult, elem: T) => TResult|Reduced, acc: TResult, list: List<T>): TResult[];
scan<T, TResult>(fn: (acc: TResult, elem: T) => TResult|Reduced, acc: TResult):{
(list: List<T>): TResult[];
};
scan<T, TResult>(fn: (acc: TResult, elem: T) => TResult|Reduced):{
(acc: TResult, list: List<T>): TResult[];
(acc: TResult):{
(list: List<T>): TResult[];
};
};
/**
 * Transforms a Traversable of Applicative into an Applicative of Traversable.
 * `f` is the "pure"/of function used to wrap values of an empty traversable.
 */
// common case of array as traversable:
sequence<T>(f: (v: T) => Applicative<T>, traversable: List<Applicative<T>>): Applicative<Array<T>>;
// sequence<T>(f: (v: T) => Applicative<T>): (traversable: List<Applicative<T>>) => Applicative<Array<T>>; // mix
// sequence<T>: CurriedFunction2<(v: T) => Applicative<T>, List<Applicative<T>>, Applicative<Array<T>>>;
// general ADT case:
sequence<T>(f: (v: T) => Applicative<T>, traversable: Traversable<Applicative<T>>): Applicative<Traversable<T>>;
// sequence<T>(f: (v: T) => Applicative<T>): (traversable: Traversable<Applicative<T>>) => Applicative<Traversable<T>>; // mix
// sequence<T>: CurriedFunction2<(v: T) => Applicative<T>, Traversable<Applicative<T>>, Applicative<Traversable<T>>>;
// mixed:
sequence<T>(f: (v: T) => Applicative<T>): {
(traversable: List<Applicative<T>>): Applicative<Array<T>>;
(traversable: Traversable<Applicative<T>>): Applicative<Traversable<T>>;
};
/**
 * Returns the result of "setting" the portion of the given data structure focused by the given lens to the
 * given value.
 */
// hard to mix cuz different generics
// // key lens:
// set<T, K extends keyof T>(lens: KeyLens<T,K>, a: T[K], obj: T): T;
// set<T, K extends keyof T>(lens: KeyLens<T,K>, a: T[K]): (obj: T) => T;
// set<T, K extends keyof T>(lens: KeyLens<T,K>): CurriedFunction2<T[K], T, T>;
// // set<T, K extends keyof T>: CurriedFunction3<KeyLens<T,K>, T[K], T, T>;
// regular lenses:
// // smart approach, unreliable:
// set<T,U>(lens: Lens<T,U>, a: U, obj: T): T;
// set<T,U>(lens: Lens<T,U>, a: U): (obj: T) => T;
// // set<T,U>(lens: Lens<T,U>): (a: U, obj: T) => T;
// set<T,U>(lens: Lens<T,U>): CurriedFunction2<U, T, T>;
// // set<T,U>: CurriedFunction3<Lens<T,U>, U, T, T>;
// // // manually set lens; is this useful?
// // set<T,U>(lens: ManualLens<U>, a: U, obj: T): T;
// // set<U>(lens: ManualLens<U>, a: U): <T>(obj: T) => T;
// // set<T,U>(lens: ManualLens<U>): CurriedFunction2<U,T,T>;
// // // set<T,U>: CurriedFunction3<ManualLens<U>, U, T, T>;
// // assume result type equal to input object:
// set<T>(lens: UnknownLens, a: any, obj: T): T;
// set<T>(lens: UnknownLens, a: any): (obj: T) => T;
// // set<T>(lens: UnknownLens): (a: any, obj: T) => T;
// set<T>(lens: UnknownLens): CurriedFunction2<any, T, T>;
// // set<T>: CurriedFunction3<UnknownLens, any, T, T>;
// base: typed lens Lens<T,U> checks the focused value type U against the object type T.
set<T, U>(lens: Lens<T, U>, a: U, obj: T): T;
set<T, U>(lens: Lens<T, U>, a: U):{
(obj: T): T;
};
set<T, U>(lens: Lens<T, U>):{
(a: U, obj: T): T;
(a: U):{
(obj: T): T;
};
};
// unknown lens: value type unchecked; result type assumed equal to the input object.
set<T>(lens: UnknownLens, a: any, obj: T): T;
set(lens: UnknownLens, a: any):{
<T>(obj: T): T;
};
set(lens: UnknownLens):{
<T>(a: any, obj: T): T;
(a: any):{
<T>(obj: T): T;
};
};
/**
 * Returns the elements from `xs` starting at `a` and ending at `b - 1`.
 * T extends List<any> so strings slice to strings and arrays to arrays.
 */
// slice<T extends List<any>>(a: number, b: number, list: T): T;
// slice(a: number, b: number): <T extends List<any>>(list: T) => T;
// slice<T extends List<any>>(a: number): CurriedFunction2<number, T, T>;
// // slice(a: number): <T extends List<any>>(b: number, list: T) => T;
// // slice(a: number): <T extends List<any>>(b: number) => (list: T) => T;
// // slice<T extends List<any>>: CurriedFunction3<number, number, T, T>;
// base
slice<T extends List<any>>(a: number, b: number, list: T): T;
slice(a: number, b: number):{
<T extends List<any>>(list: T): T;
};
slice(a: number):{
<T extends List<any>>(b: number, list: T): T;
(b: number):{
<T extends List<any>>(list: T): T;
};
};
/**
 * Returns a copy of the list, sorted according to the comparator function, which should accept two values at a
 * time and return a negative number if the first value is smaller, a positive number if it's larger, and zero
 * if they are equal.
 */
sort<T>(fn: (a: T, b: T) => number, list: List<T>): T[];
sort<T>(fn: (a: T, b: T) => number): (list: List<T>) => T[];
// sort<T>: CurriedFunction2<(a: T, b: T) => number, List<T>, T[]>;
/**
 * Sorts the list according to a key generated by the supplied function.
 */
sortBy<T, K extends Ord>(fn: (a: T) => K, list: List<T>): T[];
sortBy<T, K extends Ord>(fn: (a: T) => K): (list: List<T>) => T[];
// sortBy<T, K extends Ord>: CurriedFunction2<(a: T) => K, List<T>, T[]>;
/**
 * Sorts a list according to a list of comparators.
 */
sortWith<T>(comparators: List<(a: T, b: T) => number>, list: List<T>): T[];
sortWith<T>(comparators: List<(a: T, b: T) => number>): (list: List<T>) => T[];
// sortWith<T>: CurriedFunction2<List<(a: T, b: T) => number>, List<T>, T[]>;
/**
 * Splits a string into an array of strings based on the given
 * separator.
 */
split(sep: RegExp|Prop, str: string): string[];
split(sep: RegExp|Prop): (str: string) => string[];
// split: CurriedFunction2<RegExp|Prop, string, string[]>;
/**
 * Splits a given list or string at a given index.
 */
// string
splitAt(index: number, list: string): [string, string];
// splitAt(index: number): (list: string) => [string, string];
// splitAt: CurriedFunction2<number, string, [string, string]>;
// array
splitAt<T>(index: number, list: List<T>): T[][];
// splitAt(index: number): <T>(list: List<T>) => T[][];
// splitAt<T>: CurriedFunction2<number, List<T>, T[][]>;
// mixed
splitAt(index: number): {
(list: string): [string, string];
<T>(list: List<T>): T[][];
};
/**
 * Splits a collection into slices of the specified length.
 */
splitEvery<T, R extends List<T>>(a: number, list: R): R[];
splitEvery(a: number): <T, R extends List<T>>(list: R) => R[];
// splitEvery<T, R extends List<T>>: CurriedFunction2<number, R, R[]>;
/**
 * Takes a list and a predicate and returns a pair of lists with the following properties:
 * - the result of concatenating the two output lists is equivalent to the input list;
 * - none of the elements of the first output list satisfies the predicate; and
 * - if the second output list is non-empty, its first element satisfies the predicate.
 */
splitWhen<T, R extends List<T>>(pred: Pred<T>, list: R): R[];
splitWhen<T>(pred: Pred<T>): <R extends List<T>>(list: R) => R[];
// splitWhen<T, R extends List<T>>: CurriedFunction2<Pred<T>, R, R[]>;
/**
 * Subtracts two numbers. Equivalent to `a - b` but curried.
 */
subtract(a: number, b: number): number;
subtract(a: number): (b: number) => number;
// subtract: CurriedFunction2<number, number, number>;
/**
 * Adds together all the elements of a list.
 */
sum(list: List<number>): number;
/**
 * Finds the set (i.e. no duplicates) of all elements contained in the first or second list, but not both.
 */
symmetricDifference<T>(list1: List<T>, list2: List<T>): T[];
// NOTE: the inner parameter reuses the name `list`; harmless in a type signature.
symmetricDifference<T>(list: List<T>): (list: List<T>) => T[];
// symmetricDifference<T>: CurriedFunction2<List<T>, List<T>, T[]>;
/**
 * Finds the set (i.e. no duplicates) of all elements contained in the first or second list, but not both.
 * Duplication is determined according to the value returned by applying the supplied predicate to two list elements.
 */
// symmetricDifferenceWith<T>(pred: (a: T, b: T) => boolean, list1: List<T>, list2: List<T>): T[];
// symmetricDifferenceWith<T>(pred: (a: T, b: T) => boolean): CurriedFunction2<List<T>, List<T>, T[]>;
// // symmetricDifferenceWith<T>: CurriedFunction3<(a: T, b: T) => boolean, List<T>, List<T>, T[]>;
// base
symmetricDifferenceWith<T>(pred: (a: T, b: T) => boolean, list1: List<T>, list2: List<T>): T[];
symmetricDifferenceWith<T>(pred: (a: T, b: T) => boolean, list1: List<T>):{
(list2: List<T>): T[];
};
symmetricDifferenceWith<T>(pred: (a: T, b: T) => boolean):{
(list1: List<T>, list2: List<T>): T[];
(list1: List<T>):{
(list2: List<T>): T[];
};
};
/**
 * A function that always returns true. Any passed in parameters are ignored.
 */
T(): true;
/**
 * Returns all but the first element of a list.
 */
tail<T extends List<any>>(list: T): T;
/**
 * Returns a new list containing the first `n` elements of the given list. If
 * `n > * list.length`, returns a list of `list.length` elements.
 */
take<T extends List<any>>(n: number, xs: T): T;
take(n: number): <T extends List<any>>(xs: T) => T;
// take<T extends List<any>>: CurriedFunction2<number, T, T>;
/**
 * Returns a new list containing the last n elements of the given list. If n > list.length,
 * returns a list of list.length elements.
 */
// = take
takeLast<T extends List<any>>(n: number, xs: T): T;
takeLast(n: number): <T extends List<any>>(xs: T) => T;
// takeLast<T extends List<any>>: CurriedFunction2<number, T, T>;
/**
 * Returns a new list containing the last n elements of a given list, passing each value
 * to the supplied predicate function, and terminating when the predicate function returns
 * false. Excludes the element that caused the predicate function to fail. The predicate
 * function is passed one argument: (value).
 */
// = takeWhile
takeLastWhile<T, R extends List<T>>(pred: Pred<T>, list: R): R;
takeLastWhile<T>(pred: Pred<T>): <R extends List<T>>(list: R) => R;
// takeLastWhile<T, R extends List<T>>: CurriedFunction2<Pred<T>, R, R>;
/**
 * Returns a new list containing the first `n` elements of a given list, passing each value
 * to the supplied predicate function, and terminating when the predicate function returns
 * `false`.
 */
takeWhile<T, R extends List<T>>(pred: Pred<T>, list: R): R;
takeWhile<T>(pred: Pred<T>): <R extends List<T>>(list: R) => R;
// takeWhile<T, R extends List<T>>: CurriedFunction2<Pred<T>, R, R>;
/**
 * The function to call with x. The return value of fn will be thrown away.
 */
tap<T>(fn: (a: T) => any, value: T): T;
tap<T>(fn: (a: T) => any): (value: T) => T;
// tap<T>: CurriedFunction2<(a: T) => any, T, T>;
/**
 * Determines whether a given string matches a given regular expression.
 * `str` is typed Prop (string|number) consistent with this file's convention.
 */
test(regexp: RegExp, str: Prop): boolean;
test(regexp: RegExp): (str: Prop) => boolean;
// test: CurriedFunction2<RegExp, Prop, boolean>;
/**
 * Calls an input function `n` times, returning an array containing the results of those
 * function calls.
 */
times<T>(fn: (i: number) => T, n: number): T[];
times<T>(fn: (i: number) => T): (n: number) => T[];
// times<T>: CurriedFunction2<(i: number) => T, number, T[]>;
/**
 * The lower case version of a string.
 */
toLower(str: string): string;
/**
 * Converts an object into an array of key, value arrays.
 * Only the object's own properties are used.
 * Note that the order of the output array is not guaranteed to be
 * consistent across different JS platforms.
 */
toPairs<T>(obj: Obj<T>): [string,T][];
/**
 * Converts an object into an array of key, value arrays.
 * The object's own properties and prototype properties are used.
 * Note that the order of the output array is not guaranteed to be
 * consistent across different JS platforms.
 */
toPairsIn<T>(obj: Obj<T>): [string,T][];
toPairsIn(obj: Object): [string,any][];
/**
 * Returns the string representation of the given value. eval'ing the output should
 * result in a value equivalent to the input value. Many of the built-in toString
 * methods do not satisfy this requirement.
 *
 * If the given value is an [object Object] with a toString method other than
 * Object.prototype.toString, this method is invoked with no arguments to produce the
 * return value. This means user-defined constructor functions can provide a suitable
 * toString method.
 */
// NOTE(review): the union with `any` absorbs StringRepresentable<string>, so the
// parameter effectively accepts anything -- confirm whether the union is intentional.
toString(val: StringRepresentable<string> | any): string;
/**
 * The upper case version of a string.
 */
toUpper(str: string): string;
/**
 * Initializes a transducer using supplied iterator function. Returns a single item by iterating through the
 * list, successively calling the transformed iterator function and passing it an accumulator value and the
 * current value from the array, and then passing the result to the next call.
 */
// transduce<T,U>(xf: (arg: List<T>) => List<T>, fn: (acc: List<U>, val: U) => List<U>, acc: List<T>, list: List<T>): U;
// transduce<T,U>(xf: (arg: List<T>) => List<T>, fn: (acc: List<U>, val: U) => List<U>, acc: List<T>): (list: List<T>) => U;
// transduce<T,U>(xf: (arg: List<T>) => List<T>, fn: (acc: List<U>, val: U) => List<U>): CurriedFunction2<List<T>,List<T>,U>;
// transduce<T,U>(xf: (arg: List<T>) => List<T>): CurriedFunction3<(acc: List<U>, val: U) => List<U>,List<T>,List<T>,U>;
// // transduce<T,U>: CurriedFunction4<(arg: List<T>) => List<T>, (acc: List<U>, val: U) => List<U>, List<T>, List<T>, U>;
// base: 4-ary; the last overload keeps only T bound so U can be introduced by the step fn.
transduce<T, U>(xf: (arg: List<T>) => List<T>, fn: (acc: List<U>, val:U) => List<U>, acc: List<T>, list: List<T>): U;
transduce<T, U>(xf: (arg: List<T>) => List<T>, fn: (acc: List<U>, val:U) => List<U>, acc: List<T>):{
(list: List<T>): U;
};
transduce<T, U>(xf: (arg: List<T>) => List<T>, fn: (acc: List<U>, val:U) => List<U>):{
(acc: List<T>, list: List<T>): U;
(acc: List<T>):{
(list: List<T>): U;
};
};
transduce<T>(xf: (arg: List<T>) => List<T>):{
<U>(fn: (acc: List<U>, val:U) => List<U>, acc: List<T>, list: List<T>): U;
<U>(fn: (acc: List<U>, val:U) => List<U>, acc: List<T>):{
(list: List<T>): U;
};
<U>(fn: (acc: List<U>, val:U) => List<U>):{
(acc: List<T>, list: List<T>): U;
(acc: List<T>):{
(list: List<T>): U;
};
};
};
/**
 * Transposes the rows and columns of a 2D list. When passed a list of n lists of length x, returns a list of x lists of length n.
 */
transpose<T>(list: List<List<T>>): T[][];
transpose(list: List<List<any>>): any[][];
/**
* Maps an Applicative-returning function over a Traversable, then uses
* `sequence` to transform the resulting Traversable of Applicative into
* an Applicative of Traversable.
*/
// // common case of array as traversable:
// traverse<T, U>(ap: (v: T) => Applicative<T>, fn: (v: T) => Applicative<U>, traversable: List<T>): Applicative<Array<U>>;
// // traverse<T, U>(ap: (v: T) => Applicative<T>, fn: (v: T) => Applicative<U>): (traversable: List<T>) => Applicative<Array<U>>; // mix
// traverse<T, U>(ap: (v: T) => Applicative<T>): CurriedFunction2<(v: T) => Applicative<U>, List<T>, Applicative<Array<U>>>;
// // traverse<T, U>: CurriedFunction3<(v: T) => Applicative<T>, (v: T) => Applicative<U>, List<T>, Applicative<Array<U>>>;
// // general ADT case:
// traverse<T, U>(ap: (v: T) => Applicative<T>, fn: (v: T) => Applicative<U>, traversable: Traversable<T>): Applicative<Traversable<U>>;
// // traverse<T, U>(ap: (v: T) => Applicative<T>, fn: (v: T) => Applicative<U>): (traversable: Traversable<T>) => Applicative<Traversable<U>>; // mix
// traverse<T, U>(ap: (v: T) => Applicative<T>): CurriedFunction2<(v: T) => Applicative<U>, Traversable<T>, Applicative<Traversable<U>>>;
// // traverse<T, U>: CurriedFunction3<(v: T) => Applicative<T>, (v: T) => Applicative<U>, Traversable<T>, Applicative<Traversable<U>>>;
// // mixed:
// traverse<T, U>(ap: (v: T) => Applicative<T>, fn: (v: T) => Applicative<U>): {
// (traversable: List<T>): Applicative<Array<U>>;
// (traversable: Traversable<T>): Applicative<Traversable<U>>;
// };
// base
traverse<T, U>(ap: (v: T) => Applicative<T>, fn: (v: T) => Applicative<U>, traversable: List<T>): Applicative<Array<U>>;
traverse<T, U>(ap: (v: T) => Applicative<T>, fn: (v: T) => Applicative<U>):{
(traversable: List<T>): Applicative<Array<U>>;
};
traverse<T>(ap: (v: T) => Applicative<T>):{
<U>(fn: (v: T) => Applicative<U>, traversable: List<T>): Applicative<Array<U>>;
<U>(fn: (v: T) => Applicative<U>):{
(traversable: List<T>): Applicative<Array<U>>;
};
};
// general ADT case
// FIX: these overloads previously took `traversable: List<T>`, which made their
// parameter lists exact duplicates of the array-case overloads above (differing
// only in return type) — duplicate signatures are never selected, so the ADT
// case was unreachable. Taking Traversable<T> matches the documented intent
// (see the commented-out drafts above).
traverse<T, U>(ap: (v: T) => Applicative<T>, fn: (v: T) => Applicative<U>, traversable: Traversable<T>): Applicative<Traversable<U>>;
traverse<T, U>(ap: (v: T) => Applicative<T>, fn: (v: T) => Applicative<U>):{
  (traversable: Traversable<T>): Applicative<Traversable<U>>;
};
traverse<T>(ap: (v: T) => Applicative<T>):{
  <U>(fn: (v: T) => Applicative<U>, traversable: Traversable<T>): Applicative<Traversable<U>>;
  <U>(fn: (v: T) => Applicative<U>):{
    (traversable: Traversable<T>): Applicative<Traversable<U>>;
  };
};
/**
* Removes (strips) whitespace from both ends of the string.
*/
trim(str: string): string;
/**
* tryCatch takes two functions, a tryer and a catcher. The returned function evaluates the tryer; if it does
* not throw, it simply returns the result. If the tryer does throw, the returned function evaluates the catcher
* function and returns its result. Note that for effective composition with this function, both the tryer and
* catcher functions must return the same type of results.
*/
tryCatch<T>(tryer: Variadic<T>, catcher: Variadic<T>): Variadic<T>;
// tryCatch<T>: CurriedFunction2<Variadic<T>, Variadic<T>, Variadic<T>>;
/**
* Gives a single-word string description of the (native) type of a value, returning such answers as 'Object',
* 'Number', 'Array', or 'Null'. Does not attempt to distinguish user Object types any further, reporting them
* all as 'Object'.
*/
type(val: any): string;
/**
* Takes a function fn, which takes a single array argument, and returns a function which:
* - takes any number of positional arguments;
* - passes these arguments to fn as an array; and
* - returns the result.
* In other words, R.unapply derives a variadic function from a function which takes an array.
* R.unapply is the inverse of R.apply.
*/
unapply<T>(fn: (args: any[]) => T): Variadic<T>;
/**
* Wraps a function of any arity (including nullary) in a function that accepts exactly 1 parameter.
* Any extraneous parameters will not be passed to the supplied function.
*/
unary<T,U>(fn: (a: T, ...args: any[]) => U): (a: T) => U;
/**
* Returns a function of arity n from a (manually) curried function.
*/
uncurryN<T>(len: number, fn: (a: any) => any): Variadic<T>;
// uncurryN<T>: CurriedFunction2<number, (a: any) => any, Variadic<T>>;
/**
* Builds a list from a seed value. Accepts an iterator function, which returns either false
* to stop iteration or an array of length 2 containing the value to add to the resulting
* list and the seed to be used in the next call to the iterator function.
*/
unfold<T, TResult>(fn: (seed: T) => [TResult, T]|false, seed: T): TResult[];
unfold<T, TResult>(fn: (seed: T) => [TResult, T]|false): (seed: T) => TResult[];
// unfold<T, TResult>: CurriedFunction2<(seed: T) => TResult[]|boolean, T, TResult[]>;
/**
* Combines two lists into a set (i.e. no duplicates) composed of the
* elements of each list.
*/
union<T>(as: List<T>, bs: List<T>): T[];
union<T>(as: List<T>): (bs: List<T>) => T[];
// union<T>: CurriedFunction2<List<T>, List<T>, T[]>;
/**
* Combines two lists into a set (i.e. no duplicates) composed of the elements of each list. Duplication is
* determined according to the value returned by applying the supplied predicate to two list elements.
*/
// unionWith<T>(pred: (a: T, b: T) => boolean, list1: List<T>, list2: List<T>): T[];
// unionWith<T>(pred: (a: T, b: T) => boolean, list1: List<T>): (list2: List<T>) => T[];
// unionWith<T>(pred: (a: T, b: T) => boolean): CurriedFunction2<List<T>, List<T>, T[]>;
// // unionWith<T>: CurriedFunction3<(a: T, b: T) => boolean, List<T>, List<T>, T[]>;
// base
unionWith<T>(pred: (a: T, b: T) => boolean, list1: List<T>, list2: List<T>): T[];
unionWith<T>(pred: (a: T, b: T) => boolean, list1: List<T>):{
(list2: List<T>): T[];
};
unionWith<T>(pred: (a: T, b: T) => boolean):{
(list1: List<T>, list2: List<T>): T[];
(list1: List<T>):{
(list2: List<T>): T[];
};
};
/**
* Returns a new list containing only one copy of each element in the original list.
*/
uniq<T>(list: List<T>): T[];
/**
* Returns a new list containing only one copy of each element in the original list, based upon the value returned by applying the supplied function to each list element. Prefers the first item if the supplied function produces the same value on two items. R.equals is used for comparison.
*/
uniqBy<T,U>(fn: (a: T) => U, list: List<T>): T[];
uniqBy<T,U>(fn: (a: T) => U): (list: List<T>) => T[];
// uniqBy<T,U>: CurriedFunction2<(a: T) => U, List<T>, T[]>;
/**
* Returns a new list containing only one copy of each element in the original list, based upon the value
* returned by applying the supplied predicate to two list elements.
*/
uniqWith<T,U>(pred: (x: T, y: T) => boolean, list: List<T>): T[];
uniqWith<T,U>(pred: (x: T, y: T) => boolean): (list: List<T>) => T[];
// uniqWith<T,U>: CurriedFunction2<(x: T, y: T) => boolean, List<T>, T[]>;
/**
* Tests the final argument by passing it to the given predicate function. If the predicate is not satisfied,
* the function will return the result of calling the whenFalseFn function with the same argument. If the
* predicate is satisfied, the argument is returned as is.
*/
unless<T,U>(pred: Pred<T>, whenFalseFn: (a: T) => U, obj: T): U;
unless<T,U>(pred: Pred<T>, whenFalseFn: (a: T) => U): (obj: T) => U;
// unless<T,U>: CurriedFunction3<Pred<T>, (a: T) => U, T, U>;
/**
* Returns a new list by pulling every item at the first level of nesting out, and putting
* them in a new array.
*/
unnest<T>(x: List<List<T>>): T[];
unnest<T>(x: List<T>): T[];
/**
* Takes a predicate, a transformation function, and an initial value, and returns a value of the same type as
* the initial value. It does so by applying the transformation until the predicate is satisfied, at which point
* it returns the satisfactory value.
*/
// until<T,U>(pred: Pred<T>, fn: (val: T) => U, init: U): U;
// until<T,U>(pred: Pred<T>, fn: (val: T) => U): (init: U) => U;
// until<T,U>(pred: Pred<T>): CurriedFunction2<(val: T) => U, U, U>;
// // until<T,U>: CurriedFunction3<Pred<T>, (val: T) => U, U, U>;
// base
until<T, U>(pred: Pred<T>, fn: (val: T) => U, init: U): U;
until<T, U>(pred: Pred<T>, fn: (val: T) => U):{
(init: U): U;
};
until<T>(pred: Pred<T>):{
<U>(fn: (val: T) => U, init: U): U;
<U>(fn: (val: T) => U):{
(init: U): U;
};
};
/**
* Returns a new copy of the array with the element at the provided index replaced with the given value.
*/
// update<T>(index: number, value: T, list: List<T>): T[];
// update<T>(index: number, value: T): (list: List<T>) => T[];
// update<T>(index: number): CurriedFunction2<T,List<T>,T[]>;
// // update<T>: CurriedFunction3<number, T, List<T>, T[]>;
// base
update<T>(index: number, value: T, list: List<T>): T[];
update<T>(index: number, value: T):{
(list: List<T>): T[];
};
update(index: number):{
<T>(value: T, list: List<T>): T[];
<T>(value: T):{
(list: List<T>): T[];
};
};
/**
* Accepts a function fn and a list of transformer functions and returns a new curried function.
* When the new function is invoked, it calls the function fn with parameters consisting of the
* result of calling each supplied handler on successive arguments to the new function.
*
* If more arguments are passed to the returned function than transformer functions, those arguments
* are passed directly to fn as additional parameters. If you expect additional arguments that don't
* need to be transformed, although you can ignore them, it's best to pass an identity function so
* that the new function reports the correct arity.
*/
useWith<T>(fn: Variadic<T>, transformers: List<Function>): Variadic<T>;
useWith<T>(fn: Variadic<T>): (transformers: List<Function>) => Variadic<T>;
// useWith<T>: CurriedFunction2<Variadic<T>, List<Function>, Variadic<T>>;
/**
* Returns a list of all the enumerable own properties of the supplied object.
* Note that the order of the output array is not guaranteed across
* different JS platforms.
*/
values<T>(obj: Struct<T>): T[];
values<T>(obj: Object): any[];
/**
* Returns a list of all the properties, including prototype properties, of the supplied
* object. Note that the order of the output array is not guaranteed to be consistent across different JS platforms.
*/
valuesIn<T>(obj: Struct<T>): T[];
valuesIn(obj: Object): any[];
/**
* Returns a "view" of the given data structure, determined by the given lens. The lens's focus determines which
* portion of the data structure is visible.
*/
// hard to mix cuz different generics
// // key lens:
// view<T, K extends keyof T>(lens: KeyLens<T,K>, obj: T): T[K];
// view<T, K extends keyof T>(lens: KeyLens<T,K>): (obj: T) => T[K];
// // view<T, K extends keyof T>: CurriedFunction2<KeyLens<T,K>, T, T[K]>;
// regular lenses:
// smart approach, unreliable:
view<T,U>(lens: Lens<T,U>, obj: T): U;
view<T,U>(lens: Lens<T,U>): (obj: T) => U;
// view<T,U>: CurriedFunction2<Lens<T,U>, T, U>;
// lens with type manually set
view<T>(lens: ManualLens<T>, obj: Struct<any>): T;
view<T>(lens: ManualLens<T>): (obj: Struct<any>) => T;
// view<T>: CurriedFunction2<ManualLens<T>, Struct<any>, T>;
// unknown lens, manually supply return type. does this add to the above case?
view<T>(lens: UnknownLens, obj: Struct<any>): T;
view<T>(lens: UnknownLens): (obj: Struct<any>) => T;
// view<T>: CurriedFunction2<UnknownLens, Struct<any>, T>;
/**
* Tests the final argument by passing it to the given predicate function. If the predicate is satisfied, the function
* will return the result of calling the whenTrueFn function with the same argument. If the predicate is not satisfied,
* the argument is returned as is.
*/
// when<T,U>(pred: Pred<T>, whenTrueFn: (a: T) => U, obj: T): U;
// when<T,U>(pred: Pred<T>, whenTrueFn: (a: T) => U): (obj: T) => U;
// when<T,U>(pred: Pred<T>): CurriedFunction2<(a: T) => U, T, U>;
// // when<T,U>: CurriedFunction3<Pred<T>, (a: T) => U, T, U>;
// base
when<T, U>(pred: Pred<T>, whenTrueFn: (a: T) => U, obj: T): U;
when<T, U>(pred: Pred<T>, whenTrueFn: (a: T) => U):{
(obj: T): U;
};
when<T>(pred: Pred<T>):{
<U>(whenTrueFn: (a: T) => U, obj: T): U;
<U>(whenTrueFn: (a: T) => U):{
(obj: T): U;
};
};
/**
* Takes a spec object and a test object and returns true if the test satisfies the spec.
* Any property on the spec that is not a function is interpreted as an equality
* relation.
*
* If the spec has a property mapped to a function, then `where` evaluates the function, passing in
* the test object's value for the property in question, as well as the whole test object.
*
* `where` is well suited to declaratively expressing constraints for other functions, e.g.,
* `filter`, `find`, etc.
*/
// hard to mix cuz different generics
// // heterogeneous version
// where<T extends Obj<any>>(spec: { [P in keyof T]?: Pred<T[P]>; }, testObj: T): boolean;
// where<T extends Obj<any>>(spec: { [P in keyof T]?: Pred<T[P]>; }): (testObj: T) => boolean; // generics too early?
// // where<T extends Obj<any>>: CurriedFunction2<{ [P in keyof T]?: Pred<T[P]>; }, T, boolean>;
// homogeneous version
where<T>(spec: Obj<Pred<T>>, testObj: Obj<T>): boolean;
where<T>(spec: Obj<Pred<T>>): (testObj: Obj<T>) => boolean;
// where<T>: CurriedFunction2<Obj<Pred<T>>, Obj<T>, boolean>;
// DIY "fill in the type params yourself" version
where<T,U>(spec: T, testObj: U): boolean;
where<T>(spec: T): <U>(testObj: U) => boolean;
// where<T,U>: CurriedFunction2<T, U, boolean>;
/**
* Takes a spec object and a test object; returns true if the test satisfies the spec,
* false otherwise. An object satisfies the spec if, for each of the spec's own properties,
* accessing that property of the object gives the same value (in R.eq terms) as accessing
* that property of the spec.
*/
// hard to mix cuz different generics
// // heterogeneous version
// whereEq<T extends Obj<any>>(spec: Partial<T>, testObj: T): boolean;
// whereEq<T extends Obj<any>>(spec: Partial<T>): (testObj: T) => boolean;
// // whereEq<T extends Obj<any>>: CurriedFunction2<Partial<T>, T, boolean>;
// homogeneous version
whereEq<T>(spec: Obj<T>, testObj: Obj<T>): boolean;
whereEq<T>(spec: Obj<T>): (testObj: Obj<T>) => boolean;
// whereEq<T>: CurriedFunction2<Obj<T>, Obj<T>, boolean>;
// DIY "fill in the type params yourself" version
whereEq<T,U>(spec: T, testObj: U): boolean;
whereEq<T>(spec: T): <U>(testObj: U) => boolean;
// whereEq<T,U>: CurriedFunction2<T, U, boolean>;
/**
* Returns a new list without values in the first argument. R.equals is used to determine equality.
* Acts as a transducer if a transformer is given in list position.
*/
without<T>(list1: List<T>, list2: List<T>): T[];
without<T>(list1: List<T>): (list2: List<T>) => T[];
// without<T>: CurriedFunction2<List<T>, List<T>, T[]>;
/**
* Creates a new list out of the two supplied by creating each possible pair from the lists.
*/
xprod<K,V>(as: List<K>, bs: List<V>): KeyValuePair<K,V>[];
xprod<K>(as: List<K>): <V>(bs: List<V>) => KeyValuePair<K,V>[];
// xprod<K,V>: CurriedFunction2<List<K>, List<V>, KeyValuePair<K,V>[]>;
/**
* Creates a new list out of the two supplied by pairing up equally-positioned items from
* both lists. Note: `zip` is equivalent to `zipWith(function(a, b) { return [a, b] })`.
*/
zip<K,V>(list1: List<K>, list2: List<V>): KeyValuePair<K,V>[];
zip<K>(list1: List<K>): <V>(list2: List<V>) => KeyValuePair<K,V>[];
// zip<K,V>: CurriedFunction2<List<K>, List<V>, KeyValuePair<K,V>[]>;
/**
* Creates a new object out of a list of keys and a list of values.
*/
// TODO: Obj<T> as a return value is too specific, but `any` seems too loose
zipObj<T>(keys: List<Prop>, values: List<T>): Obj<T>;
zipObj(keys: List<Prop>): <T>(values: List<T>) => Obj<T>;
// zipObj<T>: CurriedFunction2<List<Prop>, List<T>, Obj<T>>;
/**
* Creates a new list out of the two supplied by applying the function to each
* equally-positioned pair in the lists.
*/
zipWith<T, U, TResult>(fn: (x: T, y: U) => TResult, list1: List<T>, list2: List<U>): TResult[];
zipWith<T, U, TResult>(fn: (x: T, y: U) => TResult, list1: List<T>): (list2: List<U>) => TResult[];
zipWith<T, U, TResult>(fn: (x: T, y: U) => TResult): CurriedFunction2<List<T>, List<U>, TResult[]>;
// zipWith<T, U, TResult>: CurriedFunction3<(x: T, y: U) => TResult, List<T>, List<U>, TResult[]>;
}
}
console.log('Big file TS here.')
function path11<A extends string, B extends string, C extends string, D>
(path: [A, B, C],
d: {[K1 in A]: {[K2 in B]: {[K3 in C]: D}}}
): D
function path11(path: any, d: any): any {
let ret = d
for (let k of path) {
ret = ret[k]
}
} | the_stack |
import { AccessPermissionsContract } from '@nestjs-bff/global-contracts/lib/domain/access-permissions/access-permissions.contract';
import { IEntity } from '@nestjs-bff/global-contracts/lib/domain/core/entity.interface';
import * as _ from 'lodash';
import { Document, Model } from 'mongoose';
import { AuthCheckContract } from '../../../shared/authchecks/authcheck.contract';
import { CrudOperations } from '../../../shared/authchecks/crud-operations.enum';
import { ScopedEntityAuthCheck } from '../../../shared/authchecks/scoped-entity.authcheck';
import { CacheStore } from '../../../shared/caching/cache-store.shared';
import { CachingUtils } from '../../../shared/caching/caching.utils';
import { AppError } from '../../../shared/exceptions/app.exception';
import { LoggerSharedService } from '../../../shared/logging/logger.shared.service';
import { ClassValidator } from '../validators/class-validator';
/**
 * Constructor parameters for BaseRepo.
 */
export interface IBaseRepoParams<TEntity extends IEntity> {
// logger used for repo-level debug tracing
loggerService: LoggerSharedService;
// mongoose model this repo reads from and writes to
model: Model<Document & TEntity>;
// cache used for read-through caching of find results
cacheStore: CacheStore;
// default cache TTL applied when a call does not supply its own `ttl`
defaultTTL: number;
// validator run against entities before create/patch/update
entityValidator: ClassValidator<TEntity>;
// per-entity authorization check; defaults to ScopedEntityAuthCheck when omitted
entityAuthChecker?: AuthCheckContract<IEntity, CrudOperations>;
}
/**
* Base repo query repository
*
* Notes:
* - By default will try to validate that org and user filtering in in place, unless overridden with params
* - FindAll can be achieved with find, passing no conditions
*/
export abstract class BaseRepo<TEntity extends IEntity> {
private readonly name: string;
protected readonly loggerService: LoggerSharedService;
protected readonly model: Model<Document & TEntity>;
protected readonly cacheStore: CacheStore;
protected readonly defaultTTL: number;
public readonly modelName: string;
public readonly entityValidator: ClassValidator<TEntity>;
public readonly entityAuthChecker: AuthCheckContract<IEntity, CrudOperations>;
constructor(params: IBaseRepoParams<TEntity>) {
this.loggerService = params.loggerService;
this.model = params.model;
this.name = `RepoBase<${this.model.modelName}>`;
this.modelName = this.model.modelName;
this.cacheStore = params.cacheStore;
this.defaultTTL = params.defaultTTL;
this.entityValidator = params.entityValidator;
this.entityAuthChecker = params.entityAuthChecker || new ScopedEntityAuthCheck();
}
//
// findOne
//
public async findById(
id: string,
options?: {
accessPermissions?: AccessPermissionsContract;
skipAuthorization?: boolean;
skipCache?: boolean;
ttl?: number;
},
): Promise<TEntity> {
const result = await this.tryFindById(id, options);
// validate not null
if (!result) throw new AppError(`Could not find entity ${this.name} with id ${id}`, options);
// Return
return result;
}
//
// tryFindOne
//
public async tryFindById(
id: string,
options?: {
accessPermissions?: AccessPermissionsContract;
skipAuthorization?: boolean;
skipCache?: boolean;
ttl?: number;
},
): Promise<TEntity | null> {
// debug logging
this.loggerService.debug(`${this.name}.findOneById`, id, options);
// Setup
let key: string | undefined;
let result: TEntity | null;
let cachedResult: TEntity | null | undefined;
options = options || {}; // ensure options is not null
// cache access
if (!options.skipCache === true) {
key = CachingUtils.makeCacheKeyFromId(id);
cachedResult = await this.cacheStore.get<TEntity>(key);
}
if (cachedResult) {
result = cachedResult;
} else {
// data store access
result = await this._dbFindById(id);
}
// cache population
if (!options.skipCache === true && result && !cachedResult) {
// tslint:disable-next-line:no-non-null-assertion
this.cacheStore.set(key!, result, { ttl: options.ttl || this.defaultTTL });
}
// authorization checks
if (!options.skipAuthorization && result) {
await this.entityAuthChecker.ensureAuthorized({
accessPermissions: options.accessPermissions,
origin: this.name,
targetResource: result,
operation: CrudOperations.read,
});
}
// Return
return result;
}
//
// findOne
//
public async findOne(
conditions: object,
options?: {
accessPermissions?: AccessPermissionsContract;
skipAuthorization?: boolean;
skipCache?: boolean;
ttl?: number;
},
): Promise<TEntity> {
const result = await this.tryFindOne(conditions, options);
// validate not null
if (!result) throw new AppError(`Could not find entity ${this.name} with conditions ${JSON.stringify(conditions)}`, options);
// Return
return result;
}
//
// tryFindOne
//
public async tryFindOne(
conditions: object,
options?: {
accessPermissions?: AccessPermissionsContract;
skipAuthorization?: boolean;
skipCache?: boolean;
ttl?: number;
},
): Promise<TEntity | null> {
// debug logging
this.loggerService.debug(`${this.name}.findOne`, conditions, options);
// Setup
let key: string | undefined;
let result: TEntity | null;
let cachedResult: TEntity | null | undefined;
options = options || {}; // ensure options is not null
// cache access
if (!options.skipCache === true) {
key = CachingUtils.makeCacheKeyFromObject(conditions);
cachedResult = await this.cacheStore.get<TEntity>(key);
}
if (cachedResult) {
result = cachedResult;
} else {
// data store access
result = await this._dbFindOne(conditions);
}
// cache population
if (!options.skipCache === true && result && !cachedResult) {
// tslint:disable-next-line:no-non-null-assertion
this.cacheStore.set(key!, result, { ttl: options.ttl || this.defaultTTL });
}
// authorization checks
if (!options.skipAuthorization && result) {
await this.entityAuthChecker.ensureAuthorized({
accessPermissions: options.accessPermissions,
origin: this.name,
targetResource: result,
operation: CrudOperations.read,
});
}
// Return
return result;
}
//
// find
//
public async find(
conditions: object,
options?: {
accessPermissions?: AccessPermissionsContract;
skipAuthorization?: boolean;
skipCache?: boolean;
ttl?: number;
},
): Promise<TEntity[]> {
// debug logging
this.loggerService.debug(`${this.name}.find`, conditions, options);
// setup
let key: string | undefined;
let result: TEntity[] | null;
let cachedResult: TEntity[] | null | undefined;
options = options || {}; // ensure options is not null
// cache access
if (!options.skipCache === true) {
key = CachingUtils.makeCacheKeyFromObject(conditions);
cachedResult = await this.cacheStore.get<TEntity[]>(key);
}
if (cachedResult) {
result = cachedResult;
} else {
// data store access
result = await this._dbFind(conditions);
}
// cache population
if (!options.skipCache === true && result && !cachedResult) {
// tslint:disable-next-line:no-non-null-assertion
this.cacheStore.set(key!, result, { ttl: options.ttl || this.defaultTTL });
}
// authorization checks
if (!options.skipAuthorization && result) {
for (const entity of result) {
if (!options.skipAuthorization) {
await this.entityAuthChecker.ensureAuthorized({
accessPermissions: options.accessPermissions,
origin: this.name,
targetResource: entity,
operation: CrudOperations.read,
});
}
}
}
// return
return result;
}
//
// create
//
public async create(
newEntity: Partial<TEntity>,
options?: { accessPermissions?: AccessPermissionsContract; skipAuthorization?: boolean; customValidator?: ClassValidator<TEntity> },
): Promise<TEntity> {
// debug logging
this.loggerService.debug(`${this.name}.create`, newEntity, options);
// setup
options = options || {}; // ensure options is not null
const validator = options.customValidator || this.entityValidator;
// validation
validator.validate(newEntity);
// authorization checks
if (!options.skipAuthorization) {
await this.entityAuthChecker.ensureAuthorized({
accessPermissions: options.accessPermissions,
origin: this.name,
targetResource: newEntity,
operation: CrudOperations.create,
});
}
// transfer values to the model
const createModel: Document & TEntity = new this.model();
Object.assign(createModel, newEntity);
// persist
return this._dbSave(createModel);
}
//
// patch
//
public async patch(
patchEntity: Partial<TEntity>,
options?: { accessPermissions?: AccessPermissionsContract; skipAuthorization?: boolean; customValidator?: ClassValidator<TEntity> },
): Promise<TEntity> {
// debug logging
this.loggerService.debug(`${this.name}.patch`, patchEntity, options);
// setup
options = options || {}; // ensure options is not null
const validator = options.customValidator || this.entityValidator;
// partial validation
validator.validate(patchEntity);
// fetch entity
let fullModel = await this._dbFindById(patchEntity.id);
if (!fullModel) throw new AppError(`No ${this.modelName} found with id ${patchEntity.id}`);
// merge values
fullModel = _.merge(fullModel, patchEntity);
// full validation
await this.entityValidator.validate(fullModel);
// authorization checks
if (!options.skipAuthorization) {
await this.entityAuthChecker.ensureAuthorized({
accessPermissions: options.accessPermissions,
origin: this.name,
targetResource: fullModel,
operation: CrudOperations.update,
});
}
// persist
const savedFullModel = await this._dbSave(fullModel);
// clear cache
this.clearCacheByEntity(fullModel);
return savedFullModel;
}
//
// update
//
public async update(
entity: TEntity,
options?: { accessPermissions?: AccessPermissionsContract; skipAuthorization?: boolean; customValidator?: ClassValidator<TEntity> },
): Promise<TEntity> {
// debug logging
this.loggerService.debug(`${this.name}.update`, entity, options);
// setup
options = options || {}; // ensure options is not null
const validator = options.customValidator || this.entityValidator;
// validation
await validator.validate(entity, { skipMissingProperties: false });
// authorization checks
if (!options.skipAuthorization) {
await this.entityAuthChecker.ensureAuthorized({
accessPermissions: options.accessPermissions,
origin: this.name,
targetResource: entity,
operation: CrudOperations.update,
});
}
// persist
const savedReplacedModel = await this._dbFindOneAndReplace(entity);
// clear cache
this.clearCacheByEntity(savedReplacedModel);
return savedReplacedModel;
}
//
// delete
//
public async delete(id: string, options?: { accessPermissions?: AccessPermissionsContract; skipAuthorization?: boolean }): Promise<TEntity | undefined> {
// debug logging
this.loggerService.debug(`${this.name}.delete`, id, options);
// setup
let deletedEntity;
options = options || {}; // ensure options is not null
// retrieve
const deleteModel = await this._dbFindById(id);
if (!deleteModel) throw new AppError(`No ${this.modelName} found with id ${id}`);
// authorization checks
if (!options.skipAuthorization && deleteModel) {
await this.entityAuthChecker.ensureAuthorized({
accessPermissions: options.accessPermissions,
origin: this.name,
targetResource: deleteModel,
operation: CrudOperations.delete,
});
}
if (deleteModel) {
// persist deletion
deletedEntity = await this._dbRemove(deleteModel);
// clear cache
this.clearCacheByEntity(deletedEntity);
}
return deletedEntity;
}
//
// utility methods
//
protected async clearCacheByEntity(entity: TEntity, options?: { customValidator?: ClassValidator<TEntity> }) {
// setup
options = options || {}; // ensure options is not null
const validator = options.customValidator || this.entityValidator;
// validation
await validator.validate(entity);
// clear by ID
this.clearCacheByKey(CachingUtils.makeCacheKeyFromId(entity.id));
// clear by query conditions
this.generateValidQueryConditionsForCacheClear(entity).forEach(cacheClearEntity => {
this.clearCacheByKey(CachingUtils.makeCacheKeyFromObject(cacheClearEntity));
});
}
protected clearCacheByKey(cacheKey: string) {
if (cacheKey.trim.length > 0) throw new AppError('cacheKey can not be null or whitespace');
return this.cacheStore.del(cacheKey);
}
protected abstract generateValidQueryConditionsForCacheClear(entity: TEntity): object[];
//
// Abstracted Mongoose calls, to allow for easier testing through mocked mongoose calls
//
protected async _dbFindOne(conditions: object) {
this.loggerService.debug(`${this.name}._dbFindOne`, conditions);
const result = await this.model.findOne(conditions).exec();
return result;
}
protected async _dbFind(conditions: object): Promise<Array<Document & TEntity>> {
this.loggerService.debug(`${this.name}._dbFind`, conditions);
return this.model.find(conditions).exec();
}
protected async _dbSave(createModel: Document & TEntity): Promise<Document & TEntity> {
this.loggerService.debug(`${this.name}._dbSave`, createModel);
return createModel.save();
}
protected async _dbRemove(deleteModel: Document & TEntity): Promise<Document & TEntity> {
this.loggerService.debug(`${this.name}._dbRemove`, deleteModel);
return deleteModel.remove();
}
protected async _dbFindById(id: any): Promise<Document & TEntity | null> {
this.loggerService.debug(`${this.name}._dbFindById`, id);
return this.model.findById(id).exec();
}
protected async _dbFindOneAndReplace(entity: Partial<TEntity>) {
this.loggerService.debug(`${this.name}._dbFindOneAndReplace`, entity);
const result = await this.model.collection.findOneAndReplace({ id: entity.id }, entity, { returnOriginal: false });
return result.value;
}
} | the_stack |
import * as crypto from 'crypto';
import * as retry from 'p-retry';
import * as zlib from 'zlib';
import {handler as dockerAuth} from './auth/dockerio';
import {GCRAuthOptions, handler as gcrAuth} from './auth/gcr';
import {DockerAuthResult, DockerCredentialHelpers} from './credentials-helper';
import {ImageLocation, parse as parseSpecifier} from './image-specifier';
import * as packer from './packer';
import {pending, PendingTracker} from './pending';
import {ImageConfig, ManifestV2, RegistryClient} from './registry';
// expose plain registry client.
export {RegistryClient} from './registry';
/** Options accepted by the Image constructor. */
export type ImageOptions = {
// registry authentication configuration — presumably consumed by the
// auth handlers imported above; TODO confirm where AuthConfig is declared
auth?: AuthConfig,
// sync?: false|undefined
};
/** The source image's manifest together with its parsed image config. */
export type ImageData = {
manifest: ManifestV2,
config: ImageConfig
};
export class Image {
private options: ImageOptions;
// the source image location
private image: ImageLocation;
// where to save the image and where to upload blobs
private targetImage: ImageLocation;
// the manifest and config for the source image (false until first load)
private imageData: ImageData|false;
// deep copy of the manifest as originally fetched; used by sync()
private originalManifest?: ManifestV2;
// cache of registry clients, keyed by registry/namespace/image/scope
private clients: {[k: string]: Promise<RegistryClient>} = {};
private pending: PendingTracker;
// guards against copying the base image's layers more than once
private syncedBaseImage = false;
// convenience properties. these values get updated in the image config when
// saving. these overwrite values updated manually in ImageData.config.config
// tslint:disable-next-line:variable-name
WorkingDir?: string;
// tslint:disable-next-line:variable-name
Cmd?: string[];
// tslint:disable-next-line:variable-name
Env?: string[];
// pending tracks all in-flight actions so save() can Promise.all them
// before uploading the manifest
/**
 * @param imageSpecifier source image, e.g. "gcr.io/project/name:tag"
 * @param targetImage    optional target image specifier, or — in the
 *                       two-argument call form — the options object
 * @param options        image options (auth, etc.)
 */
constructor(
    imageSpecifier: string, targetImage?: string|ImageOptions,
    options?: ImageOptions) {
  if (typeof targetImage !== 'string') {
    // Two-argument call form: the second argument is really the options
    // object. An explicit third argument still takes precedence.
    // (Fix: this used to read `this.options = this.options || targetImage`
    // right after `this.options = options || {}`, so the always-truthy `{}`
    // meant the second-argument options were silently discarded.)
    this.options = options || targetImage || {};
    targetImage = undefined;
  } else {
    this.options = options || {};
  }
  this.image = parseSpecifier(imageSpecifier);
  this.targetImage = parseSpecifier(targetImage || imageSpecifier);
  this.pending = pending();
  // setup default client.
  // if the to and from match host and namespace we'll make one write client.
  const readOnly =
      this.authKey(this.image) !== this.authKey(this.targetImage);
  // optimistic client loading. if it errors the user will be able to get the
  // error the next time they grab a client or try to save.
  this.client(this.image, !readOnly)
      .catch(
          () => {
            // we can ignore the unhandled rejection here because we cache
            // this promise and return it when they try to get the client.
          });
  this.imageData = false;
}
/**
 * Returns the runtime portion of the image config — the part that holds
 * things like entrypoint and env.
 */
async getImageConfig() {
  const {config} = await this.getImageData();
  return config.config;
}
/**
 * Append an already-uploaded layer blob to the manifest and record its
 * uncompressed digest in the config's rootfs.diff_ids.
 * @param digest             digest of the compressed layer blob
 * @param uncompressedDigest digest of the uncompressed tar
 * @param size               byte size of the compressed blob
 * @param urls               optional external URLs for foreign layers
 */
async addLayer(
    digest: string, uncompressedDigest: string, size: number,
    urls?: string[]) {
  const imageData = await this.getImageData();
  // docker-flavored manifests want the docker layer media type, OCI otherwise
  const isDocker = imageData.manifest.mediaType.indexOf('docker') > -1;
  const mediaType = isDocker ?
      'application/vnd.docker.image.rootfs.diff.tar.gzip' :
      'application/vnd.oci.image.layer.v1.tar+gzip';
  const layerResult = {mediaType, digest, size, urls};
  imageData.manifest.layers.push(layerResult);
  imageData.config.rootfs.diff_ids.push(uncompressedDigest);
  return Object.assign({}, layerResult, {uncompressedDigest});
}
/**
 * Remove a layer (by compressed digest) from the manifest, along with the
 * matching entry in the config's diff_ids.
 * @returns true when a layer was found and removed, false otherwise.
 */
async removeLayer(digest: string) {
  const imageData = await this.getImageData();
  const layers = imageData.manifest.layers;
  // index of the LAST matching layer (mirrors the original full scan,
  // which kept overwriting its match index)
  let matchIndex = -1;
  for (let i = 0; i < layers.length; i++) {
    if (layers[i].digest === digest) {
      matchIndex = i;
    }
  }
  if (matchIndex === -1) {
    return false;
  }
  layers.splice(matchIndex, 1);
  imageData.config.rootfs.diff_ids.splice(matchIndex, 1);
  return true;
}
// "./myfiles", "/workspace"
// "localDirectory", "imageDirectory"
/**
 * Tar + gzip a directory (or a map of local->image directories), upload the
 * result as a layer blob, and append it to the manifest/config.
 *
 * Call forms:
 *   addFiles(dir, targetDir, options)
 *   addFiles(dir, options)
 *   addFiles({localDir: imageDir, ...}, options)
 */
addFiles(
    dir: string|{[dir: string]: string},
    targetDir?: string|packer.PackOptions,
    options?: packer.PackOptions): Promise<{
  mediaType: string,
  digest: string,
  size: number,
  uncompressedDigest: string
}> {
  if (typeof targetDir === 'string') {
    if (typeof dir !== 'string') {
      // addFiles({"apples":"oranges"},"pears")
      throw new Error(
          'specifying a target directory name when the dir is an object of name:target doesn\'t make sense. try addFiles({dir:target})');
    }
    dir = {[targetDir]: dir};
  } else if (targetDir) {
    // second argument was really the options object
    options = targetDir;
  }
  // have to wrap in promise because the tar stream can emit error out of band
  let p = new Promise(async (resolve, reject) => {
    // Fix: exceptions thrown inside an async promise executor do NOT reject
    // the outer promise — without this try/catch a failure in client() or
    // upload() left `p` pending forever and surfaced as an unhandled
    // rejection.
    try {
      const tarStream = packer.pack(dir, options);
      tarStream.on('error', (e: Error) => reject(e));
      const gzip = zlib.createGzip();
      // hash the uncompressed tar as it streams by (needed for diff_ids)
      const uncompressedHash = crypto.createHash('sha256');
      tarStream.on('data', (buf: Buffer) => {
        uncompressedHash.update(buf);
      });
      tarStream.pipe(gzip);
      const client = await this.client(this.targetImage, true);
      const result = await client.upload(gzip);
      const uncompressedDigest = 'sha256:' + uncompressedHash.digest('hex');
      resolve(await this.addLayer(
          result.digest, uncompressedDigest, result.contentLength));
    } catch (e) {
      reject(e);
    }
  });
  p = this.pending.track(p);
  return p as Promise<{
    mediaType: string; digest: string; size: number;
    urls: string[] | undefined;
    uncompressedDigest: string;
  }>;
}
/**
 * Lazily fetch manifest + config for the source image, keeping a deep
 * copy of the pristine manifest in originalManifest for later syncing.
 */
async getImageData() {
  if (this.imageData) {
    return this.imageData;
  }
  this.imageData = await this.loadImageData();
  this.originalManifest =
      JSON.parse(JSON.stringify(this.imageData.manifest));
  return this.imageData;
}
/**
 * Fetch the manifest and parsed config blob for an image.
 * @param image image to load; defaults to the source image.
 */
async loadImageData(image?: ImageLocation) {
  const location = image || this.image;
  const client = await this.client(location);
  const manifest = await client.manifest(location.tag || 'latest');
  // '+ ""' coerces the blob (presumably a Buffer) into its string contents
  const configBlob = await client.blob(manifest.config.digest) + '';
  const config = JSON.parse(configBlob) as ImageConfig;
  return {manifest, config};
}
/**
 * Get (or lazily create and cache) a registry client for an image.
 * Clients are cached per registry/namespace/image/scope; a cached
 * push-capable client is reused for pull-only requests too.
 * @param _image image location or specifier string; defaults to the source image
 * @param write  true when push scope is required
 */
client(_image?: ImageLocation|string, write?: boolean) {
let image: ImageLocation;
if (typeof _image === 'string') {
image = parseSpecifier(_image);
} else {
// typescript!!!
image = _image as ImageLocation;
}
image = (image ? image : this.image);
const scope = write ? 'push,pull' : 'pull';
let key = [image.registry, image.namespace, image.image].join(',');
const writeKey = key + ',push,pull';
const readKey = key + ',pull';
// default to most permissive cached client even if it doesn't match scope.
if (this.clients[writeKey]) {
return this.clients[writeKey];
} else if (!write && this.clients[readKey]) {
return this.clients[readKey];
}
key += ',' + scope;
// cache the promise (not the resolved client) so concurrent callers share
// a single in-flight auth handshake
const promiseOfClient =
auth(image, scope, this.options.auth || {}).then((registryAuth) => {
const registryClient = new RegistryClient(
image!.registry, this.nameSpacedImageName(image), registryAuth);
return registryClient;
});
this.clients[key] = promiseOfClient;
return promiseOfClient;
}
/**
 * Finalize the image: sync base layers if needed, wait for pending layer
 * uploads, apply Cmd/Env/WorkingDir overrides, upload the updated config
 * blob, and put the manifest once per tag.
 * @param tags    tags to publish; defaults to the target image's tag or 'latest'
 * @param options sync options plus optional config overrides
 * @returns the result of the first manifest upload
 */
async save(tags?: string[], options?: SyncOptions&{
Env?: string[],
Cmd?: string[],
WorkingDir?: string
}) {
const targetImage = this.targetImage;
const client = await this.client(targetImage, true);
const imageData = await this.getImageData();
tags = tags || [targetImage.tag || 'latest'];
options = options || {};
await this.syncBaseImage(options);
// wait for all in-flight layer uploads before touching the manifest
await Promise.all(this.pending.active());
if (options.Cmd || this.Cmd) {
imageData.config.config.Cmd = options.Cmd || this.Cmd!;
}
// note: Env entries are APPENDED to the base image's env, not replaced
if (options.Env || this.Env) {
imageData.config.config.Env =
imageData.config.config.Env.concat(options.Env || this.Env || []);
}
if (options.WorkingDir || this.WorkingDir) {
imageData.config.config.WorkingDir =
options.WorkingDir || this.WorkingDir!;
}
// all layers are uploaded; now upload the (possibly modified) config blob
const uploadResult =
await client.upload(Buffer.from(JSON.stringify(imageData.config)));
imageData.manifest.config.digest = uploadResult.digest;
imageData.manifest.config.size = uploadResult.contentLength;
// put the manifest once per tag
return Promise
.all(tags.filter((v) => !!v).map((tag) => {
return client.manifestUpload(
encodeURIComponent(tag), imageData.manifest);
}))
.then((results) => {
return results[0];
});
}
/**
 * Key identifying the credential scope an image belongs to; used to decide
 * whether the source and target images can share one write client.
 */
private authKey(image: ImageLocation) {
  // Fix: this used to be `if (image.registry.indexOf('gcr.io'))`, which is
  // truthy exactly when 'gcr.io' is NOT at position 0 (indexOf returns -1
  // when absent) — the opposite of the intent.
  if (image.registry.indexOf('gcr.io') > -1) {
    // should use same auth if in same google cloud project.
    return [image.registry, image.namespace].join(',');
  }
  return [image.registry, image.namespace, image.image].join(',');
}
// verify that every layer is in the target and copy missing layers from base
// to target
/**
 * Ensure every layer of the ORIGINAL source manifest exists in the target
 * registry, streaming missing blobs from source to target (3 retries each).
 * @returns a promise for all copy operations (also tracked in `pending`).
 */
async sync(options?: SyncOptions) {
options =
Object.assign({copyRemoteLayers: true, ignoreExists: false}, options);
// ensure image data has been loaded.
await this.getImageData();
// use base manifest for sync
const manifest = this.originalManifest;
if (!manifest) {
throw new Error(
'get image data failed to populate originalManifest somehow.');
}
const client = await this.client(this.image);
const targetClient = await this.client(this.targetImage, true);
// check that every layer in source image is present in target registry
const copies: Array<Promise<{}>> = [];
manifest.layers.forEach((layer) => {
// note: support *not* copying nondistributable layers by default once
// it's supported by gcr.
if (!options!.copyRemoteLayers && layer.urls) {
return;
}
const p = targetClient.blobExists(layer.digest).then((exists) => {
if (!exists || options!.ignoreExists) {
// TODO if they are the same registry but different namespaces try
// mount first.
const action = () => {
// NOTE(review): if client.blob() rejects, the throw inside this async
// executor does NOT reject the wrapper promise (it becomes an
// unhandled rejection and the retry attempt hangs) — consider a
// try/catch around the whole body. Confirm before changing.
return new Promise(async (resolve, reject) => {
const stream = await client.blob(layer.digest, true);
// if the stream ends without the correct byte length
let bytes = 0;
stream.on('data', (b) => {
bytes += b.length;
});
stream.on('end', () => {
if (bytes !== layer.size) {
reject(Error(
'failed to get all of the bytes from the blob stream.'));
}
});
stream.on('error', (err: Error) => {
reject(err);
});
try {
resolve(await targetClient.upload(
stream, layer.size, layer.digest));
} catch (e) {
console.error(
'error syncing layer ' + layer.digest +
' to target registry:\n' + e);
reject(e);
}
});
};
return retry(action, {retries: 3}).then(() => true);
}
return true;
});
copies.push(p);
});
const all = Promise.all(copies);
this.pending.track(all);
return all;
}
/**
 * Copy the base image's layers into the target registry, at most once per
 * Image instance, and only when source and target live in different
 * registries or namespaces.
 */
private async syncBaseImage(options?: SyncOptions) {
  const sameHome = this.image.registry === this.targetImage.registry &&
      this.image.namespace === this.targetImage.namespace;
  if (sameHome || this.syncedBaseImage) {
    return;
  }
  // flag is set before sync resolves, so a second call never re-syncs
  this.syncedBaseImage = true;
  return this.sync(options);
}
/** "namespace/image", or the bare image name when there is no namespace. */
private nameSpacedImageName(image?: ImageLocation) {
  const location = image || this.image;
  return location.namespace ? `${location.namespace}/${location.image}` :
                              location.image;
}
}
// the ordinary auth mechanism that might work on any docker registry is not
// implemented. https://docs.docker.com/registry/spec/auth/token/
/**
 * Resolve credentials for a registry: built-in gcr.io / docker.io handlers
 * first, then user-provided per-registry auth, then docker credential
 * helpers as the final fallback.
 */
export const auth = async (
imageArg: ImageLocation|string, scope: string, options?: AuthConfig) => {
// todo: distinguish better between when we should try creds helpers vs only
// built in.
let image: ImageLocation;
if (typeof imageArg === 'string') {
image = parseSpecifier(imageArg);
} else {
image = imageArg;
}
try {
if (image.registry.indexOf('gcr.io') > -1) {
// NOTE(review): the literal key 'registry' below looks like it may have
// been meant as options[image.registry] — confirm against callers.
return await gcrAuth(
image, scope,
options ? options[image.registry + '/' + image.namespace] ||
options['registry'] || options['gcr.io'] || {} :
{});
} else if (image.registry.indexOf('docker.io') > -1) {
// dockerhub requires you log in every 5 minutes.
// we'll always try to get a new token for the user with the provided auth
// data
return await dockerAuth(
image, scope, options ? options['docker.io'] : undefined);
}
} catch (e) {
console.error(
'gcr or docker.io auth threw.\n' + e +
'\n falling back to cred helpers.');
}
// if the user provided auth options
if (options) {
const checked = ['gcr.io', 'docker.io'];
let providedAuth: DockerAuthResult|undefined;
// find auth supplied directly for this registry (substring match)
Object.keys(options).forEach((s) => {
if (!checked.includes(s)) {
if (image.registry.indexOf(s) > -1) {
// we have auth provided directly for this registry.
providedAuth = options[s];
}
}
});
// if we dont have an auth helper we assume the user has passed valid
// credentials.
if (providedAuth) {
return providedAuth;
}
}
// last resort: the docker credential helpers configured on this machine
const credHelpers = new DockerCredentialHelpers();
const res = await credHelpers.auth(image.registry);
return res;
};
// re-export the packer entry point so callers can build tar streams directly
export const pack = packer.pack;
// expose CustomFile to pass in image.addFiles
export const CustomFile = packer.CustomFile;
/** Per-registry authentication configuration, keyed by registry host. */
export interface AuthConfig {
'gcr.io'?: GCRAuthOptions;
// tslint:disable-next-line:no-any
'docker.io'?: any;
// tslint:disable-next-line:no-any
[k: string]: DockerAuthResult|any;
}
/** Options controlling how sync() copies layers. */
export interface SyncOptions {
// also copy layers that carry external URLs (foreign layers)
copyRemoteLayers?: boolean;
// re-upload layers even when the target registry already has them
ignoreExists?: boolean;
}
import { setupController, views, actionChartView, state, ActionChartItem, SectionItem, EquipmentSectionMechanics, translations, template, mechanicsEngine, Item, SpecialObjectsUse, CombatMechanics, Bonus, InventoryState } from "..";
/**
* The action chart controller
*/
export const actionChartController = {
/**
 * Render the action chart view
 */
index() {
    if (!setupController.checkBook()) {
        return;
    }
    views.loadView("actionChart.html").then(() => {
        actionChartView.fill(state.actionChart);
        template.addSectionReadyMarker();
    });
},
/**
 * Pick a single object, given its id.
 * @param objectId Id of the object to pick
 * @param showError True to show a toast when the player cannot pick the object
 * @param fromUITable True when the pick originates from the UI objects table
 * @return True when the object was actually picked
 */
pick(objectId: string, showError: boolean = false, fromUITable: boolean = false): boolean {
    // TODO: Check if fromUITable here should be always false...
    const item = new ActionChartItem(objectId);
    return actionChartController.pickActionChartItem(item, showError, fromUITable);
},
/**
 * Pick an object offered by the current section's UI table.
 * @param sectionItem The section object to pick
 * @return True when the object was actually picked
 */
pickFromUi(sectionItem: SectionItem): boolean {
    const item = new ActionChartItem(sectionItem.id, sectionItem.usageCount);
    return actionChartController.pickActionChartItem(item, true, true);
},
/**
 * Pick an object
 * @param aChartItem The object to pick
 * @param showError True if we should show a toast when the player
 * cannot pick the object
 * @param fromUITable True if we are picking the object from the UI
 * @return True if the object was picked. False if the object cannot be picked
 */
pickActionChartItem(aChartItem: ActionChartItem, showError: boolean = false, fromUITable: boolean = false): boolean {
try {
// Get object info
const o = aChartItem.getItem();
if (o === null) {
return false;
}
// Check if the section has restrictions about picking objects
// This will throw an exception if no more objects can be picked
if (fromUITable) {
EquipmentSectionMechanics.checkMoreObjectsCanBePicked(aChartItem.id);
}
// Try to pick the object
if (!state.actionChart.pick(aChartItem)) {
return false;
}
// Show toast
actionChartView.showInventoryMsg("pick", o, translations.text("msgGetObject", [o.name]));
// Update player statistics (for objects with effects)
actionChartView.updateStatistics();
template.updateStatistics();
return true;
} catch (e) {
// Error picking
if (showError) {
// NOTE(review): `e` is presumably thrown as a displayable string by the
// mechanics layer — confirm; an Error object would render poorly in toastr
toastr.error(e);
}
console.log(e); // This is not really an application error, so do not call mechanicsEngine.debugWarning()
return false;
}
},
/**
 * Pick a set of objects. Objects that cannot be picked are left available
 * on the current section instead.
 * @param arrayOfItems Objects to pick
 */
pickItemsList(arrayOfItems: ActionChartItem[]) {
    const sectionState = state.sectionStates.getSectionState();
    let anyLeftOnSection = false;
    for (const item of arrayOfItems) {
        const picked = actionChartController.pickActionChartItem(item, true, false);
        if (!picked) {
            // Could not be picked: make it available on the current section
            sectionState.addActionChartItemToSection(item);
            anyLeftOnSection = true;
        }
    }
    if (anyLeftOnSection) {
        // Re-render the available objects on this section (game view)
        mechanicsEngine.fireInventoryEvents();
    }
},
/**
* Drop an object
* @param objectId The object to drop,
* or "allweapons" to drop all weapons (it does not drop special items weapons),
* or "allweaponlike" to drop all weapons and special items weapons
* or "backpackcontent" to drop all backpack content, but not the backpack
* or "currentweapon" to drop the current weapon,
* or "allspecial" to drop all the special items
* or "allspecialgrdmaster" to drop all the special items except the ones allowed when beginning Grand Master serie
* or "allmeals" to drop all meals
* or "all" to drop all (weapons, backpack, special items, and money)
* or "allobjects" to drop all objects (weapons, backpack content, special items)
* @param availableOnSection Only applies if objectId is really an object id. True if the object should be available on
* the current section
* @param fromUI True if the action is fired from the UI
* @param arrowsCount Object count (only for quivers. count === n. arrows to drop)
* @param objectIndex Only applies if objectId is an object id. If specified, object index in the Action Chart object
* array to drop. If it's not specified the first object with the given objectId will be dropped (there can be more than one
* item with the same id)
* @returns If objectId was really an object id and the object was deleted, it returns the deleted object info.
* Otherwise, it returns true if something was deleted, or false if not
*/
drop(objectId: string, availableOnSection: boolean = false, fromUI: boolean = false, arrowsCount: number = 0,
objectIndex: number = -1): boolean|ActionChartItem {
// Pseudo-ids: each one expands into recursive drop calls and returns true
if (objectId === "allweapons") {
actionChartController.dropItemsList(state.actionChart.getWeaponsIds());
return true;
}
if (objectId === "currentweapon") {
const selectedWeapon = state.actionChart.getSelectedWeapon();
if (selectedWeapon) {
this.drop(selectedWeapon);
}
return true;
}
if (objectId === "allweaponlike") {
const weaponsIds = [];
for (const w of state.actionChart.getWeaponObjects(false)) {
weaponsIds.push(w.id);
}
actionChartController.dropItemsList(weaponsIds);
return true;
}
if (objectId === "backpackcontent") {
actionChartController.dropBackpackContent();
return true;
}
if (objectId === "allspecial") {
actionChartController.dropItemsList(state.actionChart.getSpecialItemsIds());
return true;
}
if (objectId === "allspecialgrdmaster") {
actionChartController.dropItemsList(state.actionChart.getSpecialItemsIds().filter((itemId) => {
return !Item.ALLOWED_GRAND_MASTER.contains(itemId);
}));
return true;
}
if (objectId === "allmeals") {
actionChartController.increaseMeals(-state.actionChart.meals);
return true;
}
if (objectId === "all" || objectId === "allobjects") {
if (objectId === "all") {
actionChartController.drop("backpack");
actionChartController.increaseMoney(- state.actionChart.beltPouch);
} else {
// objectId === 'allobjects' => Backpack content, but not the backpack itself
actionChartController.drop("backpackcontent");
}
actionChartController.drop("allweapons");
actionChartController.drop("allspecial");
return true;
}
// From here on objectId is assumed to be a real object id
// TODO: o can be removed, and use droppedItem.getItem() as replacement
const o = state.mechanics.getObject(objectId);
if (!o) {
return false;
}
const droppedItem = state.actionChart.drop(objectId, arrowsCount, objectIndex);
if (droppedItem) {
actionChartView.showInventoryMsg("drop", o, translations.text("msgDropObject", [o.name]));
// Update the action chart view
actionChartView.updateObjectsLists();
// Update player statistics (for objects with effects)
actionChartView.updateStatistics();
template.updateStatistics();
if (availableOnSection) {
// Add the dropped object as available on the current section
const sectionState = state.sectionStates.getSectionState();
sectionState.addActionChartItemToSection(droppedItem, arrowsCount);
// Render available objects on this section (game view)
mechanicsEngine.fireInventoryEvents(fromUI, o);
}
return droppedItem;
} else {
return false;
}
},
/**
 * Drop all backpack content (meals count as backpack content too)
 */
dropBackpackContent() {
actionChartController.increaseMeals(-state.actionChart.meals);
actionChartController.dropItemsList(state.actionChart.getBackpackItemsIds());
},
/**
 * Drop a list of objects, given their ids.
 * @param arrayOfItems Ids of the objects to drop.
 */
dropItemsList(arrayOfItems: string[]) {
    // The argument may alias an array owned by state.actionChart that
    // drop() mutates while we iterate, so walk a copy instead.
    const idsToDrop = arrayOfItems.clone();
    for (const id of idsToDrop) {
        actionChartController.drop(id, false, false);
    }
},
/**
 * Drop a set of objects by their indices in an Action Chart array.
 * @param arrayOfItems Source array of objects
 * @param indices Indices to arrayOfItems of objects to drop. IT MUST NOT CONTAIN DUPLICATED INDICES !!!
 * @returns Dropped objects
 */
dropItemIndicesList(arrayOfItems: ActionChartItem[], indices: number[]): ActionChartItem[] {
    // We delete objects one by one; to keep remaining indices valid, delete
    // in DESCENDING order.
    // Fix: the previous `indices.sort(); indices.reverse();` compared numbers
    // as strings ([2, 10] -> [10, 2], reversed -> [2, 10]), which actually
    // deleted in ascending order and invalidated the later indices. A numeric
    // comparator is required.
    indices = indices.clone();
    indices.sort((a, b) => b - a);
    // Drop objects
    const droppedItems: ActionChartItem[] = [];
    for (const index of indices) {
        if (index < 0 || index >= arrayOfItems.length) {
            continue;
        }
        const item = arrayOfItems[index];
        if (actionChartController.drop(item.id, false, false, 0, index)) {
            droppedItems.push(item);
        }
    }
    return droppedItems;
},
/**
* Use an object
* @param objectId The object to use
* @param dropObject True if the object should be dropped from the action chart
* @param index If used object was a owned object, this is the object index in its Action Chart array. If not specified
* or < 0, the first owned object will be used
* @param displayToast True if a message must to be displayed
*/
use(objectId: string, dropObject: boolean = true, index: number = -1, displayToast = false) {
// Get the object
const o = state.mechanics.getObject(objectId);
if (!o) {
return;
}
if (o.usage) {
// Do the usage action:
if (o.usage.cls === Item.ENDURANCE) {
actionChartController.increaseEndurance(o.usage.increment);
} else if (o.usage.cls === Item.COMBATSKILL) {
// Combat skill modifiers only apply to the current section combats
const sectionState = state.sectionStates.getSectionState();
sectionState.combatSkillUsageModifier(o.usage.increment);
} else if (o.usage.cls === "special") {
// Special usage
SpecialObjectsUse.use(o);
}
}
if (displayToast) {
toastr.info(translations.text("objectUsed", [o.name]));
}
// Update player statistics
actionChartView.updateStatistics();
template.updateStatistics();
// Owned object to drop?
if (dropObject) {
// Decrease the usageCount. If there are no more uses, drop the object
const aChartItem = state.actionChart.getActionChartItem(objectId, index);
if (aChartItem) {
// Be sure usageCount is not null
if (!aChartItem.usageCount) {
aChartItem.usageCount = 0;
}
aChartItem.usageCount--;
// an item with no/zero usageCount was normalized to 0 above, so it is
// dropped on its first use
if (aChartItem.usageCount <= 0) {
actionChartController.drop(objectId, false, false, 0, index);
} else {
actionChartView.updateObjectsLists();
}
}
}
// Fire mechanics rules
mechanicsEngine.fireObjectUsed(objectId);
},
/**
* Increase / decrease the meals number
* @param count Number to increase. Negative to decrease
*/
increaseMeals(count: number) {
try {
// may throw when the meals cannot be carried (presumably backpack rules)
state.actionChart.increaseMeals(count);
const o = state.mechanics.getObject("meal");
if (count > 0) {
actionChartView.showInventoryMsg("pick", o,
translations.text("msgGetMeal", [count]));
} else if (count < 0) {
actionChartView.showInventoryMsg("drop", o,
translations.text("msgDropMeal", [-count]));
}
} catch (e) {
// NOTE(review): `e` is presumably thrown as a displayable string — confirm;
// an Error object would render poorly in toastr
toastr.error(e);
}
},
/**
 * Increase / decrease the money counter
 * @param count Amount to add (negative to subtract)
 * @param availableOnSection When count < 0, leave the dropped money available
 * on the current section
 * @returns Amount really picked.
 */
increaseMoney(count: number, availableOnSection: boolean = false): number {
    const amountPicked = state.actionChart.increaseMoney(count);
    const moneyObject = state.mechanics.getObject("money");
    if (count > 0) {
        actionChartView.showInventoryMsg("pick", moneyObject,
            translations.text("msgGetMoney", [count]));
    } else if (count < 0) {
        actionChartView.showInventoryMsg("drop", moneyObject, translations.text("msgDropMoney", [-count]));
        if (availableOnSection) {
            // Make the dropped money available on the current section
            const sectionState = state.sectionStates.getSectionState();
            sectionState.addObjectToSection(Item.MONEY, 0, false, -count);
        }
    }
    actionChartView.updateMoney();
    return amountPicked;
},
/**
 * Display a toast for an endurance increase / decrease.
 * @param count Change amount (negative for a loss)
 * @param permanent True when the change also affects the original endurance
 */
displayEnduranceChangeToast(count: number, permanent: boolean) {
    if (count > 0) {
        toastr.success(translations.text("msgEndurance", ["+" + count]));
        return;
    }
    if (count < 0) {
        let message = translations.text("msgEndurance", [count]);
        if (permanent) {
            message += " (" + translations.text("permanent") + ")";
            toastr.error(message);
        } else {
            toastr.warning(message);
        }
    }
},
/**
 * Increase / decrease the current endurance.
 * @param count Amount to add (negative to subtract)
 * @param noToast True to suppress the toast message
 * @param permanent True when the change also affects the original endurance
 */
increaseEndurance(count: number, noToast: boolean = false, permanent: boolean = false) {
    state.actionChart.increaseEndurance(count, permanent);
    if (!noToast) {
        actionChartController.displayEnduranceChangeToast(count, permanent);
    }
    if (count < 0) {
        // Losing endurance can kill the player...
        mechanicsEngine.testDeath();
        // ...or require the Psi-surge to be disabled
        CombatMechanics.checkSurgeEnabled();
    } else {
        // Check if the +20EP button is still available
        actionChartView.updateRestore20EPState();
    }
    template.updateStatistics();
},
/** Set the current endurance, just for debug */
setEndurance(endurance: number) {
    const delta = endurance - state.actionChart.currentEndurance;
    actionChartController.increaseEndurance(delta);
},
/**
 * Increase / decrease the combat skill permanently
 * @param count Number to increase. Negative to decrease
 * @param showToast True if we should show a "toast" on the UI with the CS increase
 */
increaseCombatSkill(count: number, showToast: boolean = true) {
    // (added the missing `: number` annotation: combatSkill is arithmetic
    // state and a string argument would silently corrupt it via `+=`)
    state.actionChart.combatSkill += count;
    if (showToast) {
        if (count > 0) {
            toastr.success(translations.text("msgCombatSkill", ["+" + count]));
        } else if (count < 0) {
            toastr.warning(translations.text("msgCombatSkill", [count]));
        }
    }
    template.updateStatistics();
},
/**
 * Select the current weapon.
 * @param weaponId Id of the weapon to select. Ignored when it is already
 * selected, or when the player does not own it.
 */
setSelectedWeapon(weaponId: string) {
    if (state.actionChart.getSelectedWeapon() === weaponId ||
        !state.actionChart.hasObject(weaponId)) {
        return;
    }
    state.actionChart.setSelectedWeapon(weaponId);
    actionChartController.updateSelectedWeaponUI();
},
/**
 * Change the "Fight unarmed" flag.
 * @param fightUnarmed New value for "Fight unarmed" flag
 */
setFightUnarmed(fightUnarmed: boolean) {
state.actionChart.fightUnarmed = fightUnarmed;
// the selected-weapon UI depends on this flag, so refresh it
actionChartController.updateSelectedWeaponUI();
},
/**
 * Refresh every piece of UI that depends on the currently selected weapon.
 */
updateSelectedWeaponUI() {
    // Weapon list, then the other object lists (weapons can also live on
    // backpack / special items), then the derived statistics
    actionChartView.updateWeapons();
    actionChartView.updateObjectsLists();
    actionChartView.updateStatistics();
    template.updateStatistics();
    // Announce the weapon change with a toast
    const weapon = state.actionChart.getSelectedWeaponItem(false);
    const weaponName = weapon ? weapon.name : translations.text("noneFemenine");
    toastr.info(translations.text("msgCurrentWeapon", [weaponName]));
},
/**
 * Render a set of bonuses as a human-readable string.
 * @param bonuses Bonuses to render
 * @return Comma-separated "concept: ±n" pairs
 */
getBonusesText(bonuses: Bonus[]) {
    const parts = bonuses.map((bonus) => {
        const sign = bonus.increment > 0 ? "+" : "";
        return bonus.concept + ": " + sign + bonus.increment.toString();
    });
    return parts.join(", ");
},
/**
* Restore the inventory from an object generated with ActionChart.getInventoryState.
* This does not replace the current inventory, just append objects to the current.
* @param inventoryState Inventory to recover. Objects restored will be removed from the state
* @param recoverWeapons Should we recover weapons (includes special items)?
*/
restoreInventoryState(inventoryState: InventoryState, recoverWeapons: boolean) {
if (!state.actionChart.hasBackpack && inventoryState.hasBackpack) {
actionChartController.pick(Item.BACKPACK, false, false);
}
inventoryState.hasBackpack = false;
actionChartController.increaseMoney(inventoryState.beltPouch);
inventoryState.beltPouch = 0;
actionChartController.increaseMeals(inventoryState.meals);
inventoryState.meals = 0;
actionChartController.pickItemsList(inventoryState.backpackItems);
inventoryState.backpackItems = [];
// NOTE(review): the two consecutive `if (recoverWeapons)` blocks could be
// merged; kept as-is to avoid touching the logic
if (recoverWeapons) {
actionChartController.pickItemsList(inventoryState.weapons);
inventoryState.weapons = [];
}
if (recoverWeapons) {
actionChartController.pickItemsList(inventoryState.specialItems);
inventoryState.specialItems = [];
} else {
// Recover only non-weapon special items
actionChartController.pickItemsList(inventoryState.getAndRemoveSpecialItemsNonWeapon());
}
// This must be done after picking quivers (special items)
actionChartController.increaseArrows(inventoryState.arrows);
inventoryState.arrows = 0;
},
/**
* Increase the number of arrows of the player
* @param increment N. of arrows to increment. Negative to decrement
* @returns Number of really increased arrows. Arrows number on action chart is limited by the number of quivers
*/
increaseArrows(increment: number): number {
// the action chart caps arrows by the number of owned quivers, so the
// applied increment may be smaller than requested
const realIncrement = state.actionChart.increaseArrows(increment);
const o = state.mechanics.getObject("arrow");
if (realIncrement > 0) {
actionChartView.showInventoryMsg("pick", o,
translations.text("msgGetArrows", [realIncrement]));
} else if (increment < 0) {
// If increment is negative, show always the original amount, not the real (useful for debugging)
actionChartView.showInventoryMsg("drop", o,
translations.text("msgDropArrows", [-increment]));
} else if (increment > 0 && realIncrement === 0) {
// You cannot pick more arrows (not quivers enough)
toastr.error(translations.text("noQuiversEnough"));
}
return realIncrement;
},
/**
 * Use the Magnakai Medicine Archmaster +20 EP.
 */
use20EPRestore() {
    if (!state.actionChart.use20EPRestore()) {
        return;
    }
    toastr.success(translations.text("msgEndurance", ["+20"]));
    template.updateStatistics();
},
/** Return page: the controller to navigate back to from this view */
getBackController() { return "game"; },
}; | the_stack |
import assert from 'assert';
import * as Tp from 'thingpedia';
import { Ast, Type } from 'thingtalk';
import type Engine from '../engine';
import type { DeviceInfo } from '../engine';
import { cleanKind } from '../utils/misc-utils';
import { ReplacedList, ReplacedConcatenation } from '../utils/template-string';
import ValueCategory from './value-category';
import StatementExecutor from './statement_executor';
import { CancellationError } from './errors';
import { EntityRecord, getBestEntityMatch } from './entity-linking/entity-finder';
import { Contact } from './entity-linking/contact_search';
import { PlatformData } from './protocol';
import { ConversationState } from './conversation';
import AbstractDialogueAgent, {
DisambiguationHints,
} from './abstract_dialogue_agent';
/** Minimal view of a conversation needed by the execution agent. */
interface AbstractConversation {
// unique conversation identifier
id : string;
// serializable snapshot of the conversation (passed along with links,
// see ExecutionDialogueAgent._requireRegistration)
getState() : ConversationState;
}
/**
* The interface that the {@link ExecutionDialogueAgent} uses to communicate
* with outside.
*
* In some code paths, {@link ExecutionDialogueAgent} needs to send messages
* to the user or ask questions, in the middle of preparing for execution
* and outside of the normal dialogue loop.
*
* It does so by calling this interface, which for the normal assistant is
* implemented by {@link DialogueLoop}.
*
* TODO: This interface has some ugly inversion of control where the outside
* code that drives the dialogue gets called synchronously by this code.
* We should refactor all of this.
*/
export interface AbstractDialogueLoop {
platformData : PlatformData;
isAnonymous : boolean;
// gettext-style translation function
_ : (x : string) => string;
conversation : AbstractConversation;
// send a plain-text message to the user
reply(msg : string) : Promise<void>;
// send a hyperlink, optionally carrying conversation state to resume later
replyLink(title : string, link : string, state ?: ConversationState) : Promise<void>;
// fill a template string with the given arguments
interpolate(msg : string, args : Record<string, unknown>) : string;
// interpolate + reply in one step
replyInterp(msg : string, args : Record<string, unknown>) : Promise<void>;
// ask the user a question expecting a value of the given category
ask(expected : ValueCategory.PhoneNumber|ValueCategory.EmailAddress|ValueCategory.Location|ValueCategory.Time,
question : string,
args ?: Record<string, unknown>) : Promise<Ast.Value>;
// ask a multiple-choice question; resolves to the index of the choice
askChoices(question : string, choices : string[]) : Promise<number>;
}
/**
* The execution time dialogue agent.
*
* Provides access to the real user's information, stored in the engine.
*/
export default class ExecutionDialogueAgent extends AbstractDialogueAgent<undefined> {
private _engine : Engine;
private _platform : Tp.BasePlatform;
private _dlg : AbstractDialogueLoop;
private _executor : StatementExecutor;
constructor(engine : Engine, dlg : AbstractDialogueLoop, debug : boolean) {
super(engine.thingpedia, engine.schemas, {
debug: debug,
locale: engine.platform.locale,
timezone: engine.platform.timezone
});
this._engine = engine;
this._platform = engine.platform;
// one statement executor per conversation
this._executor = new StatementExecutor(engine, dlg.conversation.id);
this._dlg = dlg;
}
/** Translation function of the owning dialogue loop. */
get _() {
return this._dlg._;
}
/** The statement executor bound to this conversation. */
get executor() {
return this._executor;
}
/** List the configured devices of the given Thingpedia kind. */
async getAllDevicesOfKind(kind : string) {
return this._engine.getDeviceInfos(kind);
}
/**
 * Reply with `msg` plus a sign-up link carrying the conversation state,
 * then abort the current turn by throwing (hence Promise<never>).
 */
private async _requireRegistration(msg : string) : Promise<never> {
const state = this._dlg.conversation.getState();
await this._dlg.reply(msg);
await this._dlg.replyLink(this._("Sign up for Genie"), "/user/register", state);
throw new CancellationError();
}
protected async checkForPermission(stmt : Ast.ExpressionStatement) {
if (!this._dlg.isAnonymous)
return;
if (stmt.last.schema!.functionType === 'action' &&
!['org.thingpedia.builtin.thingengine.builtin.faq_reply',
'org.thingpedia.builtin.thingengine.builtin.say'].includes(stmt.last.schema!.qualifiedName))
await this._requireRegistration(this._("To use this command you must first create a personal Genie account."));
if (stmt.stream) {
// check available notification backends
// if we have one, we allow notifications from anonymous accounts
// and we'll ask the user for the notification configuration
// otherwise, we reject them
const available = this._engine.assistant.getAvailableNotificationBackends();
if (available.length === 0)
await this._requireRegistration(this._("To receive notifications you must first create a personal Genie account."));
}
}
async disambiguate(type : 'device'|'device-missing'|'contact',
name : string|null,
choices : string[],
hint ?: string) : Promise<number> {
let question : string;
if (type === 'device-missing') {
assert(name);
question = this._dlg.interpolate(this._("I cannot find any ${name} ${device} device. Which device do you want to use?"), {
name, device: cleanKind(hint!)
});
} else if (type === 'device') {
question = this._dlg.interpolate(this._("You have multiple {${name}| }${device} devices. Which one do you want to use?"), {
name, device: cleanKind(hint!)
});
} else {
question = this._dlg.interpolate(this._("Multiple contacts match “${name}”. Who do you mean?"), { name });
}
return this._dlg.askChoices(question, choices);
}
protected async tryConfigureDevice(kind : string) : Promise<DeviceInfo|null> {
const factories = await this._tpClient.getDeviceSetup([kind]);
const factory = factories[kind];
if (!factory) {
await this._dlg.replyInterp(this._("You need to enable ${device} before you can use that command."), {
device: cleanKind(kind)
});
await this._dlg.replyLink(this._dlg.interpolate(this._("Configure ${device}"), {
device: cleanKind(kind)
}), "/devices/create");
return null;
}
if (factory.type === 'none') {
const device = await this._engine.createDevice({ kind: factory.kind });
return this._engine.getDeviceInfo(device.uniqueId!);
} else {
if (this._dlg.isAnonymous) {
await this._requireRegistration(this._dlg.interpolate(this._("Sorry, to use ${device}, you must create a personal Almond account."), {
device: factory.text,
}));
}
if (factory.type === 'multiple' && factory.choices.length === 0) {
await this._dlg.replyInterp(this._("You need to enable ${device} before you can use that command."), {
device: factory.text
});
} else if (factory.type === 'multiple') {
await this._dlg.replyInterp(this._("You do not have a ${device} configured. You will need to enable ${choices} before you can use that command."), {
device: factory.text,
choices: new ReplacedList(factory.choices.map((f) => new ReplacedConcatenation([f.text], {}, {})), this._engine.platform.locale, 'disjunction')
});
} else if ((await this.getAllDevicesOfKind(factory.kind)).length > 0) {
await this._dlg.replyInterp(this._("You do not have a ${device} configured. You will need to configure it inside your ${factory} before you can use that command."), {
device: cleanKind(kind),
factory: factory.text,
});
// exit early without any button
return null;
} else {
await this._dlg.replyInterp(this._("You need to enable ${device} before you can use that command."), {
device: factory.text
});
}
// HACK: home assistant cannot be configured here, override the factory type
if (factory.type !== 'multiple' && factory.kind === 'io.home-assistant')
factory.type = 'interactive'; // this code is CHAOTIC EVIL as it exploits the unsoundness of TypeScript :D
switch (factory.type) {
case 'oauth2':
await this._dlg.replyLink(this._dlg.interpolate(this._("Configure ${device}"), { device: factory.text }),
`/devices/oauth2/${factory.kind}?name=${encodeURIComponent(factory.text)}`);
break;
case 'multiple':
await this._dlg.replyLink(this._("Configure a new skill"), "/devices/create");
break;
default:
await this._dlg.replyLink(this._dlg.interpolate(this._("Configure ${device}"), { device: factory.text }), "/devices/create");
}
return null;
}
}
async lookupContact(category : ValueCategory, name : string) : Promise<Contact[]> {
if (this._dlg.platformData.contacts) {
for (const platformContact of this._dlg.platformData.contacts) {
if (platformContact.value === name) {
this.debug(`Mapped @${name} to ${platformContact.principal} using platform data`);
return [{
value: platformContact.principal,
displayName: platformContact.display
}];
}
}
}
const contactApi = this._platform.getCapability('contacts');
if (contactApi === null)
return [];
let what : 'phone_number' | 'email_address' | 'contact';
if (category === ValueCategory.PhoneNumber)
what = 'phone_number';
else if (category === ValueCategory.EmailAddress)
what = 'email_address';
else
what = 'contact';
return contactApi.lookup(what, name);
}
async askMissingContact(category : ValueCategory.EmailAddress|ValueCategory.PhoneNumber|ValueCategory.Contact,
name : string) : Promise<Ast.EntityValue> {
await this._dlg.replyInterp(this._("No contact matches “${name}”."), { name });
// straight up ask for the target category
// this ensures we show a contact picker, which is better than
// repeatedly asking the user
const value = await this._dlg.ask(category === ValueCategory.Contact ? ValueCategory.PhoneNumber : category,
this._("Who do you want to contact?"));
assert(value instanceof Ast.EntityValue);
return value;
}
protected async addDisplayToContact(contact : Ast.EntityValue) : Promise<void> {
const principal = contact.value;
if (!principal)
return;
if (this._dlg.platformData.contacts) {
for (const platformContact of this._dlg.platformData.contacts) {
if (platformContact.principal === principal) {
contact.display = platformContact.display;
return;
}
}
}
const contactApi = this._platform.getCapability('contacts');
if (contactApi === null)
return;
const addressBookContact = await contactApi.lookupPrincipal(principal);
if (addressBookContact)
contact.display = addressBookContact.displayName;
}
private async _constructEntityQuery(kind : string, query : string, entityDisplay : string) {
const schema = await this._schemas.getSchemaAndNames(kind, 'query', query);
const filter = new Ast.BooleanExpression.Atom(null, 'id', '=~', new Ast.Value.String(entityDisplay));
const invocation = (new Ast.Invocation(null, new Ast.DeviceSelector(null, kind, null, null), query, [], schema));
const invocationTable = new Ast.InvocationExpression(null, invocation, schema);
const filteredTable = new Ast.FilterExpression(null, invocationTable, filter, schema);
return new Ast.ExpressionStatement(null, filteredTable);
}
protected async resolveEntity(entityType : string,
entityDisplay : string,
hints : DisambiguationHints) : Promise<EntityRecord> {
const hintsCandidates = hints.idEntities.get(entityType);
if (hintsCandidates)
return getBestEntityMatch(entityDisplay, entityType, hintsCandidates);
// HACK this should be made generic with some new Genie annotation
if (entityType === 'org.freedesktop:app_id') {
const appLauncher = this._platform.getCapability('app-launcher');
if (appLauncher) {
const apps = await appLauncher.listApps();
return getBestEntityMatch(entityDisplay, entityType, apps);
}
}
const { data: tpCandidates, /*meta*/ } = await this._tpClient.lookupEntity(entityType, entityDisplay);
if (tpCandidates.length > 0)
return getBestEntityMatch(entityDisplay, entityType, tpCandidates);
let stmt;
try {
const kind = entityType.split(":")[0];
const query = entityType.split(":")[1];
stmt = await this._constructEntityQuery(kind, query, entityDisplay);
} catch(e) {
// ignore an error here (it indicates the query is not an ID query)
}
let candidates = tpCandidates;
if (stmt) {
await this._prepareForExecution(stmt, hints);
const [results,] = await this._executor.executeStatement(stmt, undefined, undefined);
candidates = [];
for (const item of results!.results) {
const id = item.value.id;
if (!id || !(id instanceof Ast.EntityValue))
continue;
const entity = {
type: entityType,
value: id.value!,
canonical: id.display!.toLowerCase(),
name: id.display!
};
candidates.push(entity);
}
}
if (candidates.length === 0) {
console.error(`Cannot find any entity of type ${entityType} matching "${entityDisplay}"`);
/*await this._dlg.replyInterp(this._("Sorry, I cannot find any ${entity_type} matching “${name}”."), {
entity_type: meta.name,
name: entityDisplay
});*/
throw new CancellationError();
}
return candidates[0];
}
private async _tryGetCurrentLocation() : Promise<Ast.AbsoluteLocation|null> {
const gps = this._platform.getCapability('gps');
if (gps === null)
return null;
const location = await gps.getCurrentLocation();
if (location === null) {
this.debug('GPS location not available');
return null;
} else {
return new Ast.Location.Absolute(location.latitude, location.longitude, location.display||null);
}
}
protected async lookupLocation(searchKey : string, previousLocations : Ast.AbsoluteLocation[]) : Promise<Ast.LocationValue> {
const currentLocation = await this._tryGetCurrentLocation();
const lastLocation = previousLocations.length ? previousLocations[previousLocations.length - 1] : undefined;
let around;
if (lastLocation)
around = { latitude: lastLocation.lat, longitude: lastLocation.lon };
else if (currentLocation)
around = { latitude: currentLocation.lat, longitude: currentLocation.lon };
const candidates = await this._tpClient.lookupLocation(searchKey, around);
// ignore locations larger than a city
const mapped = candidates.filter((c) => c.rank >= 16).map((c) => {
return new Ast.Location.Absolute(c.latitude, c.longitude, c.display);
});
if (mapped.length === 0) {
await this._dlg.replyInterp(this._("Sorry, I cannot find any location matching “${location}”."), {
location: searchKey,
});
throw new CancellationError();
}
return new Ast.Value.Location(mapped[0]);
}
private _tryGetStoredVariable(type : Type, variable : string) : Ast.Value|null {
if (this._dlg.isAnonymous)
return null;
const sharedPrefs = this._platform.getSharedPreferences();
const value = sharedPrefs.get('context-' + variable);
if (value === undefined)
return null;
return Ast.Value.fromJSON(type, value);
}
private async _resolvePhoneNumber() : Promise<Ast.Value> {
// if we received the command over SMS, that's our phone number, immediately
if (this._dlg.platformData.from && this._dlg.platformData.from.startsWith('phone:'))
return new Ast.Value.Entity(this._dlg.platformData.from.substring('phone:'.length), 'tt:phone_number', null);
if (!this._dlg.isAnonymous) {
const profile = this._platform.getProfile();
if (profile.phone) {
// TODO phone verification???
assert(profile.phone_verified);
return new Ast.Value.Entity(profile.phone, 'tt:phone_number', null);
}
}
const phone = await this._dlg.ask(ValueCategory.PhoneNumber, this._("What is your phone number?"));
if (this._dlg.isAnonymous) {
return phone;
} else {
if (!await this._platform.setProfile({ phone: String(phone.toJS()) }))
return phone;
const profile = this._platform.getProfile();
assert(profile.phone_verified);
return phone;
}
}
private async _resolveEmailAddress() : Promise<Ast.Value> {
// if we received the command over email, that's our email address, immediately
if (this._dlg.platformData.from && this._dlg.platformData.from.startsWith('email:'))
return new Ast.Value.Entity(this._dlg.platformData.from.substring('email:'.length), 'tt:email_address', null);
if (!this._dlg.isAnonymous) {
const profile = this._platform.getProfile();
if (profile.email) {
if (!profile.email_verified)
await this._dlg.reply(this._("You must verify your email address by clicking the verification link before you can use it to receive notifications."));
return new Ast.Value.Entity(profile.email, 'tt:email_address', null);
}
}
const email = await this._dlg.ask(ValueCategory.EmailAddress, this._("What is your email address?"));
if (this._dlg.isAnonymous) {
return email;
} else {
if (!await this._platform.setProfile({ email: String(email.toJS()) }))
return email;
const profile = this._platform.getProfile();
if (!profile.email_verified)
await this._dlg.reply(this._("Thank you! Please verify your email address by clicking the verification link before continuing."));
return email;
}
}
protected async resolveUserContext(variable : string) : Promise<Ast.Value> {
switch (variable) {
case '$context.self.phone_number':
return this._resolvePhoneNumber();
case '$context.self.email_address':
return this._resolveEmailAddress();
}
let value : Ast.Value|null = null;
switch (variable) {
case '$context.location.current_location': {
const location = await this._tryGetCurrentLocation();
if (location)
value = new Ast.Value.Location(location);
else
value = this._tryGetStoredVariable(Type.Location, variable);
break;
}
case '$context.location.home':
case '$context.location.work':
value = this._tryGetStoredVariable(Type.Location, variable);
break;
case '$context.time.morning':
case '$context.time.evening':
value = this._tryGetStoredVariable(Type.Time, variable);
break;
default:
throw new TypeError('Invalid variable ' + variable);
}
if (value !== null)
return value;
let question, type;
switch (variable) {
case '$context.location.current_location':
question = this._("Where are you now?");
type = ValueCategory.Location as const;
break;
case '$context.location.home':
question = this._("What is your home address?");
type = ValueCategory.Location as const;
break;
case '$context.location.work':
question = this._("What is your work address?");
type = ValueCategory.Location as const;
break;
case '$context.time.morning':
question = this._("What time does your morning begin?");
type = ValueCategory.Time as const;
break;
case '$context.time.evening':
question = this._("What time does your evening begin?");
type = ValueCategory.Time as const;
break;
}
let answer = await this._dlg.ask(type, question);
if (type === ValueCategory.Location) {
assert(answer instanceof Ast.LocationValue);
if (answer.value instanceof Ast.RelativeLocation)
answer = await this.resolveUserContext('$context.location.' + answer.value.relativeTag);
else if (answer.value instanceof Ast.UnresolvedLocation)
answer = await this.lookupLocation(answer.value.name, []);
}
if (!this._dlg.isAnonymous) {
const sharedPrefs = this._platform.getSharedPreferences();
sharedPrefs.set('context-' + variable, answer.toJS());
}
return answer;
}
getPreferredUnit(type : string) : string|undefined {
const pref = this._platform.getSharedPreferences();
return pref.get('preferred-' + type) as string|undefined;
}
protected async configureNotifications() {
if (!this._dlg.isAnonymous) {
// if we're not anonymous, look at the previous configuration
const prefs = this._platform.getSharedPreferences();
const backendId = prefs.get('notification-backend') as string|undefined;
// check if the user has chosen a backend, and if that backend was
// autodiscovered from a thingpedia device, check that the device is
// still available
if (backendId !== undefined &&
(!backendId.startsWith('thingpedia/') || this._engine.hasDevice(backendId.substring('thingpedia/'.length))))
return undefined; // return null so we don't force a particular configuration now
}
const available = this._engine.assistant.getAvailableNotificationBackends();
// if no backend is available, use the default (which is to blast to all
// conversations) and leave it unspecified
if (available.length === 0)
return undefined;
// if we have voice, we'll use that for notifications
if (this._platform.hasCapability('sound'))
return undefined;
let backend;
if (this._dlg.platformData.from) {
if (this._dlg.platformData.from.startsWith('email:'))
backend = available.find((b) => b.uniqueId === 'email');
else if (this._dlg.platformData.from.startsWith('phone:'))
backend = available.find((b) => b.uniqueId === 'twilio');
}
if (!backend) {
let chosen;
if (available.length > 1) {
const choices = available.map((c) => c.name);
chosen = await this._dlg.askChoices(this._("How would you like to be notified?"), choices);
} else {
chosen = 0;
}
backend = available[chosen];
}
const settings = backend.requiredSettings;
const config : Record<string, string> = {};
// ensure that all settings needed by the notification backend are set
for (const key in settings) {
const variable = settings[key];
config[key] = String((await this.resolveUserContext(variable)).toJS());
}
// if we get here, the user has given meaningful answers to our questions
// in anonymous mode, we make up a transient notification config that we'll
// use just for this program
//
// in non-anonymous mode, we save the choice the notification backend
// other info has been saved to the profile already
if (this._dlg.isAnonymous) {
return {
backend: backend.uniqueId,
config
};
} else {
const prefs = this._platform.getSharedPreferences();
prefs.set('notification-backend', backend.uniqueId);
return undefined;
}
}
} | the_stack |
import {
Component,
OnInit,
Input,
ChangeDetectorRef,
Output,
EventEmitter, ViewChild
} from '@angular/core';
import {FormGroup, FormControl, Validators} from '@angular/forms';
import {cloneDeep, map, zipObject, extend, defer} from 'lodash';
import {TranslateService} from '@ngx-translate/core';
import {TreeNode} from 'primeng/api';
import {NgbModal} from '@ng-bootstrap/ng-bootstrap';
import {
CalendarOptions,
DateSelectArg,
EventClickArg,
EventAddArg,
EventChangeArg
} from '@fullcalendar/angular';
import ruLocale from '@fullcalendar/core/locales/ru';
import dayGridPlugin from '@fullcalendar/daygrid';
import timeGridPlugin from '@fullcalendar/timegrid';
import listPlugin from '@fullcalendar/list';
import interactionPlugin from '@fullcalendar/interaction';
import {ContentField} from './catalog/models/content_field.model';
import {MultiValues} from './models/multivalues.model';
import {Properties} from './models/properties.iterface';
import {FileData} from './catalog/models/file-data.model';
import {SystemNameService} from './services/system-name.service';
import {AppSettings} from './services/app-settings.service';
import {CategoriesService} from './catalog/services/categories.service';
import {ModalEditTextareaComponent} from '@app/components/modal-edit-textarea.component';
@Component({
selector: 'app-input-field-renderer',
templateUrl: 'templates/render-input-field.html',
providers: []
})
export class InputFieldRenderComponent implements OnInit {
    // --- Inputs supplied by the host form component ---
    @Input() fields: ContentField[];
    @Input() groups: string[];
    @Input() model: {[key: string]: any};
    @Input() form: FormGroup;
    @Input() formErrors: {[key: string]: string};
    @Input() validationMessages: {[key: string]: {[key: string]: string}};
    // Files picked by the user, keyed by field name (shared with the host).
    @Input() files: { [key: string]: File } = {};
    @Input() localeFieldsAllowed: string[] = [];
    @Input() isLocalizationActive: boolean;
    // Emits the field name when the user requests a new translation.
    @Output() onAddTranslation = new EventEmitter<string>();
    // Checkbox fields: mirror of available values and their checked state.
    fieldsMultivalues: {[key: string]: MultiValues} = {};
    submitted = false;
    // Base URL for uploaded-file previews, taken from app settings.
    filesDirBaseUrl: string;
    loadingCategories = false;
    categories = [];
    categoriesTree: TreeNode[] = [];
    // PrimeNG tree selection per 'categories' field.
    categoriesSelection: {[key: string]: any} = {};
    // Per-field FullCalendar configuration for 'schedule' inputs.
    fullCalendarOptions: {[key: string]: CalendarOptions};
    // All dependencies are Angular-injected; only app settings are read eagerly.
    constructor(
        private changeDetectionRef: ChangeDetectorRef,
        private systemNameService: SystemNameService,
        private categoriesService: CategoriesService,
        private translateService: TranslateService,
        private modalService: NgbModal,
        private appSettings: AppSettings
    ) {
        this.filesDirBaseUrl = this.appSettings.settings.filesDirUrl;
    }
    // Load the category tree (with selection sync) and build the form controls.
    ngOnInit(): void {
        this.getCategoriesTree(true);
        this.buildControls();
    }
buildControls() {
this.fields.forEach(function(field) {
this.setFieldProperties(field);
this.setFieldOptions(field);
this.setValue(field);
this.formErrors[field.name] = '';
if (!this.validationMessages[field.name]) {
this.validationMessages[field.name] = {};
}
if (field.inputType === 'categories') {
this.categoriesSelection[field.name] = [];
}
if (!this.form.controls[field.name]) {
const validators = this.getValidators(field);
const control = new FormControl(this.model[field.name], validators);
this.form.addControl(field.name, control);
}
}.bind(this));
this.changeDetectionRef.detectChanges();
}
    /**
     * Merge per-input-type default properties into the field's configured
     * `inputProperties`, and perform type-specific setup (FullCalendar
     * options for 'schedule', list splitting for 'parameters'/'rich_text').
     */
    setFieldProperties(field: ContentField): void {
        let propertiesDefault: Properties;
        switch (field.inputType) {
            case 'number':
                propertiesDefault = {
                    handler: '',
                    multiple: 0,
                    min: null,
                    max: null,
                    step: 1
                };
                this.extendProperties(
                    field.inputProperties,
                    propertiesDefault
                );
                break;
            case 'date':
                propertiesDefault = {
                    handler: '',
                    multiple: 0,
                    format: 'dd.mm.yy',
                    show_time: 0,
                    hour_format: 24,
                    first_day_of_week: 1,
                    locale: 'en'
                };
                this.extendProperties(
                    field.inputProperties,
                    propertiesDefault
                );
                break;
            case 'parameters':
                propertiesDefault = {
                    handler: '',
                    multiple: 0,
                    names: 'NAME,VALUE,PRICE,IMAGE_NUMBER',
                    keys: 'name,value,price,imageNum',
                    types: 'text,text,number,number'
                };
                this.extendProperties(
                    field.inputProperties,
                    propertiesDefault
                );
                // Split the comma-separated column definitions into arrays,
                // padding short configured lists with the defaults.
                ['names', 'keys', 'types'].forEach((k) => {
                    const defaultValues = String(propertiesDefault[k]).split(',');
                    let values = String(field.inputProperties[k]).split(',');
                    if (defaultValues.length > values.length) {
                        values = values.concat(defaultValues.slice(values.length));
                    }
                    field.inputProperties[k] = values;
                });
                break;
            case 'schedule':
                propertiesDefault = {
                    slotDuration: '0:10:00'
                };
                this.extendProperties(
                    field.inputProperties,
                    propertiesDefault
                );
                if (!this.fullCalendarOptions) {
                    this.fullCalendarOptions = {};
                }
                // Base FullCalendar configuration; field.inputProperties is
                // merged on top below, so any option can be overridden.
                this.fullCalendarOptions[field.name] = {
                    plugins: [dayGridPlugin, timeGridPlugin, listPlugin, interactionPlugin],
                    headerToolbar: {
                        left: 'prev,next today',
                        center: 'title',
                        right: 'dayGridMonth,timeGridWeek,timeGridDay,listWeek'
                    },
                    locales: [ruLocale],
                    locale: this.appSettings.settings.locale,
                    initialView: 'dayGridMonth',
                    weekends: true,
                    editable: true,
                    selectable: true,
                    selectMirror: true,
                    dayMaxEvents: false,
                    navLinks: true,
                    slotDuration: '0:10:00',
                    initialEvents: this.model[field.name],
                    select: (selectInfo: DateSelectArg) => {
                        this.handleFullCalendarDateSelect(field.name, selectInfo);
                    },
                    eventClick: (clickInfo: EventClickArg) => {
                        this.handleFullCalendarEventClick(field.name, clickInfo);
                    },
                    eventAdd: (api: EventAddArg) => {
                        this.handleFullCalendarEventAdd(field.name, api);
                    },
                    eventChange: (api: EventChangeArg) => {
                        this.handleFullCalendarEventChange(field.name, api);
                    }
                };
                Object.assign(this.fullCalendarOptions[field.name], field.inputProperties);
                break;
            case 'rich_text':
                propertiesDefault = {
                    handler: '',
                    multiple: 0,
                    formats: 'background,bold,color,font,code,italic,link,'
                        + 'strike,script,underline,blockquote,header,indent,'
                        + 'list,align,direction,code-block,formula,image,video,clean'
                };
                this.extendProperties(
                    field.inputProperties,
                    propertiesDefault
                );
                // The editor expects an array of format names.
                field.inputProperties['formats'] = String(field.inputProperties['formats']).split(',');
                break;
            case 'file':
                propertiesDefault = {
                    handler: '',
                    multiple: 0,
                    allowed_extensions: '.zip,.rar,.doc,.docx,.xls,.xlsx,.ods,.odt',
                    has_preview_image: 0
                };
                this.extendProperties(
                    field.inputProperties,
                    propertiesDefault
                );
                break;
            case 'categories':
                propertiesDefault = {
                    handler: '',
                    multiple: 0,
                    layout: 'vertical'
                };
                this.extendProperties(
                    field.inputProperties,
                    propertiesDefault
                );
                break;
            case 'color':
                propertiesDefault = {
                    handler: '',
                    multiple: 0,
                    inline: 0
                };
                this.extendProperties(
                    field.inputProperties,
                    propertiesDefault
                );
                break;
            default:
                propertiesDefault = {
                    handler: '',
                    multiple: 0
                };
                this.extendProperties(
                    field.inputProperties,
                    propertiesDefault
                );
        }
    }
setFieldOptions(field: ContentField): void {
field.options = [];
let valueArr;
switch (field.inputType) {
case 'radio':
case 'select':
valueArr = field.inputProperties['values_list']
? String(field.inputProperties['values_list']).split('||')
: [];
valueArr.forEach((optStr, index) => {
const opts = optStr.split('==');
if (!opts[1]) {
opts[1] = opts[0];
}
field.options.push(zipObject(['title', 'value'], opts));
});
break;
case 'checkbox':
valueArr = field.inputProperties['values_list']
? String(field.inputProperties['values_list']).split('||')
: [];
if (!Array.isArray(this.model[field.name])) {
this.model[field.name] = [];
}
this.fieldsMultivalues[field.name] = new MultiValues([], []);
valueArr.forEach((optStr, index) => {
const opts = optStr.split('==');
if (!opts[1]) {
opts[1] = opts[0];
}
field.options.push(zipObject(['title', 'value'], opts));
this.fieldsMultivalues[field.name].values.push(opts[1]);
this.fieldsMultivalues[field.name].checked.push(this.model[field.name].indexOf(opts[1]) > -1);
});
break;
case 'schedule':
case 'parameters':
if (!Array.isArray(this.model[field.name])) {
this.model[field.name] = [];
}
}
}
    // Hook for group (tab) change events from the template; currently a no-op.
    onGroupChange(e: any): void {
        // console.log('onGroupChange', e);
    }
    /**
     * Initialize the model value for a field: keep an existing model value
     * (normalized per type), otherwise apply the configured default from
     * `inputProperties.value`.
     */
    setValue(field: ContentField): void {
        let defaultValue = null,
            modelValue = typeof this.model[field.name] !== 'undefined' ? this.model[field.name] : null;
        if (typeof field.inputProperties.value !== 'undefined') {
            defaultValue = field.inputProperties.value;
        }
        switch (field.inputType) {
            case 'date':
                // NOTE(review): this unconditionally replaces any configured
                // default with "now" — confirm that is intended.
                defaultValue = new Date();
                if (modelValue) {
                    modelValue = new Date(modelValue);
                }
                break;
            case 'number':
                defaultValue = defaultValue ? parseFloat(String(defaultValue)) : null;
                break;
            case 'schedule':
            case 'parameters':
                // Defaults may be stored as a JSON string; normalize to an array.
                if (typeof defaultValue !== 'object') {
                    defaultValue = defaultValue ? JSON.parse(defaultValue) : [];
                }
                if (!Array.isArray(defaultValue)) {
                    defaultValue = [defaultValue];
                }
                break;
            case 'tags':
            case 'checkbox':
                // assumes the configured default is a '||'-separated string — TODO confirm
                defaultValue = defaultValue ? defaultValue.split('||') : [];
                break;
        }
        this.model[field.name] = modelValue !== null ? modelValue : defaultValue;
    }
selectValue(e, fieldName: string, value: string): void {
if (!Array.isArray(this.model[fieldName])) {
this.model[fieldName] = [];
}
const valIndex = this.fieldsMultivalues[fieldName].values.indexOf(value);
if (valIndex === -1) {
return;
}
if (e.target.checked) {
this.model[fieldName].push(value);
this.fieldsMultivalues[fieldName].checked[valIndex] = true;
} else {
this.model[fieldName].splice(this.model[fieldName].indexOf(value), 1);
this.fieldsMultivalues[fieldName].checked[valIndex] = false;
}
}
getValidators(field: ContentField): any[] {
const validators = [];
if (field.required) {
validators.push(Validators.required);
this.translateService.get('FIELD_REQUIRED', {name: field.title})
.subscribe((res: string) => {
this.validationMessages[field.name].required = res;
});
}
return validators;
}
extendProperties(object1: Properties, object2: Properties): void {
for (const key in object2) {
if (object2.hasOwnProperty(key)) {
if (!object1[key]) {
object1[key] = object2[key];
}
if (typeof object1[key] === 'string' && !Number.isNaN(Number(object1[key]))) {
object1[key] = parseInt(String(object1[key]), 10);
}
}
}
}
generateName(field: ContentField): void {
const sourceFieldName = field.inputProperties.source_field
? String(field.inputProperties.source_field)
: 'title';
const title = this.model[sourceFieldName] || '';
this.model[field.name] = this.systemNameService.generateName(title);
this.changeDetectionRef.detectChanges();
}
fileChange(event, field: ContentField, imgPreview?: HTMLImageElement) {
const fileList: FileList = event.target.files,
fieldName = field.name;
if (fileList.length > 0) {
this.model[fieldName] = this.getFileData(fileList[0]);
this.files[fieldName] = fileList[0];
const parentEl = imgPreview.parentElement.parentElement;
if (field.inputProperties.has_preview_image
&& fileList[0].type.indexOf('image/') > -1) {
imgPreview.style.display = 'block';
parentEl.querySelector('.file-buttons').classList.add('show-on-hover-child-left');
const reader = new FileReader();
reader.onload = (e: ProgressEvent) => {
const fr = e.target as FileReader;
this.model[fieldName].dataUrl = fr.result;
};
reader.readAsDataURL(fileList[0]);
} else {
imgPreview.style.display = 'none';
parentEl.querySelector('.file-buttons').classList.remove('show-on-hover-child-left');
}
}
}
fileClear(fieldName: string, imgPreviewEl?: HTMLImageElement) {
this.model[fieldName] = null;
this.form.controls[fieldName].reset(null);
delete this.files[fieldName];
if (imgPreviewEl) {
imgPreviewEl.src = '';
imgPreviewEl.style.display = 'none';
}
}
getFileData(file: File): FileData {
const title = file.name.substr(0, file.name.lastIndexOf('.')),
extension = file.name.substr(file.name.lastIndexOf('.') + 1),
size = file.size;
return new FileData(0, title, extension, size);
}
    // Build the preview URL for a stored file (delegated to FileData).
    getImageUrl(fileData: FileData|null): string|ArrayBuffer {
        return FileData.getImageUrl(this.filesDirBaseUrl, fileData);
    }
    /**
     * Load the category tree from the server.
     * @param updateTreeSelections when true, re-map current model values
     *        onto the freshly loaded tree nodes
     */
    getCategoriesTree(updateTreeSelections = false): void {
        this.loadingCategories = true;
        this.categoriesService.getTree()
            .subscribe((data) => {
                this.categoriesTree = data;
                this.loadingCategories = false;
                if (updateTreeSelections) {
                    this.updateTreeSelections();
                }
            }, (err) => {
                // Errors only clear the loading flag; no message is shown.
                this.loadingCategories = false;
            });
    }
    /**
     * For every 'categories' field, translate the model's category ids into
     * tree-node selections (attaching the parent node so PrimeNG can render
     * the selection path).
     */
    updateTreeSelections(): void {
        this.fields.forEach((field) => {
            if (field.inputType === 'categories') {
                if (!this.categoriesSelection[field.name]) {
                    this.categoriesSelection[field.name] = [];
                }
                if (this.model[field.name] && Array.isArray(this.model[field.name])) {
                    this.model[field.name].forEach((id) => {
                        const category = this.getCategoryById(id, this.categoriesTree);
                        if (category) {
                            const parent = this.getCategoryById(category.parentId, this.categoriesTree);
                            if (parent) {
                                category.parent = parent;
                            }
                            this.categoriesSelection[field.name].push(category);
                        }
                    });
                }
            }
        });
    }
getCategoryById(id: number, categoriesArr) {
let output = null;
categoriesArr.forEach((category) => {
if (output) {
return;
}
if (category.id === id) {
output = category;
} else if (category.children && category.children.length > 0) {
output = this.getCategoryById(id, category.children);
}
});
return output;
}
    /**
     * Tree-selection change handler: rebuild the model's id list from the
     * current selection. The rebuild is deferred to the next tick (lodash
     * `defer`) — presumably because the tree's selection binding updates
     * after this handler fires; confirm before changing.
     */
    categorySelect(fieldName: string) {
        this.model[fieldName] = [];
        defer(() => {
            this.categoriesSelection[fieldName].forEach((category) => {
                if (this.model[fieldName].indexOf(category.id) === -1) {
                    this.model[fieldName].push(category.id);
                }
            });
        });
    }
parametersRemove(fieldName: string, index: number): void {
if (Array.isArray(this.model[fieldName])) {
this.model[fieldName].splice(index, 1);
}
}
parametersAdd(fieldName: string, event?: MouseEvent): void {
if (event) {
event.preventDefault();
}
if (!this.model[fieldName]) {
this.model[fieldName] = [];
}
const index = this.fields.findIndex((field) => {
return field.name === fieldName;
});
const obj = {name: '', value: '', price: 0, imageNum: 0};
if (index > -1 && this.fields[index].inputProperties.keys) {
(this.fields[index].inputProperties.keys as string[]).forEach((key) => {
if (typeof obj[key] === 'undefined') {
obj[key] = '';
}
});
}
this.model[fieldName].push(obj);
}
    /**
     * Open a modal with the field's rows serialized as editable JSON;
     * on confirm, parse the edited JSON back into the model (in place,
     * preserving the array reference). A parse failure is reported via
     * formErrors.
     */
    parametersExport(fieldName: string, event?: MouseEvent): void {
        if (event) {
            event.preventDefault();
        }
        if (!this.model[fieldName]) {
            this.model[fieldName] = [];
        }
        this.formErrors[fieldName] = '';
        const dataStr = JSON.stringify(this.model[fieldName], null, '\t');
        const modalRef = this.modalService.open(ModalEditTextareaComponent, {
            backdrop: 'static',
            keyboard: false,
            container: '#modals-container'
        });
        modalRef.componentInstance.modalTitle = `${this.getLangString('EXPORT')} JSON`;
        modalRef.componentInstance.textValue = dataStr;
        modalRef.result.then((result) => {
            if (result.data) {
                try {
                    const outputData = JSON.parse(result.data);
                    // Mutate in place so existing bindings keep the same array.
                    this.model[fieldName].splice(0, this.model[fieldName].length);
                    this.model[fieldName].push(...outputData);
                } catch (e) {
                    this.formErrors[fieldName] = 'Syntax error.';
                }
            }
        }, (reason) => {
            // Modal dismissed — nothing to do.
            // console.log(reason);
        });
    }
/**
 * Move a duplicated field up or down among its siblings
* @param field
* @param direction
* @param event
*/
    /**
     * Reorder a duplicated field ("name__N") within its group of duplicates.
     * The base field (no "__" suffix) stays first; moves that would cross
     * the group's boundaries are ignored.
     * @param direction 'up' or 'down'
     */
    fieldMove(field: ContentField, direction: string, event?: MouseEvent): void {
        if (event) {
            event.preventDefault();
        }
        // Only duplicated fields (containing "__") can move up.
        if (direction === 'up' && field.name.indexOf('__') === -1) {
            return;
        }
        const fieldBaseName = ContentField.getFieldBaseName(field.name),
            baseFieldIndexData = ContentField.getFieldIndexData(this.fields, fieldBaseName),
            fieldIndexData = ContentField.getFieldIndexData(this.fields, field.name);
        if (fieldIndexData.index === -1) {
            return;
        }
        if (direction === 'up') {
            // Cannot move above the base field.
            if (fieldIndexData.index - 1 === baseFieldIndexData.index) {
                return;
            }
            this.fields.splice(fieldIndexData.index, 1);
            this.fields.splice(fieldIndexData.index - 1, 0, field);
        }
        if (direction === 'down') {
            // Cannot move past the last duplicate of the base field.
            if (fieldIndexData.index - baseFieldIndexData.index === fieldIndexData.additFieldsCount) {
                return;
            }
            this.fields.splice(fieldIndexData.index, 1);
            this.fields.splice(fieldIndexData.index + 1, 0, field);
        }
    }
/**
 * Duplicates a field: inserts a deep clone right after the original,
 * named "<base>__<N+1>" where N is the current duplicate count, then
 * rebuilds the form controls.
 * @param field field to duplicate
 * @param event optional click event to suppress
 */
fieldAdd(field: ContentField, event?: MouseEvent): void {
    if (event) {
        event.preventDefault();
    }
    const indexData = ContentField.getFieldIndexData(this.fields, field.name);
    if (indexData.index === -1) {
        return;
    }
    const baseName = ContentField.getFieldBaseName(field.name);
    const copy = cloneDeep(field);
    copy.name = `${baseName}__${indexData.additFieldsCount + 1}`;
    this.fields.splice(indexData.index + 1, 0, copy);
    this.buildControls();
}
/**
 * Hooks the Quill editor's container to accept image drops (e.g. from the
 * File Manager): a dropped image path is inserted as an embedded image at
 * the current cursor position.
 * @param event Quill editor-created event carrying the editor instance
 */
onInitTextEditor(event: any): void {
    const quillEditorRef = event.editor;
    event.editor.container.ondrop = (e: DragEvent) => {
        e.preventDefault();
        e.stopPropagation();
        // dataTransfer may be null depending on drag source/browser.
        if (!e.dataTransfer) {
            return;
        }
        const value = e.dataTransfer.getData('text/plain');
        const ext = value.split('.').pop();
        if (!ext || ['jpg', 'jpeg', 'png', 'gif'].indexOf(ext.toLowerCase()) === -1) {
            return;
        }
        // getSelection() returns null when the editor has no selection/focus;
        // fall back to inserting at the start of the document.
        const range = quillEditorRef.getSelection();
        const insertIndex = range ? range.index : 0;
        quillEditorRef.insertEmbed(insertIndex, 'image', value, 'user');
    };
}
/**
 * Emits a request to add a translation for the given field.
 * @param fieldName model key to translate
 * @param event click event to suppress
 */
addTranslation(fieldName: string, event: MouseEvent): void {
    event?.preventDefault();
    this.onAddTranslation.emit(fieldName);
}
/**
 * FullCalendar date-range selection handler: prompts the user for a title
 * (using a translated prompt) and, when one is entered, adds a new event
 * covering the selected range.
 * @param fieldName model key backing the calendar
 * @param selectInfo FullCalendar selection details
 */
handleFullCalendarDateSelect(fieldName: string, selectInfo: DateSelectArg): void {
    this.translateService.get('PLEASE_ENTER_NEW_TITLE_FOR_EVENT')
        .subscribe((translatedString: string) => {
            const calendarApi = selectInfo.view.calendar;
            const title = prompt(translatedString);
            calendarApi.unselect();
            if (!title) {
                return;
            }
            calendarApi.addEvent({
                id: this.fullCalendarCreateId(fieldName),
                title,
                start: selectInfo.startStr,
                end: selectInfo.endStr,
                allDay: selectInfo.allDay
            });
        });
}
/**
 * FullCalendar event-click handler: asks for confirmation (translated),
 * then removes the event from the calendar and from the model field.
 * @param fieldName model key backing the calendar
 * @param clickInfo FullCalendar click details
 */
handleFullCalendarEventClick(fieldName: string, clickInfo: EventClickArg): void {
    this.translateService.get('YOU_SURE_YOU_WANT_DELETE_NAME', {name: clickInfo.event.title})
        .subscribe((translatedString: string) => {
            if (!confirm(translatedString)) {
                return;
            }
            clickInfo.event.remove();
            const removed = clickInfo.event.toPlainObject();
            const index = this.model[fieldName].findIndex((item) => String(item.id) === String(removed.id));
            if (index > -1) {
                this.model[fieldName].splice(index, 1);
                this.fullCalendarOptions[fieldName].initialEvents = this.model[fieldName];
            }
        });
}
/**
 * FullCalendar event-add handler: appends the new event to the model
 * field (as a fresh array) and syncs the calendar's initialEvents.
 * @param fieldName model key backing the calendar
 * @param api FullCalendar add details
 */
handleFullCalendarEventAdd(fieldName: string, api: EventAddArg): void {
    const updated = this.model[fieldName].concat([api.event.toJSON()]);
    this.model[fieldName] = updated;
    this.fullCalendarOptions[fieldName].initialEvents = updated;
}
/**
 * FullCalendar event-change handler: merges the changed event data into
 * the matching model entry (matched by id) and syncs initialEvents.
 * @param fieldName model key backing the calendar
 * @param api FullCalendar change details
 */
handleFullCalendarEventChange(fieldName: string, api: EventChangeArg): void {
    const changed = api.event.toPlainObject();
    const index = this.model[fieldName].findIndex((item) => String(item.id) === String(changed.id));
    if (index === -1) {
        return;
    }
    Object.assign(this.model[fieldName][index], changed);
    this.fullCalendarOptions[fieldName].initialEvents = this.model[fieldName];
}
/**
 * Generates the next numeric event id for a calendar field as a string:
 * one greater than the largest existing numeric id (non-numeric ids are
 * ignored), starting from "1" for an empty field.
 * @param fieldName model key backing the calendar
 */
fullCalendarCreateId(fieldName: string): string {
    const maxId = this.model[fieldName].reduce((acc, item) => {
        const itemId = parseInt(item.id, 10);
        // NaN comparisons are false, so non-numeric ids keep the accumulator.
        return itemId > acc ? itemId : acc;
    }, 0);
    return String(maxId + 1);
}
/**
 * Translates a key using the currently loaded language table, falling
 * back to the key itself when the table or the entry is missing.
 * @param value translation key
 */
getLangString(value: string): string {
    const translations = this.translateService.store.translations[this.translateService.currentLang];
    if (!translations) {
        return value;
    }
    return translations[value] || value;
}
}
import {DateTime, Unit} from './datetime';
import Collapse from './display/collapse';
import Namespace from './utilities/namespace';
import Dates from './dates';
import Validation from './validation';
import Display from './display';
import {EventEmitters} from './utilities/event-emitter';
import {serviceLocator} from './utilities/service-locator.js';
import ActionTypes from './utilities/action-types';
import CalendarModes from './utilities/calendar-modes';
import {OptionsStore} from "./utilities/optionsStore";
/**
 * Routes picker UI interactions to their behavior: every click on a
 * `[data-action]` element — and every action published on the shared
 * action event emitter — goes through {@link Actions.do}.
 */
export default class Actions {
  private optionsStore: OptionsStore;
  private validation: Validation;
  private dates: Dates;
  private display: Display;
  private _eventEmitters: EventEmitters;

  constructor() {
    // Collaborators are resolved through the service locator rather than
    // constructor injection.
    this.optionsStore = serviceLocator.locate(OptionsStore);
    this.dates = serviceLocator.locate(Dates);
    this.validation = serviceLocator.locate(Validation);
    this.display = serviceLocator.locate(Display);
    this._eventEmitters = serviceLocator.locate(EventEmitters);
    // Replay programmatically emitted actions through the same handler
    // used for direct UI clicks.
    this._eventEmitters.action.subscribe((result) => {
      this.do(result.e, result.action);
    });
  }

  /**
   * Performs the selected `action`. See ActionTypes
   * @param e This is normally a click event
   * @param action If not provided, then look for a [data-action]
   */
  do(e: any, action?: ActionTypes) {
    const currentTarget = e?.currentTarget;
    // Disabled elements never trigger behavior.
    if (currentTarget?.classList?.contains(Namespace.css.disabled))
      return false;
    action = action || currentTarget?.dataset?.action;
    // Clone so time manipulations below don't mutate the stored date
    // until setValue is explicitly called.
    const lastPicked = (this.dates.lastPicked || this.optionsStore.viewDate)
      .clone;
    switch (action) {
      case ActionTypes.next:
      case ActionTypes.previous:
        this.handleNextPrevious(action);
        break;
      case ActionTypes.changeCalendarView:
        // Step "out" one calendar level (e.g. days -> months).
        this.display._showMode(1);
        this.display._updateCalendarHeader();
        break;
      case ActionTypes.selectMonth:
      case ActionTypes.selectYear:
      case ActionTypes.selectDecade:
        const value = +currentTarget.dataset.value;
        switch (action) {
          case ActionTypes.selectMonth:
            this.optionsStore.viewDate.month = value;
            break;
          case ActionTypes.selectYear:
          case ActionTypes.selectDecade:
            this.optionsStore.viewDate.year = value;
            break;
        }
        // At the minimum view mode the click is a final selection;
        // otherwise drill back "in" one calendar level.
        if (
          this.optionsStore.currentCalendarViewMode ===
          this.optionsStore.minimumCalendarViewMode
        ) {
          this.dates.setValue(
            this.optionsStore.viewDate,
            this.dates.lastPickedIndex
          );
          if (!this.optionsStore.options.display.inline) {
            this.display.hide();
          }
        } else {
          this.display._showMode(-1);
        }
        break;
      case ActionTypes.selectDay:
        const day = this.optionsStore.viewDate.clone;
        // Cells styled "old"/"new" belong to the previous/next month.
        if (currentTarget.classList.contains(Namespace.css.old)) {
          day.manipulate(-1, Unit.month);
        }
        if (currentTarget.classList.contains(Namespace.css.new)) {
          day.manipulate(1, Unit.month);
        }
        day.date = +currentTarget.dataset.day;
        let index = 0;
        if (this.optionsStore.options.multipleDates) {
          // Clicking an already-picked date toggles it off.
          index = this.dates.pickedIndex(day, Unit.date);
          if (index !== -1) {
            this.dates.setValue(null, index); //deselect multi-date
          } else {
            this.dates.setValue(day, this.dates.lastPickedIndex + 1);
          }
        } else {
          this.dates.setValue(day, this.dates.lastPickedIndex);
        }
        // Auto-close when there is no time component left to pick.
        if (
          !this.display._hasTime &&
          !this.optionsStore.options.display.keepOpen &&
          !this.optionsStore.options.display.inline &&
          !this.optionsStore.options.multipleDates
        ) {
          this.display.hide();
        }
        break;
      case ActionTypes.selectHour:
        let hour = +currentTarget.dataset.value;
        // In 12-hour mode the grid shows 1-12; preserve the current
        // AM/PM by shifting afternoon picks by 12.
        if (
          lastPicked.hours >= 12 &&
          !this.optionsStore.options.display.components.useTwentyfourHour
        )
          hour += 12;
        lastPicked.hours = hour;
        this.dates.setValue(lastPicked, this.dates.lastPickedIndex);
        this.hideOrClock(e);
        break;
      case ActionTypes.selectMinute:
        lastPicked.minutes = +currentTarget.dataset.value;
        this.dates.setValue(lastPicked, this.dates.lastPickedIndex);
        this.hideOrClock(e);
        break;
      case ActionTypes.selectSecond:
        lastPicked.seconds = +currentTarget.dataset.value;
        this.dates.setValue(lastPicked, this.dates.lastPickedIndex);
        this.hideOrClock(e);
        break;
      case ActionTypes.incrementHours:
        this.manipulateAndSet(lastPicked, Unit.hours);
        break;
      case ActionTypes.incrementMinutes:
        // Minutes move by the configured stepping, not by 1.
        this.manipulateAndSet(
          lastPicked,
          Unit.minutes,
          this.optionsStore.options.stepping
        );
        break;
      case ActionTypes.incrementSeconds:
        this.manipulateAndSet(lastPicked, Unit.seconds);
        break;
      case ActionTypes.decrementHours:
        this.manipulateAndSet(lastPicked, Unit.hours, -1);
        break;
      case ActionTypes.decrementMinutes:
        this.manipulateAndSet(
          lastPicked,
          Unit.minutes,
          this.optionsStore.options.stepping * -1
        );
        break;
      case ActionTypes.decrementSeconds:
        this.manipulateAndSet(lastPicked, Unit.seconds, -1);
        break;
      case ActionTypes.toggleMeridiem:
        // Flip AM/PM by shifting 12 hours in the appropriate direction.
        this.manipulateAndSet(
          lastPicked,
          Unit.hours,
          this.dates.lastPicked.hours >= 12 ? -12 : 12
        );
        break;
      case ActionTypes.togglePicker:
        // The toggle button's title tells us which picker is currently
        // shown; swap title + icon, then collapse/expand the containers.
        if (
          currentTarget.getAttribute('title') ===
          this.optionsStore.options.localization.selectDate
        ) {
          currentTarget.setAttribute(
            'title',
            this.optionsStore.options.localization.selectTime
          );
          currentTarget.innerHTML = this.display._iconTag(
            this.optionsStore.options.display.icons.time
          ).outerHTML;
          this.display._updateCalendarHeader();
          this.optionsStore.refreshCurrentView();
        } else {
          currentTarget.setAttribute(
            'title',
            this.optionsStore.options.localization.selectDate
          );
          currentTarget.innerHTML = this.display._iconTag(
            this.optionsStore.options.display.icons.date
          ).outerHTML;
          if (this.display._hasTime) {
            this.do(e, ActionTypes.showClock);
            this.display._update('clock');
          }
        }
        this.display.widget
          .querySelectorAll(
            `.${Namespace.css.dateContainer}, .${Namespace.css.timeContainer}`
          )
          .forEach((htmlElement: HTMLElement) => Collapse.toggle(htmlElement));
        this._eventEmitters.viewUpdate.emit();
        break;
      case ActionTypes.showClock:
      case ActionTypes.showHours:
      case ActionTypes.showMinutes:
      case ActionTypes.showSeconds:
        this.handleShowClockContainers(action);
        break;
      case ActionTypes.clear:
        this.dates.setValue(null);
        this.display._updateCalendarHeader();
        break;
      case ActionTypes.close:
        this.display.hide();
        break;
      case ActionTypes.today:
        const today = new DateTime().setLocale(
          this.optionsStore.options.localization.locale
        );
        this.optionsStore.viewDate = today;
        // Only commit "today" as the picked value when it is allowed by
        // the validation rules; the view still navigates there regardless.
        if (this.validation.isValid(today, Unit.date))
          this.dates.setValue(today, this.dates.lastPickedIndex);
        break;
    }
  }

  /**
   * Shows the requested time container (clock overview, hour, minute or
   * second grid), hiding all others, and refreshes its contents.
   * @param action one of showClock/showHours/showMinutes/showSeconds
   */
  private handleShowClockContainers(action: ActionTypes) {
    if (!this.display._hasTime) {
      // NOTE(review): throwError presumably throws, making the return
      // below defensive — confirm against Namespace.errorMessages.
      Namespace.errorMessages.throwError('Cannot show clock containers when time is disabled.');
      return;
    }
    this.optionsStore.currentView = 'clock';
    // Hide every time sub-container before revealing the requested one.
    this.display.widget
      .querySelectorAll(`.${Namespace.css.timeContainer} > div`)
      .forEach(
        (htmlElement: HTMLElement) => (htmlElement.style.display = 'none')
      );
    let classToUse = '';
    switch (action) {
      case ActionTypes.showClock:
        classToUse = Namespace.css.clockContainer;
        this.display._update('clock');
        break;
      case ActionTypes.showHours:
        classToUse = Namespace.css.hourContainer;
        this.display._update(Unit.hours);
        break;
      case ActionTypes.showMinutes:
        classToUse = Namespace.css.minuteContainer;
        this.display._update(Unit.minutes);
        break;
      case ActionTypes.showSeconds:
        classToUse = Namespace.css.secondContainer;
        this.display._update(Unit.seconds);
        break;
    }
    (<HTMLElement>(
      this.display.widget.getElementsByClassName(classToUse)[0]
    )).style.display = 'grid';
  }

  /**
   * Navigates the view forward or backward by one step of the current
   * calendar mode (month for days view, year for months view, etc.).
   * @param action ActionTypes.next or ActionTypes.previous
   */
  private handleNextPrevious(action: ActionTypes) {
    const {unit, step} =
      CalendarModes[this.optionsStore.currentCalendarViewMode];
    if (action === ActionTypes.next)
      this.optionsStore.viewDate.manipulate(step, unit);
    else this.optionsStore.viewDate.manipulate(step * -1, unit);
    this._eventEmitters.viewUpdate.emit();
    this.display._showMode();
  }

  /**
   * After setting the value it will either show the clock or hide the widget.
   * @param e
   */
  private hideOrClock(e) {
    // Hours-only 24h pickers have nothing left to select, so close
    // (unless configured to stay open / inline).
    if (
      this.optionsStore.options.display.components.useTwentyfourHour &&
      !this.optionsStore.options.display.components.minutes &&
      !this.optionsStore.options.display.keepOpen &&
      !this.optionsStore.options.display.inline
    ) {
      this.display.hide();
    } else {
      this.do(e, ActionTypes.showClock);
    }
  }

  /**
   * Common function to manipulate {@link lastPicked} by `unit`.
   * The change is committed only if the result passes validation.
   * @param lastPicked
   * @param unit
   * @param value Value to change by
   */
  private manipulateAndSet(lastPicked: DateTime, unit: Unit, value = 1) {
    const newDate = lastPicked.manipulate(value, unit);
    if (this.validation.isValid(newDate, unit)) {
      this.dates.setValue(newDate, this.dates.lastPickedIndex);
    }
  }
}
namespace pixi_heaven.mesh {
    // Shared scratch objects, reused across calls to avoid per-hit-test
    // allocations. Not safe for re-entrant use.
    const tempPoint = new PIXI.Point();
    const tempPolygon = new PIXI.Polygon();
    /**
     * Base mesh class
     * @class
     * @extends PIXI.Container
     * @memberof PIXI.mesh
     */
    export class Mesh extends PIXI.Container implements ITextureAnimationTarget {
        /**
         * The texture of the Mesh
         *
         * @member {PIXI.Texture}
         * @private
         */
        _texture: PIXI.Texture;
        // Optional texture animation state; stopped and cleared in destroy().
        animState: AnimationState = null;
        /**
         * The Uvs of the Mesh
         *
         * @member {Float32Array}
         */
        uvs: Float32Array;
        /**
         * An array of vertices
         *
         * @member {Float32Array}
         */
        vertices: Float32Array;
        /**
         * An array containing the indices of the vertices
         *
         * @member {Uint16Array}
         */
        indices: Uint16Array;
        /**
         * Two colors per vertex: dark, light. Please fill with 0x0 and 0xffffffff by default.
         *
         * @member {Uint32Array}
         */
        colors: Uint32Array;
        /**
         * The way the Mesh should be drawn, can be any of the {@link PIXI.mesh.Mesh.DRAW_MODES} consts
         *
         * @member {number}
         * @see PIXI.mesh.Mesh.DRAW_MODES
         */
        drawMode: number;
        /**
         * Version of mesh uvs are dirty or not
         *
         * @member {number}
         */
        dirty = 0;
        /**
         * Version of mesh indices
         *
         * @member {number}
         */
        indexDirty = 0;
        /**
         * The blend mode to be applied to the mesh. Set to `PIXI.BLEND_MODES.NORMAL` to remove
         * any blend mode.
         *
         * @member {number}
         * @default PIXI.BLEND_MODES.NORMAL
         * @see PIXI.BLEND_MODES
         */
        blendMode = PIXI.BLEND_MODES.NORMAL;
        /**
         * Triangles in canvas mode are automatically antialiased, use this value to force triangles
         * to overlap a bit with each other.
         *
         * @member {number}
         */
        canvasPadding = 0;
        /**
         * The tint applied to the mesh. This is a [r,g,b] value. A value of [1,1,1] will remove any
         * tint effect.
         *
         * @member {number}
         */
        tintRgb = new Float32Array([1, 1, 1]);
        /**
         * A map of renderer IDs to webgl render data
         *
         * @private
         * @member {object<number, object>}
         */
        _glDatas: { [key: number]: any } = {};
        /**
         * whether or not upload uvTransform to shader
         * if its false, then uvs should be pre-multiplied
         * if you change it for generated mesh, please call 'refresh(true)'
         * @member {boolean}
         * @default false
         */
        uploadUvTransform = false;
        /**
         * Plugin that is responsible for rendering this element.
         * Allows to customize the rendering process without overriding '_renderWebGL' & '_renderCanvas' methods.
         * @member {string}
         * @default 'mesh'
         */
        pluginName = settings.MESH_PLUGIN;
        /**
         * transform that is applied to UV to get the texture coords
         * its updated independently from texture uvTransform
         * updates of uvs are tied to that thing
         *
         * @member {PIXI.TextureMatrix}
         * @private
         */
        _uvTransform: PIXI.TextureMatrix;
        /**
         * Same as sprite vertexData
         */
        vertexData: Float32Array = null;
        /**
         * Same as sprite maskVertexData
         */
        maskVertexData: Float32Array = null;
        maskSprite: PIXI.Sprite = null;
        /**
         * @param {PIXI.Texture} texture - The texture to use
         * @param {Float32Array} [vertices] - if you want to specify the vertices
         * @param {Float32Array} [uvs] - if you want to specify the uvs
         * @param {Uint16Array} [indices] - if you want to specify the indices
         * @param {number} [drawMode] - the drawMode, can be any of the Mesh.DRAW_MODES consts
         */
        constructor(texture: PIXI.Texture, vertices?: Float32Array, uvs?: Float32Array, indices?: Uint16Array,
                    drawMode: number = PIXI.mesh.Mesh.DRAW_MODES.TRIANGLE_MESH) {
            super();
            texture = texture || PIXI.Texture.EMPTY;
            this._texture = texture;
            // Defer UV/color setup until the texture has actually loaded.
            if (!texture.baseTexture.hasLoaded) {
                texture.once('update', this._onTextureUpdate, this);
            }
            // Defaults describe a 100x100 quad with full-texture UVs.
            this.uvs = uvs || new Float32Array([
                0, 0,
                1, 0,
                1, 1,
                0, 1]);
            this.vertices = vertices || new Float32Array([
                0, 0,
                100, 0,
                100, 100,
                0, 100]);
            // TODO auto generate this based on draw mode!
            this.indices = indices || new Uint16Array([0, 1, 3, 2]);
            this.colors = null;
            this.drawMode = drawMode;
            /**
             * transform that is applied to UV to get the texture coords
             * its updated independently from texture uvTransform
             * updates of uvs are tied to that thing
             *
             * @member {PIXI.TextureMatrix}
             * @private
             */
            this._uvTransform = new PIXI.TextureMatrix(texture, 0);
        }
        /**
         * Updates the object transform for rendering
         *
         * @private
         */
        updateTransform() {
            this.refresh();
            //TODO: move it somewhere, default heaven updateTransform
            this._boundsID++;
            this.transform.updateTransform(this.parent.transform);
            this.worldAlpha = this.alpha * this.parent.worldAlpha;
            // Propagate the computed world alpha into the color transform.
            if (this.color) {
                this.color.alpha = this.worldAlpha;
                this.color.updateTransform();
            }
            for (let i = 0, j = this.children.length; i < j; ++i) {
                const child = this.children[i];
                if (child.visible) {
                    child.updateTransform();
                }
            }
        }
        /**
         * Renders the object using the WebGL renderer
         *
         * @private
         * @param {PIXI.WebGLRenderer} renderer - a reference to the WebGL renderer
         */
        _renderWebGL(renderer: PIXI.WebGLRenderer) {
            renderer.setObjectRenderer(renderer.plugins[this.pluginName]);
            renderer.plugins[this.pluginName].render(this);
        }
        /**
         * Renders the object using the Canvas renderer
         *
         * @private
         * @param {PIXI.CanvasRenderer} renderer - The canvas renderer.
         */
        _renderCanvas(renderer: PIXI.CanvasRenderer) {
            renderer.plugins['mesh'].render(this);
        }
        /**
         * When the texture is updated, this event will fire to update the scale and frame
         *
         * @private
         */
        _onTextureUpdate() {
            this._uvTransform.texture = this._texture;
            // Keep the color transform's premultiplied-alpha flag in sync
            // with the new base texture.
            this.color.pma = this._texture.baseTexture.premultipliedAlpha;
            this.refresh();
        }
        /**
         * multiplies uvs only if uploadUvTransform is false
         * call it after you change uvs manually
         * make sure that texture is valid
         */
        multiplyUvs() {
            if (!this.uploadUvTransform) {
                (this._uvTransform as any).multiplyUvs(this.uvs);
            }
        }
        /**
         * Refreshes uvs for generated meshes (rope, plane)
         * sometimes refreshes vertices too
         *
         * @param {boolean} [forceUpdate=false] if true, matrices will be updated any case
         */
        refresh(forceUpdate = false) {
            if (this._uvTransform.update(forceUpdate)) {
                this._refreshUvs();
            }
        }
        /**
         * re-calculates mesh coords
         * @protected
         */
        _refreshUvs() {
            /* empty */
        }
        /**
         * Returns the bounds of the mesh as a rectangle. The bounds calculation takes the worldTransform into account.
         *
         */
        _calculateBounds() {
            // TODO - we can cache local bounds and use them if they are dirty (like graphics)
            this._bounds.addVertices(this.transform as any, this.vertices as any, 0, this.vertices.length);
        }
        /**
         * Tests if a point is inside this mesh. Works only for TRIANGLE_MESH
         *
         * @param {PIXI.Point} point - the point to test
         * @return {boolean} the result of the test
         */
        containsPoint(point: PIXI.PointLike) {
            // Cheap AABB rejection before the per-triangle test.
            if (!this.getBounds().contains(point.x, point.y)) {
                return false;
            }
            this.worldTransform.applyInverse(point as any, tempPoint);
            const vertices = this.vertices;
            const points = tempPolygon.points;
            const indices = this.indices;
            const len = this.indices.length;
            // TRIANGLES advances 3 indices per triangle; strip/fan-like
            // modes advance 1 so consecutive index triples are tested.
            const step = this.drawMode === Mesh.DRAW_MODES.TRIANGLES ? 3 : 1;
            for (let i = 0; i + 2 < len; i += step) {
                const ind0 = indices[i] * 2;
                const ind1 = indices[i + 1] * 2;
                const ind2 = indices[i + 2] * 2;
                points[0] = vertices[ind0];
                points[1] = vertices[ind0 + 1];
                points[2] = vertices[ind1];
                points[3] = vertices[ind1 + 1];
                points[4] = vertices[ind2];
                points[5] = vertices[ind2 + 1];
                if (tempPolygon.contains(tempPoint.x, tempPoint.y)) {
                    return true;
                }
            }
            return false;
        }
        /**
         * Transforms local vertices by the world transform into vertexData
         * (same contract as PIXI.Sprite#vertexData).
         */
        calculateVertices() {
            const vertices = this.vertices;
            const n = vertices.length;
            if (!this.vertexData || this.vertexData.length !== n)
            {
                this.vertexData = new Float32Array(n);
            }
            const vertexData = this.vertexData;
            const matrix = this.transform.worldTransform;
            const a = matrix.a;
            const b = matrix.b;
            const c = matrix.c;
            const d = matrix.d;
            const tx = matrix.tx;
            const ty = matrix.ty;
            for (let i = 0; i < n; i += 2)
            {
                const rawX = vertices[i];
                const rawY = vertices[i + 1];
                vertexData[i] = (a * rawX) + (c * rawY) + tx;
                vertexData[i+1] = (d * rawY) + (b * rawX) + ty;
            }
        }
        calculateMaskVertices() {
            // actual implementation is in Sprite class
        }
        /**
         * The texture that the mesh uses.
         *
         * @member {PIXI.Texture}
         */
        get texture() {
            return this._texture;
        }
        set texture(value) // eslint-disable-line require-jsdoc
        {
            if (this._texture === value) {
                return;
            }
            this._texture = value;
            if (value) {
                // wait for the texture to load
                if (value.baseTexture.hasLoaded) {
                    this._onTextureUpdate();
                }
                else {
                    value.once('update', this._onTextureUpdate, this);
                }
            }
        }
        /**
         * Switches to the per-vertex-color render plugin and allocates the
         * colors array, defaulting every vertex to dark=0x0, light=0xffffffff.
         */
        enableColors() {
            this.pluginName = 'meshColored';
            const len = this.vertices.length / 2;
            const colors = new Uint32Array(len * 2);
            this.colors = colors;
            for (let i = 0; i < len; i++) {
                this.colors[i * 2] = 0;
                this.colors[i * 2 + 1] = 0xffffffff;
            }
        }
        /**
         * @param {Float32Array} rgb 3 * len numbers, RGB colors of mesh
         * @param {boolean} dark whether its dark or light tint
         */
        setRGB(rgb: Float32Array, dark: boolean) {
            const colors = this.colors;
            // Dark colors occupy even slots, light colors odd slots;
            // light colors additionally carry a full alpha byte.
            let j = dark ? 0 : 1;
            let a = dark ? 0 : (0xff << 24);
            for (let i = 0; i < rgb.length; i += 3) {
                // Pack float [0..1] RGB into a 0xAARRGGBB integer.
                colors[j] = a | ((rgb[i] * 255) << 16) | ((rgb[i+1] * 255) << 8) | ((rgb[i+2] * 255) << 0);
                j+=2;
            }
            this.dirty++;
        }
        // Whole-mesh color transform (tint + alpha), separate from the
        // per-vertex colors above.
        color = new ColorTransform();
        /**
         * The tint applied to the mesh. This is a hex value. A value of 0xFFFFFF will remove any tint effect.
         *
         * @member {number}
         * @default 0xFFFFFF
         */
        get tint() {
            return this.color ? this.color.tintBGR : 0xffffff;
        }
        set tint(value: number) {
            this.color && (this.color.tintBGR = value);
        }
        static DRAW_MODES = PIXI.mesh.Mesh.DRAW_MODES;
        /**
         * Stops any running texture animation and releases every cached
         * per-renderer GL resource before destroying the container.
         */
        destroy(options?: PIXI.DestroyOptions | boolean) {
            if (this.animState) {
                this.animState.stop();
                this.animState = null;
            }
            for (const id in this._glDatas)
            {
                const data = this._glDatas[id];
                // Prefer the data's own destroy(); otherwise release each
                // buffer/vao individually.
                if (data.destroy)
                {
                    data.destroy();
                }
                else
                {
                    if (data.vertexBuffer)
                    {
                        data.vertexBuffer.destroy();
                        data.vertexBuffer = null;
                    }
                    if (data.indexBuffer)
                    {
                        data.indexBuffer.destroy();
                        data.indexBuffer = null;
                    }
                    if (data.colorBuffer)
                    {
                        data.colorBuffer.destroy();
                        data.colorBuffer = null;
                    }
                    if (data.uvBuffer)
                    {
                        data.uvBuffer.destroy();
                        data.uvBuffer = null;
                    }
                    if (data.vao)
                    {
                        data.vao.destroy();
                        data.vao = null;
                    }
                }
            }
            this._glDatas = null;
            super.destroy(options);
        }
    }
}
import { Augur } from "augur.js";
import BigNumber from "bignumber.js";
import * as t from "io-ts";
import * as Knex from "knex";
import * as _ from "lodash";
import { Dictionary, NumericDictionary } from "lodash";
import { FrozenFunds } from "../../blockchain/log-processors/profit-loss/frozen-funds";
import { getCurrentTime } from "../../blockchain/process-block";
import { ZERO } from "../../constants";
import { Address, TradesRow, MarketsRow } from "../../types";
import { Percent, Price, safePercent, Shares, Tokens } from "../../utils/dimension-quantity";
import { getRealizedProfitPercent, getTotalCost, getTotalProfit, getTotalProfitPercent, getTradePrice, getUnrealizedCost, getUnrealizedProfit, getUnrealizedProfitPercent, getUnrealizedRevenue } from "../../utils/financial-math";
// Fallback bucket count used by bucketRangeByInterval when the caller
// does not supply a periodInterval.
const DEFAULT_NUMBER_OF_BUCKETS = 30;
/**
 * Builds a zero-valued ProfitLossTimeseries placeholder, used when an
 * account has no profit-loss history yet for an outcome.
 */
export function getDefaultPLTimeseries(): ProfitLossTimeseries {
    const empty: ProfitLossTimeseries = {
        timestamp: 0,
        account: "",
        marketId: "",
        outcome: 0,
        transactionHash: "",
        price: ZERO,
        position: ZERO,
        quantityOpened: ZERO,
        numOutcomes: 2, // binary market is the minimal/default shape
        profit: ZERO,
        realizedCost: ZERO,
        minPrice: ZERO,
        maxPrice: ZERO,
        frozenFunds: ZERO,
    };
    return empty;
}
/**
 * Builds a zero-valued OutcomeValueTimeseries placeholder, used when no
 * outcome value has been recorded yet.
 */
export function getDefaultOVTimeseries(): OutcomeValueTimeseries {
    const empty: OutcomeValueTimeseries = {
        timestamp: 0,
        marketId: "",
        outcome: 0,
        value: ZERO,
        transactionHash: "",
        blockNumber: 0,
        logIndex: 0,
    };
    return empty;
}
// Marker interface for records carrying a unix timestamp (seconds).
export interface Timestamped {
  timestamp: number;
}
// One row of an account's per-outcome profit-loss history, joined with the
// market's price bounds (see queryProfitLossTimeseries).
export interface ProfitLossTimeseries extends Timestamped, FrozenFunds {
  account: Address;
  marketId: Address;
  outcome: number;
  transactionHash: string;
  price: BigNumber; // denominated in tokens/share. average price user paid for shares in the current open position
  position: BigNumber; // denominated in shares. Known as "net position", this is the number of shares the user currently owns for this outcome; if it's a positive number, the user is "long" and earns money if the share price goes up; if it's a negative number the user is "short" and earns money if the share price goes down. Eg. "-15" means an open position of short 15 shares.
  quantityOpened: BigNumber; // denominated in shares. See TradeQuantityOpened
  numOutcomes: number;
  profit: BigNumber; // denominated in tokens. Realized profit of shares that were bought and sold
  realizedCost: BigNumber; // denominated in tokens. Cumulative cost of shares included in realized profit
  minPrice: BigNumber; // market minPrice in tokens. Helps convert between TradePriceMinusMinPrice, TradePrice, and SharePrice
  maxPrice: BigNumber; // market maxPrice in tokens. Helps convert between TradePriceMinusMinPrice, TradePrice, and SharePrice
}
// One historical value snapshot for a market outcome, ordered by
// (timestamp, blockNumber, logIndex).
export interface OutcomeValueTimeseries extends Timestamped {
  marketId: Address;
  outcome: number;
  value: BigNumber; // tokens; outcome value at this point in time
  transactionHash: string;
  blockNumber: number;
  logIndex: number;
}
// ProfitLossResult is the profit or loss result, at a particular point in
// time, of a user's position in a single market outcome. ProfitLossResult
// represents the total accumulation of history (for this market
// outcome) as of an instantaneous point in time, like a balance sheet in
// accounting. A user's "position" refers to the shares they have bought
// ("long" position) or sold ("short" position) in this market outcome.
// (See also sumProfitLossResults, which aggregates these across outcomes.)
export interface ProfitLossResult extends
  Timestamped, // profit and loss as of this timestamp
  FrozenFunds { // funds the user froze to be in this position (see FrozenFunds docs)
  marketId: Address; // user's position is in this market
  outcome: number; // user's position is in this market outcome
  netPosition: BigNumber; // denominated in shares. See NetPosition
  averagePrice: BigNumber; // denominated in tokens/share. See AverageTradePriceMinusMinPriceForOpenPosition
  unrealizedCost: BigNumber; // denominated in tokens. See UnrealizedCost
  unrealizedRevenue: BigNumber; // denominated in tokens. See UnrealizedRevenue
  unrealized: BigNumber; // ie. unrealizedProfit. Denominated in tokens. See UnrealizedProfit
  unrealizedPercent: BigNumber; // unrealized profit percent. See UnrealizedProfitPercent
  realizedCost: BigNumber; // denominated in tokens. See RealizedCost
  realized: BigNumber; // ie. realizedProfit. Denominated in tokens. See RealizedProfit
  realizedPercent: BigNumber; // realized profit percent. See RealizedProfitPercent
  totalCost: BigNumber; // denominated in tokens. See TotalCost
  total: BigNumber; // ie totalProfit. Denominated in tokens. See TotalProfit
  totalPercent: BigNumber; // total profit percent. See TotalProfitPercent
  lastTradePrice: BigNumber; // denominated in tokens. Last (most recent) price at which this outcome was traded by anybody. See TradePrice
  lastTradePrice24hAgo: BigNumber; // denominated in tokens. As of 24 hours ago, last (most recent) price at which this outcome was traded by anybody. See TradePrice
  lastTradePrice24hChangePercent: BigNumber; // percent change in lastTradePrice from 24 hours ago (NB this is calculated using LastTradePriceMinusMinPrice, not LastTradePrice)
  unrealizedRevenue24hAgo: BigNumber; // denominated in tokens. See UnrealizedRevenue, except this is is calculated using lastTradePrice from 24 hours ago, as if the user held this position constant for the past 24 hours. But, if the user (further) opened their current position within the past 24 hours, then the price at which the position was opened is used instead of the actual lastTradePrice from 24 hours ago
  unrealizedRevenue24hChangePercent: BigNumber; // percent change in unrealizedRevenue from 24 hours ago
}
// A short holding in a single outcome: position is the (share-denominated)
// size of the short.
export interface ShortPosition {
  outcome: number;
  position: BigNumber;
}
// io-ts runtime codecs for the profit/loss endpoint parameters.
// Shared base: every P&L query scopes by universe + account and may bound
// the time range / bucket width.
const GetProfitLossSharedParams = t.type({
  universe: t.string,
  account: t.string,
  startTime: t.union([t.number, t.null]),
  endTime: t.union([t.number, t.null]),
  periodInterval: t.union([t.number, t.null]),
});
const MarketIdParams = t.type({
  marketId: t.union([t.string, t.null]),
});
// P&L across all (or one) market: marketId is optional/nullable.
export const GetProfitLossParams = t.intersection([GetProfitLossSharedParams, MarketIdParams]);
export type GetProfitLossParamsType = t.TypeOf<typeof GetProfitLossParams>;
const MarketIdAndOutcomeParams = t.type({
  marketId: t.string,
  outcome: t.number,
});
// P&L for one specific market outcome: marketId and outcome are required.
export const GetOutcomeProfitLossParams = t.intersection([GetProfitLossSharedParams, MarketIdAndOutcomeParams]);
export type GetOutcomeProfitLossParamsType = t.TypeOf<typeof GetOutcomeProfitLossParams>;
// Summary endpoint: no bucketing, optional end time.
export const GetProfitLossSummaryParams = t.intersection([t.type({
  universe: t.string,
  account: t.string,
  marketId: t.union([t.string, t.null]),
}), t.partial({
  endTime: t.number,
})]);
export type GetProfitLossSummaryParamsType = t.TypeOf<typeof GetProfitLossSummaryParams>;
/**
 * Splits [startTime, endTime] into bucket boundaries spaced periodInterval
 * seconds apart (or ~DEFAULT_NUMBER_OF_BUCKETS evenly sized buckets when
 * periodInterval is null). endTime is always included as the final bucket.
 * @throws Error on negative timestamps, inverted range, or non-positive interval
 */
export function bucketRangeByInterval(startTime: number, endTime: number, periodInterval: number | null): Array<Timestamped> {
    if (startTime < 0) throw new Error("startTime must be a valid unix timestamp, greater than 0");
    if (endTime < 0) throw new Error("endTime must be a valid unix timestamp, greater than 0");
    if (endTime < startTime) throw new Error("endTime must be greater than or equal startTime");
    if (periodInterval !== null && periodInterval <= 0) throw new Error("periodInterval must be positive integer (seconds)");
    const interval = periodInterval == null ? Math.ceil((endTime - startTime) / DEFAULT_NUMBER_OF_BUCKETS) : periodInterval;
    const buckets: Array<Timestamped> = [];
    let bucketEndTime = startTime;
    while (bucketEndTime < endTime) {
        buckets.push({ timestamp: bucketEndTime });
        bucketEndTime += interval;
    }
    // The range end is always its own bucket boundary.
    buckets.push({ timestamp: endTime });
    return buckets;
}
/**
 * Combines two per-outcome ProfitLossResults into one aggregate (eg. to roll
 * outcomes up to a market, or markets up to a universe). Additive fields are
 * summed; the three profit percentages are recomputed from the combined
 * totals rather than summed or averaged. All remaining fields (timestamp,
 * marketId, outcome, frozenFunds, lastTradePrice, ...) are carried over from
 * `left` via the clone + Object.assign merge.
 *
 * NOTE(review): averagePrice is the simple mean of the two inputs, which is
 * only exact when both sides opened equal quantities — confirm with callers.
 */
export function sumProfitLossResults<T extends ProfitLossResult>(left: T, right: T): T {
  const leftPosition = new BigNumber(left.netPosition, 10);
  const rightPosition = new BigNumber(right.netPosition, 10);
  const netPosition = leftPosition.plus(rightPosition);
  const averagePrice = left.averagePrice.plus(right.averagePrice).dividedBy(2);
  const realized = left.realized.plus(right.realized);
  const unrealized = left.unrealized.plus(right.unrealized);
  const unrealizedCost = left.unrealizedCost.plus(right.unrealizedCost);
  const unrealizedRevenue = left.unrealizedRevenue.plus(right.unrealizedRevenue);
  const realizedCost = left.realizedCost.plus(right.realizedCost);
  const totalCost = left.totalCost.plus(right.totalCost);
  const total = realized.plus(unrealized);
  // Recompute percentages from the combined cost/profit pairs.
  const { unrealizedProfitPercent } = getUnrealizedProfitPercent({
    unrealizedCost: new Tokens(unrealizedCost),
    unrealizedProfit: new Tokens(unrealized),
  });
  const { realizedProfitPercent } = getRealizedProfitPercent({
    realizedCost: new Tokens(realizedCost),
    realizedProfit: new Tokens(realized),
  });
  const { totalProfitPercent } = getTotalProfitPercent({
    totalCost: new Tokens(totalCost),
    totalProfit: new Tokens(total),
  });
  return Object.assign(_.clone(left), {
    netPosition,
    averagePrice,
    realized,
    unrealized,
    total,
    unrealizedCost,
    realizedCost,
    totalCost,
    unrealizedPercent: unrealizedProfitPercent.magnitude,
    realizedPercent: realizedProfitPercent.magnitude,
    totalPercent: totalProfitPercent.magnitude,
    unrealizedRevenue,
  });
}
/**
 * Loads an account's profit-loss timeseries rows for a universe, joined
 * with market metadata (price bounds, outcome count), ordered by
 * timestamp, optionally filtered by market and bounded by
 * [startTime, endTime || now].
 */
async function queryProfitLossTimeseries(db: Knex, now: number, params: GetProfitLossParamsType): Promise<Array<ProfitLossTimeseries>> {
    const builder = db("wcl_profit_loss_timeseries")
        .select("wcl_profit_loss_timeseries.*", "markets.universe", "markets.numOutcomes", "markets.minPrice", "markets.maxPrice")
        .join("markets", "wcl_profit_loss_timeseries.marketId", "markets.marketId")
        .where({ account: params.account, universe: params.universe })
        .orderBy("timestamp");
    if (params.marketId !== null) {
        builder.where("wcl_profit_loss_timeseries.marketId", params.marketId);
    }
    if (params.startTime) {
        builder.where("timestamp", ">=", params.startTime);
    }
    builder.where("timestamp", "<=", params.endTime || now);
    return builder;
}
/**
 * Loads outcome-value timeseries rows for a universe's markets, ordered by
 * (timestamp, blockNumber, logIndex) ascending, optionally filtered by
 * market and bounded by [startTime, endTime || now].
 */
async function queryOutcomeValueTimeseries(db: Knex, now: number, params: GetProfitLossParamsType): Promise<Array<OutcomeValueTimeseries>> {
    const builder = db("outcome_value_timeseries")
        .select("outcome_value_timeseries.*", "markets.universe")
        .join("markets", "outcome_value_timeseries.marketId", "markets.marketId")
        .orderBy("timestamp", "asc")
        .orderBy("blockNumber", "asc")
        .orderBy("logIndex", "asc");
    if (params.marketId !== null) {
        builder.where("outcome_value_timeseries.marketId", params.marketId);
    }
    if (params.startTime) {
        builder.where("timestamp", ">=", params.startTime);
    }
    builder.where("timestamp", "<=", params.endTime || now);
    return builder;
}
/**
 * For each requested bucket timestamp, emits (per key of `timestampeds`) the
 * most recent timeseries entry at or before that bucket, falling back to
 * `defaultValue` when no entry exists yet. Entries are carried forward across
 * buckets and each emitted row is re-stamped with its bucket's timestamp.
 *
 * Fixes two defects in the previous version:
 *  - `_.drop(values, ...)` was dead code: lodash `drop` returns a new array
 *    (it does not mutate) and the return value was discarded. Removed.
 *  - The carried-forward entry was the SAME object in every bucket (and the
 *    same object stored in `timestampeds`), so assigning `result.timestamp`
 *    clobbered the timestamp already emitted for earlier buckets and mutated
 *    the source data. We now clone before re-stamping.
 *
 * @param timestampeds per-key arrays of entries, each sorted ascending by timestamp
 * @param timestamps the bucket timestamps to sample at, ascending
 * @param defaultValue template used when a key has no entry at/before a bucket
 * @returns one array per bucket, each holding one entry per key of `timestampeds`
 */
function bucketAtTimestamps<T extends Timestamped>(timestampeds: Dictionary<Array<T>>, timestamps: Array<Timestamped>, defaultValue: T): Array<Array<T>> {
  return _.map(timestamps, (bucket) => {
    return _.map(timestampeds, (values, outcome: string) => {
      // Latest entry at or before this bucket (values are sorted ascending).
      const beforeBucket = _.takeWhile(values, (pl) => pl.timestamp <= bucket.timestamp);
      let result: T;
      if (beforeBucket.length > 0) {
        // Clone so that re-stamping the timestamp below cannot mutate the
        // source entry shared with other (earlier or later) buckets.
        result = Object.assign({}, _.last(beforeBucket)) as T;
      } else {
        result = Object.assign({ outcome }, defaultValue);
      }
      result.timestamp = bucket.timestamp;
      return result;
    });
  });
}
/**
 * Builds one ProfitLossResult per outcome position for a single market bucket
 * at a single timestamp, combining realized figures from the PL timeseries
 * with unrealized figures derived from last trade prices.
 *
 * @param plsAtTimestamp PL timeseries rows, one per outcome, all at the same bucket timestamp
 * @param outcomeValuesAtTimestamp last trade values at this timestamp; null when the
 *   market has no open positions (unrealized prices then fall back to Price.ZERO)
 * @param lastTradePriceMinusMinPrice24hAgoByOutcomeByMarketId reference prices as of 24h ago
 * @param oldestTradePriceMinusMinPriceUserPaidForOpenPositionInLast24hByOutcomeByMarketId see inline doc below
 * @returns results sorted ascending by outcome number
 */
function getProfitResultsForTimestamp(
  plsAtTimestamp: Array<ProfitLossTimeseries>,
  outcomeValuesAtTimestamp: Array<OutcomeValueTimeseries> | null, lastTradePriceMinusMinPrice24hAgoByOutcomeByMarketId: Dictionary<Dictionary<OutcomeValueTimeseries>>,
  // See below for doc on oldestTradePriceMinusMinPriceUserPaidForOpenPositionInLast24h
  oldestTradePriceMinusMinPriceUserPaidForOpenPositionInLast24hByOutcomeByMarketId: Dictionary<Dictionary<Price>>,
): Array<ProfitLossResult> {
  const unsortedResults: Array<ProfitLossResult> = plsAtTimestamp.map((outcomePl) => {
    // Wrap raw DB columns in their unit-safe types (Tokens / Price / Shares).
    const realizedCost = new Tokens(outcomePl.realizedCost);
    const realizedProfit = new Tokens(outcomePl.profit);
    const outcome = outcomePl.outcome;
    const averageTradePriceMinusMinPriceForOpenPosition = new Price(outcomePl.price);
    const marketMinPrice = new Price(outcomePl.minPrice);
    const marketMaxPrice = new Price(outcomePl.maxPrice);
    const netPosition = new Shares(outcomePl.position);
    const frozenFunds = new Tokens(outcomePl.frozenFunds);
    // Ie. averageTradePriceForOpenPosition is assigned to
    // ProfitLossResult.averagePrice, and ProfitLossResult.averagePrice
    // is a TradePrice and not TradePriceMinusMinPrice.
    const averageTradePriceForOpenPosition = getTradePrice({
      marketMinPrice,
      tradePriceMinusMinPrice: averageTradePriceMinusMinPriceForOpenPosition,
    }).tradePrice;
    // Fall back to Price.ZERO when this market/outcome had no trade as of 24h ago.
    const lastTradePriceMinusMinPrice24hAgo: Price = lastTradePriceMinusMinPrice24hAgoByOutcomeByMarketId[outcomePl.marketId] !== undefined ? (
      lastTradePriceMinusMinPrice24hAgoByOutcomeByMarketId[outcomePl.marketId][outcome] !== undefined ?
        new Price(lastTradePriceMinusMinPrice24hAgoByOutcomeByMarketId[outcomePl.marketId][outcome].value) : Price.ZERO
    ) : Price.ZERO;
    // NOTE(review): indexes the array by outcome number; relies on the caller
    // passing an array whose index IS the outcome (see getProfitResultsForMarket's
    // sortedOutcomeValues) — confirm if adding new callers.
    const lastTradePriceMinusMinPrice: Price = outcomeValuesAtTimestamp ? new Price(outcomeValuesAtTimestamp[outcome].value).minus(marketMinPrice) : Price.ZERO;
    // oldestTradePriceMinusMinPriceUserPaidForOpenPositionInLast24h is the oldest
    // TradePriceMinusMinPrice a user paid to (further) open their position within
    // the last 24 hours. For example, this will be undefined if the user's position
    // was opened 2 days ago and not modified since; if the user opened the position
    // at a price of 0.3 twelve hours ago, and then further opened the position
    // at a price of 0.7 six hours ago, then this will be 0.3 because it's the
    // oldest price _within the last 24h ignoring position closes and reversals_.
    const oldestTradePriceMinusMinPriceUserPaidForOpenPositionInLast24h: Price | undefined = oldestTradePriceMinusMinPriceUserPaidForOpenPositionInLast24hByOutcomeByMarketId[outcomePl.marketId] !== undefined ?
      oldestTradePriceMinusMinPriceUserPaidForOpenPositionInLast24hByOutcomeByMarketId[outcomePl.marketId][outcome]
      : undefined;
    // Derived cost/revenue/profit figures; each helper returns a named field.
    const { unrealizedCost } = getUnrealizedCost({
      marketMinPrice,
      marketMaxPrice,
      netPosition,
      averageTradePriceMinusMinPriceForOpenPosition,
    });
    const { unrealizedRevenue } = getUnrealizedRevenue({
      marketMinPrice,
      marketMaxPrice,
      netPosition,
      lastTradePriceMinusMinPrice,
    });
    const { unrealizedProfit } = getUnrealizedProfit({
      marketMinPrice,
      marketMaxPrice,
      netPosition,
      averageTradePriceMinusMinPriceForOpenPosition,
      lastTradePriceMinusMinPrice,
    });
    const { totalCost } = getTotalCost({
      marketMinPrice,
      marketMaxPrice,
      netPosition,
      averageTradePriceMinusMinPriceForOpenPosition,
      realizedCost,
    });
    const { totalProfit } = getTotalProfit({
      marketMinPrice,
      marketMaxPrice,
      netPosition,
      averageTradePriceMinusMinPriceForOpenPosition,
      lastTradePriceMinusMinPrice,
      realizedProfit,
    });
    const { realizedProfitPercent } = getRealizedProfitPercent({
      realizedCost,
      realizedProfit,
    });
    const { unrealizedProfitPercent } = getUnrealizedProfitPercent({
      marketMinPrice,
      marketMaxPrice,
      netPosition,
      averageTradePriceMinusMinPriceForOpenPosition,
      lastTradePriceMinusMinPrice,
    });
    const { totalProfitPercent } = getTotalProfitPercent({
      marketMinPrice,
      marketMaxPrice,
      netPosition,
      averageTradePriceMinusMinPriceForOpenPosition,
      lastTradePriceMinusMinPrice,
      realizedCost,
      realizedProfit,
    });
    // Convert min-price-relative prices back to absolute trade prices for display.
    const { tradePrice: lastTradePrice } = getTradePrice({
      marketMinPrice,
      tradePriceMinusMinPrice: lastTradePriceMinusMinPrice,
    });
    const { tradePrice: lastTradePrice24hAgo } = getTradePrice({
      marketMinPrice,
      tradePriceMinusMinPrice: lastTradePriceMinusMinPrice24hAgo,
    });
    const lastTradePrice24hChangePercent: Percent = safePercent({
      numerator: lastTradePriceMinusMinPrice,
      denominator: lastTradePriceMinusMinPrice24hAgo,
      subtractOne: true,
    });
    const { unrealizedRevenue: unrealizedRevenue24hAgo } = getUnrealizedRevenue({
      marketMinPrice,
      marketMaxPrice,
      netPosition,
      lastTradePriceMinusMinPrice: oldestTradePriceMinusMinPriceUserPaidForOpenPositionInLast24h || lastTradePriceMinusMinPrice24hAgo, // if user opened position within last 24h we want to use their open price as the "reference price" for unrealizedRevenue24hAgo because otherwise they will make a trade and then instantly see a 24-hour "loss" or "gain" because it'll compare the price they paid just now to the (irrelevant) price 24 hours ago
    });
    const unrealizedRevenue24hChangePercent: Percent = safePercent({
      numerator: unrealizedRevenue,
      denominator: unrealizedRevenue24hAgo,
      subtractOne: true,
    });
    // Unwrap unit types to raw magnitudes for the public API shape.
    return {
      marketId: outcomePl.marketId,
      outcome,
      timestamp: outcomePl.timestamp,
      netPosition: netPosition.magnitude,
      realized: realizedProfit.magnitude,
      unrealized: unrealizedProfit.magnitude,
      total: totalProfit.magnitude,
      averagePrice: averageTradePriceForOpenPosition.magnitude,
      unrealizedCost: unrealizedCost.magnitude,
      realizedCost: realizedCost.magnitude,
      totalCost: totalCost.magnitude,
      realizedPercent: realizedProfitPercent.magnitude,
      unrealizedPercent: unrealizedProfitPercent.magnitude,
      totalPercent: totalProfitPercent.magnitude,
      unrealizedRevenue: unrealizedRevenue.magnitude,
      frozenFunds: frozenFunds.magnitude,
      lastTradePrice: lastTradePrice.magnitude,
      lastTradePrice24hAgo: lastTradePrice24hAgo.magnitude,
      lastTradePrice24hChangePercent: lastTradePrice24hChangePercent.magnitude,
      unrealizedRevenue24hAgo: unrealizedRevenue24hAgo.magnitude,
      unrealizedRevenue24hChangePercent: unrealizedRevenue24hChangePercent.magnitude,
    };
  });
  return _.sortBy(unsortedResults, "outcome")!;
}
/**
 * Computes ProfitLossResults for every bucket timestamp of a single market.
 * For buckets where the user holds no open position, unrealized figures are
 * computed against zero prices; otherwise the outcome values are re-indexed
 * into a dense array (index == outcome number) before delegating to
 * getProfitResultsForTimestamp.
 *
 * NOTE(review): marketOutcomeValues is typed as a (non-optional) array but is
 * checked for truthiness — callers appear to pass undefined when a market has
 * no outcome values; confirm and consider widening the type.
 */
function getProfitResultsForMarket(
  marketPls: Array<Array<ProfitLossTimeseries>>, marketOutcomeValues: Array<Array<OutcomeValueTimeseries>>,
  buckets: Array<Timestamped>,
  lastTradePriceMinusMinPrice24hAgoByOutcomeByMarketId: Dictionary<Dictionary<OutcomeValueTimeseries>>,
  // See doc on oldestTradePriceMinusMinPriceUserPaidForOpenPositionInLast24h
  oldestTradePriceMinusMinPriceUserPaidForOpenPositionInLast24hByOutcomeByMarketId: Dictionary<Dictionary<Price>>,
): Array<Array<ProfitLossResult>> {
  return _.map(marketPls, (outcomePLsAtTimestamp, timestampIndex) => {
    // No open position at this bucket: last trade prices are irrelevant.
    const nonZeroPositionOutcomePls = _.filter(outcomePLsAtTimestamp, (outcome) => !outcome.position.eq(ZERO));
    if (nonZeroPositionOutcomePls.length < 1) {
      return getProfitResultsForTimestamp(outcomePLsAtTimestamp, null, lastTradePriceMinusMinPrice24hAgoByOutcomeByMarketId, oldestTradePriceMinusMinPriceUserPaidForOpenPositionInLast24hByOutcomeByMarketId);
    }
    const numOutcomes = nonZeroPositionOutcomePls[0].numOutcomes;
    const outcomeValuesAtTimestamp = marketOutcomeValues ? marketOutcomeValues[timestampIndex] : _.fill(Array(numOutcomes), getDefaultOVTimeseries());
    // turn outcome values into real list
    const sortedOutcomeValues = _.reduce(_.range(numOutcomes), (result, outcomeIndex) => {
      let outcomeValue = _.find(outcomeValuesAtTimestamp, (ov) => ov.outcome === outcomeIndex);
      if (!outcomeValue) outcomeValue = Object.assign(getDefaultOVTimeseries(), { outcome: outcomeIndex });
      result.push(outcomeValue);
      return result;
    }, [] as Array<OutcomeValueTimeseries>);
    return getProfitResultsForTimestamp(outcomePLsAtTimestamp, sortedOutcomeValues, lastTradePriceMinusMinPrice24hAgoByOutcomeByMarketId, oldestTradePriceMinusMinPriceUserPaidForOpenPositionInLast24hByOutcomeByMarketId);
  });
}
// Raw inputs assembled by getProfitLossData, consumed by getAllOutcomesProfitLoss.
interface ProfitLossData {
  profits: Dictionary<Dictionary<Array<ProfitLossTimeseries>>>; // PL timeseries rows by outcome by marketId, each array ascending by timestamp
  outcomeValues: Dictionary<Dictionary<Array<OutcomeValueTimeseries>>>; // historical lastTradePriceMinusMinPrices by outcome by marketId, see TradePriceMinusMinPrice
  buckets: Array<Timestamped>; // the bucket timestamps at which results are sampled
  lastTradePriceMinusMinPrice24hAgoByOutcomeByMarketId: Dictionary<Dictionary<OutcomeValueTimeseries>>; // lastTradePriceMinusMinPrice by outcome by marketId as of 24 hours ago, see TradePriceMinusMinPrice
  oldestTradePriceMinusMinPriceUserPaidForOpenPositionInLast24hByOutcomeByMarketId: Dictionary<Dictionary<Price>>; // oldest TradePriceMinusMinPrice a user paid to (further) open their position within the last 24 hours, by outcome by marketId. For example, this will be undefined if the user's position was opened 2 days ago and not modified since; if the user opened the position at a price of 0.3 twelve hours ago, and then further opened the position at a price of 0.7 six hours ago, then this will be 0.3 because it's the oldest price _within the last 24h ignoring position closes and reversals_.
}
/**
 * Gathers everything needed to compute PL results for the given params:
 * the PL timeseries and outcome-value timeseries (each grouped by
 * marketId then outcome), the sampling buckets, and the 24h reference prices.
 * Returns empty dictionaries (but valid buckets) when there are no trades.
 */
async function getProfitLossData(db: Knex, params: GetProfitLossParamsType): Promise<ProfitLossData> {
  const now = getCurrentTime();
  // Realized Profits + Timeseries data about the state of positions
  const profitsOverTime = await queryProfitLossTimeseries(db, now, params);
  const marketProfits = _.groupBy(profitsOverTime, (r) => r.marketId);
  const profits: Dictionary<Dictionary<Array<ProfitLossTimeseries>>> = _.reduce(marketProfits, (result, value, key) => {
    result[key] = _.groupBy(value, (r) => r.outcome);
    return result;
  }, {} as Dictionary<Dictionary<Array<ProfitLossTimeseries>>>);
  // If there are no trades in this window then we'll return empty data
  if (_.isEmpty(profits)) {
    const buckets = bucketRangeByInterval(params.startTime || 0, params.endTime || now, params.periodInterval);
    return { profits: {}, outcomeValues: {}, buckets, lastTradePriceMinusMinPrice24hAgoByOutcomeByMarketId: {}, oldestTradePriceMinusMinPriceUserPaidForOpenPositionInLast24hByOutcomeByMarketId: {} };
  }
  // The value of an outcome over time, for computing unrealized profit and loss at a time
  const outcomeValuesOverTime = await queryOutcomeValueTimeseries(db, now, params);
  const marketOutcomeValues = _.groupBy(outcomeValuesOverTime, (r) => r.marketId);
  const outcomeValues: Dictionary<Dictionary<Array<OutcomeValueTimeseries>>> = _.reduce(marketOutcomeValues, (result, value, key) => {
    result[key] = _.groupBy(value, (r) => r.outcome);
    return result;
  }, {} as Dictionary<Dictionary<Array<OutcomeValueTimeseries>>>);
  // The timestamps at which we need to return results
  const startTime = params.startTime || profitsOverTime[0].timestamp;
  // NOTE(review): if outcomeValuesOverTime is empty, _.last(...)! is undefined
  // and `.timestamp` below throws — looks unreachable when profits is non-empty,
  // but confirm.
  const maxResultTime = Math.max(_.last(profitsOverTime)!.timestamp, _.last(outcomeValuesOverTime)!.timestamp);
  const endTime = Math.min(maxResultTime, now);
  const interval = params.periodInterval || null;
  const buckets = bucketRangeByInterval(startTime, endTime, interval);
  return {
    profits,
    outcomeValues,
    buckets,
    lastTradePriceMinusMinPrice24hAgoByOutcomeByMarketId: await getLastTradePriceMinusMinPrice24hAgoByOutcomeByMarketId(db, now, params),
    oldestTradePriceMinusMinPriceUserPaidForOpenPositionInLast24hByOutcomeByMarketId: await getOldestTradePriceMinusMinPriceUserPaidForOpenPositionInLast24hByOutcomeByMarketId(db, now, params),
  };
}
// Per-market, per-bucket, per-outcome PL results plus the bucket timestamps.
export interface AllOutcomesProfitLoss {
  profit: Dictionary<Array<Array<ProfitLossResult>>>; // marketId -> buckets -> one result per outcome
  buckets: Array<Timestamped>; // the bucket timestamps the results were sampled at
  marketOutcomes: Dictionary<number>; // marketId -> number of outcomes in that market
}
/**
 * Computes PL results per market, per bucket timestamp, per outcome:
 * buckets the raw timeseries at the sampling timestamps (carrying values
 * forward), then derives PL figures for each market/bucket.
 */
export async function getAllOutcomesProfitLoss(db: Knex, params: GetProfitLossParamsType): Promise<AllOutcomesProfitLoss> {
  const { profits, outcomeValues, buckets, lastTradePriceMinusMinPrice24hAgoByOutcomeByMarketId, oldestTradePriceMinusMinPriceUserPaidForOpenPositionInLast24hByOutcomeByMarketId } = await getProfitLossData(db, params);
  // Sample each market's PL rows at the bucket timestamps (carry-forward).
  const bucketedProfits = _.mapValues(profits, (pls, marketId) => {
    return bucketAtTimestamps<ProfitLossTimeseries>(pls, buckets, Object.assign(getDefaultPLTimeseries(), { marketId }));
  });
  // Same for outcome values.
  const bucketedOutcomeValues = _.mapValues(outcomeValues, (marketOutcomeValues) => {
    return bucketAtTimestamps<OutcomeValueTimeseries>(marketOutcomeValues, buckets, getDefaultOVTimeseries());
  });
  const profit = _.mapValues(bucketedProfits, (pls, marketId) => {
    return getProfitResultsForMarket(pls, bucketedOutcomeValues[marketId], buckets, lastTradePriceMinusMinPrice24hAgoByOutcomeByMarketId, oldestTradePriceMinusMinPriceUserPaidForOpenPositionInLast24hByOutcomeByMarketId);
  });
  // marketId -> numOutcomes, taken from the first PL row of each market.
  const marketOutcomes = _.fromPairs(_.values(_.mapValues(profits, (pls) => {
    const first = _.first(_.first(_.values(pls)))!;
    return [first.marketId, first.numOutcomes];
  })));
  return {
    profit,
    marketOutcomes,
    buckets,
  };
}
/**
 * Returns one aggregate ProfitLossResult per bucket timestamp, summed across
 * all markets and outcomes matching the params. When there is no PL data at
 * all, returns one zero-filled result per bucket.
 */
export async function getProfitLoss(db: Knex, augur: Augur, params: GetProfitLossParamsType): Promise<Array<ProfitLossResult>> {
  const { profit: outcomesProfitLoss, buckets } = await getAllOutcomesProfitLoss(db, params);
  if (_.isEmpty(outcomesProfitLoss)) {
    // No data: emit an all-zero result for every bucket so callers always get
    // one entry per requested timestamp.
    return buckets.map((bucket) => ({
      timestamp: bucket.timestamp,
      position: ZERO,
      realized: ZERO,
      unrealized: ZERO,
      total: ZERO,
      cost: ZERO,
      averagePrice: ZERO,
      numEscrowed: ZERO,
      totalPosition: ZERO,
      outcome: 0,
      netPosition: ZERO,
      marketId: "",
      unrealizedCost: ZERO,
      realizedCost: ZERO,
      totalCost: ZERO,
      realizedPercent: ZERO,
      unrealizedPercent: ZERO,
      totalPercent: ZERO,
      unrealizedRevenue: ZERO,
      frozenFunds: ZERO,
      lastTradePrice: ZERO,
      lastTradePrice24hAgo: ZERO,
      lastTradePrice24hChangePercent: ZERO,
      unrealizedRevenue24hAgo: ZERO,
      unrealizedRevenue24hChangePercent: ZERO,
    }));
  }
  // This takes us from::
  //  <marketId>: [[{timestamp: N,... }, {timestamp: N,... }], [{timestamp: M,... }, {timestamp: M,... }]]
  //  <marketId>: [[{timestamp: N,... }, {timestamp: N,... }], [{timestamp: M,... }, {timestamp: M,... }]]
  //
  // to:
  // [
  //  [[{timestamp: N,... }, {timestamp: N,... }], [{timestamp: N,... }, {timestamp: N,... }]]
  //  [[{timestamp: M,... }, {timestamp: M,... }], [{timestamp: M,... }, {timestamp: M,... }]]
  // ]
  //
  // This makes it easy to sum across the groups of timestamps
  const bucketsProfitLoss = _.zip(..._.values(outcomesProfitLoss));
  // Flatten each bucket's per-market, per-outcome results and fold them into
  // one aggregate via sumProfitLossResults.
  return bucketsProfitLoss.map((bucketsProfitLoss: Array<Array<ProfitLossResult>>): ProfitLossResult => _.reduce(_.flatten(bucketsProfitLoss), (left: ProfitLossResult, right: ProfitLossResult) => sumProfitLossResults(left, right))!);
}
/**
 * Computes 1-day and 30-day PL summaries for an account, keyed by the number
 * of days. For each window it queries exactly two PL buckets (window start and
 * window end) and subtracts the start snapshot from the end snapshot by
 * summing the end with a negated copy of the start.
 *
 * Fixes vs. previous version: corrected the typo'd error message
 * ("more thant two bucket"), and added an explicit error when getProfitLoss
 * returns fewer than two buckets (previously this crashed with a TypeError on
 * `startProfit.timestamp`).
 *
 * @throws Error when getProfitLoss does not return exactly two buckets
 */
export async function getProfitLossSummary(db: Knex, augur: Augur, params: GetProfitLossSummaryParamsType): Promise<NumericDictionary<ProfitLossResult>> {
  const endTime = params.endTime || getCurrentTime();
  const result: NumericDictionary<ProfitLossResult> = {};
  for (const days of [1, 30]) {
    const periodInterval = days * 60 * 60 * 24;
    const startTime = endTime - periodInterval;
    const [startProfit, endProfit, ...rest] = await getProfitLoss(db, augur, {
      universe: params.universe,
      account: params.account,
      marketId: params.marketId,
      startTime,
      endTime,
      periodInterval,
    });
    // With periodInterval == endTime - startTime we expect exactly two
    // buckets: one at startTime and one at endTime.
    if (rest.length !== 0) throw new Error("PL calculation in summary returning more than two buckets");
    if (startProfit === undefined || endProfit === undefined) throw new Error("PL calculation in summary returned fewer than two buckets");
    // Negate the start snapshot so sumProfitLossResults(end, -start)
    // yields the change over the window. Percent and timestamp fields are
    // passed through un-negated (they are recomputed/overwritten by the sum).
    const negativeStartProfit: ProfitLossResult = {
      timestamp: startProfit.timestamp,
      marketId: startProfit.marketId,
      outcome: startProfit.outcome,
      netPosition: startProfit.netPosition.negated(),
      averagePrice: startProfit.averagePrice,
      realized: startProfit.realized.negated(),
      unrealized: startProfit.unrealized.negated(),
      total: startProfit.total.negated(),
      unrealizedCost: startProfit.unrealizedCost.negated(),
      realizedCost: startProfit.realizedCost.negated(),
      totalCost: startProfit.totalCost.negated(),
      realizedPercent: startProfit.realizedPercent,
      unrealizedPercent: startProfit.unrealizedPercent,
      totalPercent: startProfit.totalPercent,
      unrealizedRevenue: startProfit.unrealizedRevenue.negated(),
      frozenFunds: startProfit.frozenFunds.negated(),
      lastTradePrice: startProfit.lastTradePrice.negated(),
      lastTradePrice24hAgo: startProfit.lastTradePrice24hAgo.negated(),
      lastTradePrice24hChangePercent: startProfit.lastTradePrice24hChangePercent,
      unrealizedRevenue24hAgo: startProfit.unrealizedRevenue24hAgo.negated(),
      unrealizedRevenue24hChangePercent: startProfit.unrealizedRevenue24hChangePercent,
    };
    result[days] = sumProfitLossResults(endProfit, negativeStartProfit);
  }
  return result;
}
// Returns, per marketId -> outcome, the most recent OutcomeValueTimeseries row
// as of 24 hours before params.endTime (or `now`) — the reference trade price
// used for the 24h-change figures. The price may be arbitrarily old when an
// outcome has not traded recently.
async function getLastTradePriceMinusMinPrice24hAgoByOutcomeByMarketId(db: Knex, now: number, params: GetProfitLossParamsType): Promise<Dictionary<Dictionary<OutcomeValueTimeseries>>> {
  const pricesUpTo24hAgo = await queryOutcomeValueTimeseries(db, now, {
    ...params,
    startTime: null, // we need the lastTradePriceMinusMinPrice as of 24h ago, which might be a price arbitrarily old if an outcome hasn't been traded recently
    endTime: (params.endTime || now) - 86400, // endTime is a unix timestamp in seconds; 86400 is one day in seconds, ie. endTime should be one day prior to passed params.endTime
  });
  const byMarketId = _.groupBy(pricesUpTo24hAgo, (r) => r.marketId);
  return _.reduce(byMarketId, (acc, marketPrices, marketId) => {
    const byOutcome = _.groupBy(marketPrices, (r) => r.outcome);
    // Keep only the newest row per outcome. _.maxBy resolves ties to the first
    // row seen, matching the strict ">" comparison of a manual scan.
    acc[marketId] = _.mapValues(byOutcome, (outcomePrices) => _.maxBy(outcomePrices, (r) => r.timestamp)!);
    return acc;
  }, {} as Dictionary<Dictionary<OutcomeValueTimeseries>>);
}
async function getOldestTradePriceMinusMinPriceUserPaidForOpenPositionInLast24hByOutcomeByMarketId(db: Knex, now: number, params: GetProfitLossParamsType): Promise<Dictionary<Dictionary<Price>>> {
const newerThan = (params.endTime || now) - 86400; // newerThan is a unix timestamp in seconds; 86400 is one day in seconds, ie. filter by trades that opened a user's position in the last day
const trades: Array<
Pick<TradesRow<BigNumber>, "marketId" | "outcome" | "price"> &
Pick<MarketsRow<BigNumber>, "minPrice">
> = await db.raw(`
SELECT trades.marketId, trades.outcome, price, a.minPrice
FROM trades
INNER JOIN (
SELECT wcl_profit_loss_timeseries.marketId, wcl_profit_loss_timeseries.outcome, timestamp, wcl_profit_loss_timeseries.transactionHash, wcl_profit_loss_timeseries.logIndex, markets.minPrice
FROM wcl_profit_loss_timeseries
INNER JOIN markets ON wcl_profit_loss_timeseries.marketId = markets.marketId
WHERE account = :account
AND universe = :universe
AND CAST(quantityOpened as REAL) > 0
AND timestamp >= :newerThan
GROUP by wcl_profit_loss_timeseries.marketId, outcome
HAVING min(timestamp)
) a ON trades.marketId = a.marketId AND trades.outcome = a.outcome AND trades.transactionHash = a.transactionHash AND trades.logIndex = a.logIndex
GROUP BY trades.marketId, trades.outcome
HAVING min(CAST(price as REAL))
`, {
account: params.account,
universe: params.universe,
newerThan,
});
const tradesByMarketId = _.groupBy(trades, "marketId");
return _.reduce(tradesByMarketId, (result, allTradesOneMarket, marketId) => {
const allTradesOneMarketByOutcome = _.groupBy(allTradesOneMarket, (r) => r.outcome);
const priceByOutcome = _.mapValues(allTradesOneMarketByOutcome, (allTradesForOneOutcome) => {
if (allTradesForOneOutcome.length !== 1) {
throw new Error(`getOldestTradePriceMinusMinPriceUserPaidForOpenPositionInLast24hByOutcomeByMarketId: expected allTradesForOneOutcome to have lenght 1 because we select exactly one trade row per matching (marketId, outcome), allTradesForOneOutcome.length=${allTradesForOneOutcome.length}, params=${params}`);
}
return new Price(allTradesForOneOutcome[0].price.minus(allTradesForOneOutcome[0].minPrice));
});
result[marketId] = priceByOutcome;
return result;
}, {} as Dictionary<Dictionary<Price>>);
} | the_stack |
// Key algorithms supported by these protobuf messages; the names double as
// the string values, and encodeKeyType/decodeKeyType map them to the numeric
// wire ids used on the wire.
export const enum KeyType {
  RSA = "RSA",
  Ed25519 = "Ed25519",
  Secp256k1 = "Secp256k1",
}
// Maps a KeyType name to its numeric protobuf enum value (wire encoding).
export const encodeKeyType: { [key: string]: number } = {
  RSA: 0,
  Ed25519: 1,
  Secp256k1: 2,
};
// Maps a numeric protobuf enum value back to its KeyType (inverse of encodeKeyType).
export const decodeKeyType: { [key: number]: KeyType } = {
  0: KeyType.RSA,
  1: KeyType.Ed25519,
  2: KeyType.Secp256k1,
};
// Decoded form of the protobuf PublicKey message.
export interface PublicKey {
  Type: KeyType; // required field 1: key algorithm
  Data: Uint8Array; // required field 2: algorithm-specific key bytes
}
/** Serializes a PublicKey message to its protobuf wire representation. */
export function encodePublicKey(message: PublicKey): Uint8Array {
  const buffer = popByteBuffer();
  _encodePublicKey(message, buffer);
  return toUint8Array(buffer);
}
// Writes the PublicKey fields to `bb` in field-number order.
function _encodePublicKey(message: PublicKey, bb: ByteBuffer): void {
  // required KeyType Type = 1; tag = (1 << 3) | wiretype 0 (varint) = 8
  const type = message.Type;
  if (type !== undefined) {
    writeVarint32(bb, 8);
    writeVarint32(bb, encodeKeyType[type]);
  }
  // required bytes Data = 2; tag = (2 << 3) | wiretype 2 (length-delimited) = 18
  const data = message.Data;
  if (data !== undefined) {
    writeVarint32(bb, 18);
    writeVarint32(bb, data.length);
    writeBytes(bb, data);
  }
}
/** Parses a PublicKey message from its protobuf wire representation. */
export function decodePublicKey(binary: Uint8Array): PublicKey {
  const bb = wrapByteBuffer(binary);
  return _decodePublicKey(bb);
}
// Reads PublicKey fields from `bb` until the buffer ends (or a zero tag is
// seen), skipping unknown fields, then validates both required fields arrived.
function _decodePublicKey(bb: ByteBuffer): PublicKey {
  const message = {} as PublicKey;
  while (!isAtEnd(bb)) {
    const tag = readVarint32(bb);
    const field = tag >>> 3;
    if (field === 0) break; // end-of-message sentinel
    if (field === 1) {
      // required KeyType Type = 1;
      message.Type = decodeKeyType[readVarint32(bb)];
    } else if (field === 2) {
      // required bytes Data = 2;
      message.Data = readBytes(bb, readVarint32(bb));
    } else {
      skipUnknownField(bb, tag & 7);
    }
  }
  if (message.Type === undefined)
    throw new Error("Missing required field: Type");
  if (message.Data === undefined)
    throw new Error("Missing required field: Data");
  return message;
}
// Decoded form of the protobuf PrivateKey message.
export interface PrivateKey {
  Type: KeyType; // required field 1: key algorithm
  Data: Uint8Array; // required field 2: algorithm-specific key bytes
}
/** Serializes a PrivateKey message to its protobuf wire representation. */
export function encodePrivateKey(message: PrivateKey): Uint8Array {
  const buffer = popByteBuffer();
  _encodePrivateKey(message, buffer);
  return toUint8Array(buffer);
}
// Writes the PrivateKey fields to `bb` in field-number order.
function _encodePrivateKey(message: PrivateKey, bb: ByteBuffer): void {
  // required KeyType Type = 1; tag = (1 << 3) | wiretype 0 (varint) = 8
  const type = message.Type;
  if (type !== undefined) {
    writeVarint32(bb, 8);
    writeVarint32(bb, encodeKeyType[type]);
  }
  // required bytes Data = 2; tag = (2 << 3) | wiretype 2 (length-delimited) = 18
  const data = message.Data;
  if (data !== undefined) {
    writeVarint32(bb, 18);
    writeVarint32(bb, data.length);
    writeBytes(bb, data);
  }
}
/** Parses a PrivateKey message from its protobuf wire representation. */
export function decodePrivateKey(binary: Uint8Array): PrivateKey {
  const bb = wrapByteBuffer(binary);
  return _decodePrivateKey(bb);
}
// Reads PrivateKey fields from `bb` until the buffer ends (or a zero tag is
// seen), skipping unknown fields, then validates both required fields arrived.
function _decodePrivateKey(bb: ByteBuffer): PrivateKey {
  const message = {} as PrivateKey;
  while (!isAtEnd(bb)) {
    const tag = readVarint32(bb);
    const field = tag >>> 3;
    if (field === 0) break; // end-of-message sentinel
    if (field === 1) {
      // required KeyType Type = 1;
      message.Type = decodeKeyType[readVarint32(bb)];
    } else if (field === 2) {
      // required bytes Data = 2;
      message.Data = readBytes(bb, readVarint32(bb));
    } else {
      skipUnknownField(bb, tag & 7);
    }
  }
  if (message.Type === undefined)
    throw new Error("Missing required field: Type");
  if (message.Data === undefined)
    throw new Error("Missing required field: Data");
  return message;
}
// 64-bit integer represented as two 32-bit halves (protobufjs-compatible).
export interface Long {
  low: number; // low 32 bits
  high: number; // high 32 bits
  unsigned: boolean; // whether the 64-bit value is interpreted as unsigned
}
// Growable byte buffer with a single cursor (`offset`) used for both reading
// and writing; `limit` marks the end of valid data.
interface ByteBuffer {
  bytes: Uint8Array; // backing storage; may be replaced when the buffer grows
  offset: number; // current read/write position
  limit: number; // end of valid data (read bound / written length)
}
// Reads a length-delimited field's byte length and narrows bb.limit to the end
// of that field, returning the previous limit so the caller can restore it.
function pushTemporaryLength(bb: ByteBuffer): number {
  const length = readVarint32(bb);
  const previousLimit = bb.limit;
  bb.limit = bb.offset + length;
  return previousLimit;
}
// Advances past a field of the given protobuf wire type without decoding it.
function skipUnknownField(bb: ByteBuffer, type: number): void {
  switch (type) {
    case 0: // varint: consume bytes while the continuation bit is set
      while (readByte(bb) & 0x80) { }
      break;
    case 1: // fixed 64-bit
      skip(bb, 8);
      break;
    case 2: // length-delimited: length prefix, then that many bytes
      skip(bb, readVarint32(bb));
      break;
    case 5: // fixed 32-bit
      skip(bb, 4);
      break;
    default:
      throw new Error("Unimplemented type: " + type);
  }
}
// Unpacks a 4-character string (16 bits of payload per char) into a signed
// Long. Inverse of longToString.
function stringToLong(value: string): Long {
  const lowPair = value.charCodeAt(0) | (value.charCodeAt(1) << 16);
  const highPair = value.charCodeAt(2) | (value.charCodeAt(3) << 16);
  return { low: lowPair, high: highPair, unsigned: false };
}
// Packs a Long's four 16-bit halves into a 4-character string.
// Inverse of stringToLong.
function longToString(value: Long): string {
  const { low, high } = value;
  return String.fromCharCode(low & 0xFFFF, low >>> 16, high & 0xFFFF, high >>> 16);
}
// The code below was modified from https://github.com/protobufjs/bytebuffer.js
// which is under the Apache License 2.0.
// Shared scratch buffers for float<->byte reinterpretation: each Uint8Array
// view aliases the same backing store as its Float32/Float64 sibling, so
// writing f32[0]/f64[0] exposes the IEEE-754 bytes through f32_u8/f64_u8
// (platform byte order; little-endian on all supported platforms).
// These bindings are never reassigned, so declare them const (was `let`).
const f32 = new Float32Array(1);
const f32_u8 = new Uint8Array(f32.buffer);
const f64 = new Float64Array(1);
const f64_u8 = new Uint8Array(f64.buffer);
// Sign-extends a 32-bit integer into a Long; `unsigned` records whether the
// value was non-negative.
function intToLong(value: number): Long {
  const low = value | 0;
  return {
    low,
    high: low >> 31, // replicate the sign bit across the high word
    unsigned: low >= 0,
  };
}
// Small free-list of ByteBuffers so encode calls can reuse allocations.
let byteBufferPool: ByteBuffer[] = [];

// Takes a buffer from the pool (reset to empty) or allocates a fresh 64-byte one.
function popByteBuffer(): ByteBuffer {
  const recycled = byteBufferPool.pop();
  if (recycled) {
    recycled.offset = 0;
    recycled.limit = 0;
    return recycled;
  }
  return { bytes: new Uint8Array(64), offset: 0, limit: 0 };
}

// Returns a buffer to the pool for later reuse.
function pushByteBuffer(bb: ByteBuffer): void {
  byteBufferPool.push(bb);
}
// Views an existing byte array as a ByteBuffer positioned at the start with
// the whole array readable.
function wrapByteBuffer(bytes: Uint8Array): ByteBuffer {
  const limit = bytes.length;
  return { bytes, offset: 0, limit };
}
// Returns the written portion of the buffer, avoiding a copy/view when the
// backing array is exactly full.
function toUint8Array(bb: ByteBuffer): Uint8Array {
  const { bytes, limit } = bb;
  if (bytes.length === limit) return bytes;
  return bytes.subarray(0, limit);
}
// Moves the read cursor forward by `offset` bytes; throws when that would
// pass the current limit.
function skip(bb: ByteBuffer, offset: number): void {
  const target = bb.offset + offset;
  if (target > bb.limit) {
    throw new Error('Skip past limit');
  }
  bb.offset = target;
}
// True once the read cursor has consumed everything up to the limit.
function isAtEnd(bb: ByteBuffer): boolean {
  return !(bb.offset < bb.limit);
}
// Reserves `count` writable bytes at the current write position, doubling the
// backing array when needed. Advances offset (and limit, if surpassed) and
// returns the position where the caller should write.
function grow(bb: ByteBuffer, count: number): number {
  const writeAt = bb.offset;
  const end = writeAt + count;
  // Reallocate with 2x headroom when the backing array is too small.
  if (end > bb.bytes.length) {
    const grown = new Uint8Array(end * 2);
    grown.set(bb.bytes);
    bb.bytes = grown;
  }
  bb.offset = end;
  if (end > bb.limit) {
    bb.limit = end;
  }
  return writeAt;
}
// Consumes `count` bytes for reading, returning the position they start at;
// throws when the read would pass the limit.
function advance(bb: ByteBuffer, count: number): number {
  const readAt = bb.offset;
  if (readAt + count > bb.limit) {
    throw new Error('Read past limit');
  }
  bb.offset = readAt + count;
  return readAt;
}
// Reads `count` bytes and returns them as a view (not a copy) into the
// underlying buffer.
function readBytes(bb: ByteBuffer, count: number): Uint8Array {
  const start = advance(bb, count);
  return bb.bytes.subarray(start, start + count);
}
// Appends the full contents of `buffer` at the current write position.
function writeBytes(bb: ByteBuffer, buffer: Uint8Array): void {
  // grow() may reallocate bb.bytes, so it must run before we touch bb.bytes.
  const start = grow(bb, buffer.length);
  bb.bytes.set(buffer, start);
}
/**
 * Reads `count` bytes and decodes them as UTF-8 into a JS string.
 * Malformed sequences contribute U+FFFD replacement characters; valid 4-byte
 * sequences are emitted as surrogate pairs.
 * NOTE(review): on an invalid continuation byte, `i` is not advanced past the
 * offending byte, so that byte is re-examined as a new sequence start — this
 * can emit more than one U+FFFD per malformed sequence; confirm intended.
 */
function readString(bb: ByteBuffer, count: number): string {
  // Sadly a hand-coded UTF8 decoder is much faster than subarray+TextDecoder in V8
  let offset = advance(bb, count);
  let fromCharCode = String.fromCharCode;
  let bytes = bb.bytes;
  let invalid = '\uFFFD';
  let text = '';
  for (let i = 0; i < count; i++) {
    let c1 = bytes[i + offset], c2: number, c3: number, c4: number, c: number;
    // 1 byte
    if ((c1 & 0x80) === 0) {
      text += fromCharCode(c1);
    }
    // 2 bytes
    else if ((c1 & 0xE0) === 0xC0) {
      if (i + 1 >= count) text += invalid;
      else {
        c2 = bytes[i + offset + 1];
        if ((c2 & 0xC0) !== 0x80) text += invalid;
        else {
          c = ((c1 & 0x1F) << 6) | (c2 & 0x3F);
          // reject overlong encodings of ASCII
          if (c < 0x80) text += invalid;
          else {
            text += fromCharCode(c);
            i++;
          }
        }
      }
    }
    // 3 bytes
    else if ((c1 & 0xF0) == 0xE0) {
      if (i + 2 >= count) text += invalid;
      else {
        c2 = bytes[i + offset + 1];
        c3 = bytes[i + offset + 2];
        // both continuation bytes must match 10xxxxxx (checked in one mask)
        if (((c2 | (c3 << 8)) & 0xC0C0) !== 0x8080) text += invalid;
        else {
          c = ((c1 & 0x0F) << 12) | ((c2 & 0x3F) << 6) | (c3 & 0x3F);
          // reject overlong encodings and UTF-16 surrogate code points
          if (c < 0x0800 || (c >= 0xD800 && c <= 0xDFFF)) text += invalid;
          else {
            text += fromCharCode(c);
            i += 2;
          }
        }
      }
    }
    // 4 bytes
    else if ((c1 & 0xF8) == 0xF0) {
      if (i + 3 >= count) text += invalid;
      else {
        c2 = bytes[i + offset + 1];
        c3 = bytes[i + offset + 2];
        c4 = bytes[i + offset + 3];
        // all three continuation bytes must match 10xxxxxx
        if (((c2 | (c3 << 8) | (c4 << 16)) & 0xC0C0C0) !== 0x808080) text += invalid;
        else {
          c = ((c1 & 0x07) << 0x12) | ((c2 & 0x3F) << 0x0C) | ((c3 & 0x3F) << 0x06) | (c4 & 0x3F);
          // reject overlong encodings and code points beyond U+10FFFF
          if (c < 0x10000 || c > 0x10FFFF) text += invalid;
          else {
            // emit as a UTF-16 surrogate pair
            c -= 0x10000;
            text += fromCharCode((c >> 10) + 0xD800, (c & 0x3FF) + 0xDC00);
            i += 3;
          }
        }
      }
    }
    else text += invalid;
  }
  return text;
}
/**
 * Writes `text` as a varint byte-length prefix followed by its UTF-8 bytes.
 * Valid surrogate pairs are combined into a single code point before
 * encoding; the byte count is computed in a first pass so the prefix can be
 * written before the payload.
 */
function writeString(bb: ByteBuffer, text: string): void {
  // Sadly a hand-coded UTF8 encoder is much faster than TextEncoder+set in V8
  let n = text.length;
  let byteCount = 0;
  // Write the byte count first
  for (let i = 0; i < n; i++) {
    let c = text.charCodeAt(i);
    // combine a high surrogate with the following unit into one code point
    if (c >= 0xD800 && c <= 0xDBFF && i + 1 < n) {
      c = (c << 10) + text.charCodeAt(++i) - 0x35FDC00;
    }
    byteCount += c < 0x80 ? 1 : c < 0x800 ? 2 : c < 0x10000 ? 3 : 4;
  }
  writeVarint32(bb, byteCount);
  let offset = grow(bb, byteCount);
  let bytes = bb.bytes;
  // Then write the bytes
  for (let i = 0; i < n; i++) {
    let c = text.charCodeAt(i);
    // same surrogate-pair combination as the counting pass above
    if (c >= 0xD800 && c <= 0xDBFF && i + 1 < n) {
      c = (c << 10) + text.charCodeAt(++i) - 0x35FDC00;
    }
    if (c < 0x80) {
      bytes[offset++] = c;
    } else {
      if (c < 0x800) {
        bytes[offset++] = ((c >> 6) & 0x1F) | 0xC0;
      } else {
        if (c < 0x10000) {
          bytes[offset++] = ((c >> 12) & 0x0F) | 0xE0;
        } else {
          bytes[offset++] = ((c >> 18) & 0x07) | 0xF0;
          bytes[offset++] = ((c >> 12) & 0x3F) | 0x80;
        }
        bytes[offset++] = ((c >> 6) & 0x3F) | 0x80;
      }
      bytes[offset++] = (c & 0x3F) | 0x80;
    }
  }
}
// Appends the written portion ([0, limit)) of `buffer` onto `bb`.
// Fix vs. previous version: the locals were named backwards (`from` was the
// destination, `to` was the source), which was actively misleading; renamed
// to dst/src. Behavior is unchanged.
function writeByteBuffer(bb: ByteBuffer, buffer: ByteBuffer): void {
  // grow() may reallocate bb.bytes, so it must run before caching dst.
  const offset = grow(bb, buffer.limit);
  const dst = bb.bytes;
  const src = buffer.bytes;
  // This for loop is much faster than subarray+set on V8
  for (let i = 0, n = buffer.limit; i < n; i++) {
    dst[i + offset] = src[i];
  }
}
// Reads a single byte, advancing the cursor.
function readByte(bb: ByteBuffer): number {
  const at = advance(bb, 1);
  return bb.bytes[at];
}
// Appends a single byte at the current write position.
function writeByte(bb: ByteBuffer, value: number): void {
  // grow() may reallocate bb.bytes, so it must run before we index bb.bytes.
  const at = grow(bb, 1);
  bb.bytes[at] = value;
}
// Reads a 32-bit float by copying 4 bytes into the shared f32 scratch buffer
// and reinterpreting them (platform byte order; little-endian in practice).
function readFloat(bb: ByteBuffer): number {
  let offset = advance(bb, 4);
  let bytes = bb.bytes;
  // Manual copying is much faster than subarray+set in V8
  f32_u8[0] = bytes[offset++];
  f32_u8[1] = bytes[offset++];
  f32_u8[2] = bytes[offset++];
  f32_u8[3] = bytes[offset++];
  return f32[0];
}
// Writes a 32-bit float by storing it in the shared f32 scratch buffer and
// copying its 4 bytes out (platform byte order; little-endian in practice).
function writeFloat(bb: ByteBuffer, value: number): void {
  let offset = grow(bb, 4);
  let bytes = bb.bytes;
  f32[0] = value;
  // Manual copying is much faster than subarray+set in V8
  bytes[offset++] = f32_u8[0];
  bytes[offset++] = f32_u8[1];
  bytes[offset++] = f32_u8[2];
  bytes[offset++] = f32_u8[3];
}
// Reads a 64-bit float by copying 8 bytes into the shared f64 scratch buffer
// and reinterpreting them (platform byte order; little-endian in practice).
function readDouble(bb: ByteBuffer): number {
  let offset = advance(bb, 8);
  let bytes = bb.bytes;
  // Manual copying is much faster than subarray+set in V8
  f64_u8[0] = bytes[offset++];
  f64_u8[1] = bytes[offset++];
  f64_u8[2] = bytes[offset++];
  f64_u8[3] = bytes[offset++];
  f64_u8[4] = bytes[offset++];
  f64_u8[5] = bytes[offset++];
  f64_u8[6] = bytes[offset++];
  f64_u8[7] = bytes[offset++];
  return f64[0];
}
// Writes a 64-bit float by storing it in the shared f64 scratch buffer and
// copying its 8 bytes out (platform byte order; little-endian in practice).
function writeDouble(bb: ByteBuffer, value: number): void {
  let offset = grow(bb, 8);
  let bytes = bb.bytes;
  f64[0] = value;
  // Manual copying is much faster than subarray+set in V8
  bytes[offset++] = f64_u8[0];
  bytes[offset++] = f64_u8[1];
  bytes[offset++] = f64_u8[2];
  bytes[offset++] = f64_u8[3];
  bytes[offset++] = f64_u8[4];
  bytes[offset++] = f64_u8[5];
  bytes[offset++] = f64_u8[6];
  bytes[offset++] = f64_u8[7];
}
// Reads a 32-bit little-endian integer (the result is signed).
function readInt32(bb: ByteBuffer): number {
  const at = advance(bb, 4);
  const b = bb.bytes;
  return b[at] | (b[at + 1] << 8) | (b[at + 2] << 16) | (b[at + 3] << 24);
}
// Writes a 32-bit integer in little-endian byte order.
function writeInt32(bb: ByteBuffer, value: number): void {
  // grow() may reallocate bb.bytes, so cache it only afterwards.
  const at = grow(bb, 4);
  const b = bb.bytes;
  b[at] = value;
  b[at + 1] = value >> 8;
  b[at + 2] = value >> 16;
  b[at + 3] = value >> 24;
}
// Reads a 64-bit integer as a {low, high} word pair (low word stored first).
function readInt64(bb: ByteBuffer, unsigned: boolean): Long {
  const low = readInt32(bb);
  const high = readInt32(bb);
  return { low, high, unsigned };
}
// Writes a 64-bit {low, high} pair, low word first (little-endian).
function writeInt64(bb: ByteBuffer, value: Long): void {
  const { low, high } = value;
  writeInt32(bb, low);
  writeInt32(bb, high);
}
// Reads a base-128 varint, keeping at most the low 32 bits. Continuation
// bytes beyond bit 32 are still consumed but their payload is discarded.
function readVarint32(bb: ByteBuffer): number {
  let shift = 0;
  let result = 0;
  while (true) {
    const byte = readByte(bb);
    if (shift < 32) result |= (byte & 0x7F) << shift;
    shift += 7;
    if ((byte & 0x80) === 0) break;
  }
  return result;
}
// Writes `value` (treated as unsigned 32-bit) as a base-128 varint: seven
// payload bits per byte, 0x80 flag on every byte except the last.
function writeVarint32(bb: ByteBuffer, value: number): void {
  let remaining = value >>> 0;
  do {
    const lowBits = remaining & 0x7f;
    remaining >>>= 7;
    writeByte(bb, remaining !== 0 ? lowBits | 0x80 : lowBits);
  } while (remaining !== 0);
}
// Reads a base-128 varint of up to ten bytes and assembles it into a 64-bit
// {low, high} pair. Each byte carries seven payload bits (least-significant
// group first); the top bit of each byte signals that another byte follows.
function readVarint64(bb: ByteBuffer, unsigned: boolean): Long {
  // The 64 payload bits accumulate in three 32-bit-safe pieces:
  // part0 = bits 0-27, part1 = bits 28-55, part2 = bits 56-63.
  let part0 = 0;
  let part1 = 0;
  let part2 = 0;
  let b: number;
  // Unrolled continuation cascade: stop at the first byte whose 0x80 flag is clear.
  b = readByte(bb); part0 = (b & 0x7F); if (b & 0x80) {
    b = readByte(bb); part0 |= (b & 0x7F) << 7; if (b & 0x80) {
      b = readByte(bb); part0 |= (b & 0x7F) << 14; if (b & 0x80) {
        b = readByte(bb); part0 |= (b & 0x7F) << 21; if (b & 0x80) {
          b = readByte(bb); part1 = (b & 0x7F); if (b & 0x80) {
            b = readByte(bb); part1 |= (b & 0x7F) << 7; if (b & 0x80) {
              b = readByte(bb); part1 |= (b & 0x7F) << 14; if (b & 0x80) {
                b = readByte(bb); part1 |= (b & 0x7F) << 21; if (b & 0x80) {
                  b = readByte(bb); part2 = (b & 0x7F); if (b & 0x80) {
                    b = readByte(bb); part2 |= (b & 0x7F) << 7;
                  }
                }
              }
            }
          }
        }
      }
    }
  }
  // Recombine the three pieces into the two 32-bit output words.
  return {
    low: part0 | (part1 << 28),
    high: (part1 >>> 4) | (part2 << 24),
    unsigned,
  };
}
// Writes a 64-bit {low, high} value as a base-128 varint (1-10 bytes).
function writeVarint64(bb: ByteBuffer, value: Long): void {
  // Split the 64 bits into 28/28/8-bit pieces so every shift stays inside
  // JavaScript's 32-bit bitwise-integer range.
  let part0 = value.low >>> 0;
  let part1 = ((value.low >>> 28) | (value.high << 4)) >>> 0;
  let part2 = value.high >>> 24;
  // ref: src/google/protobuf/io/coded_stream.cc
  // Output length in bytes, determined by the highest non-zero 7-bit group.
  let size =
    part2 === 0 ?
      part1 === 0 ?
        part0 < 1 << 14 ?
          part0 < 1 << 7 ? 1 : 2 :
          part0 < 1 << 21 ? 3 : 4 :
        part1 < 1 << 14 ?
          part1 < 1 << 7 ? 5 : 6 :
          part1 < 1 << 21 ? 7 : 8 :
      part2 < 1 << 7 ? 9 : 10;
  let offset = grow(bb, size);
  let bytes = bb.bytes;
  // Intentional fall-through: entering at `size` writes that byte and all the
  // ones below it. Every byte except the final one carries the 0x80
  // continuation flag (hence the `size !== N` checks).
  switch (size) {
    case 10: bytes[offset + 9] = (part2 >>> 7) & 0x01;
    case 9: bytes[offset + 8] = size !== 9 ? part2 | 0x80 : part2 & 0x7F;
    case 8: bytes[offset + 7] = size !== 8 ? (part1 >>> 21) | 0x80 : (part1 >>> 21) & 0x7F;
    case 7: bytes[offset + 6] = size !== 7 ? (part1 >>> 14) | 0x80 : (part1 >>> 14) & 0x7F;
    case 6: bytes[offset + 5] = size !== 6 ? (part1 >>> 7) | 0x80 : (part1 >>> 7) & 0x7F;
    case 5: bytes[offset + 4] = size !== 5 ? part1 | 0x80 : part1 & 0x7F;
    case 4: bytes[offset + 3] = size !== 4 ? (part0 >>> 21) | 0x80 : (part0 >>> 21) & 0x7F;
    case 3: bytes[offset + 2] = size !== 3 ? (part0 >>> 14) | 0x80 : (part0 >>> 14) & 0x7F;
    case 2: bytes[offset + 1] = size !== 2 ? (part0 >>> 7) | 0x80 : (part0 >>> 7) & 0x7F;
    case 1: bytes[offset] = size !== 1 ? part0 | 0x80 : part0 & 0x7F;
  }
}
// Reads a zig-zag-encoded signed 32-bit varint.
function readVarint32ZigZag(bb: ByteBuffer): number {
  const encoded = readVarint32(bb);
  // ref: src/google/protobuf/wire_format_lite.h
  // Undo zig-zag: even encodings are non-negative, odd encodings negative.
  return (encoded >>> 1) ^ -(encoded & 1);
}
// Writes a signed 32-bit value with zig-zag encoding so that small
// magnitudes (positive or negative) produce short varints.
function writeVarint32ZigZag(bb: ByteBuffer, value: number): void {
  // ref: src/google/protobuf/wire_format_lite.h
  const encoded = (value << 1) ^ (value >> 31);
  writeVarint32(bb, encoded);
}
// Reads a zig-zag-encoded signed 64-bit varint as a {low, high} pair.
function readVarint64ZigZag(bb: ByteBuffer): Long {
  const raw = readVarint64(bb, /* unsigned */ false);
  // All-ones mask when the encoded value was odd (i.e. decodes to negative).
  const mask = -(raw.low & 1);
  // ref: src/google/protobuf/wire_format_lite.h
  return {
    low: ((raw.low >>> 1) | (raw.high << 31)) ^ mask,
    high: (raw.high >>> 1) ^ mask,
    unsigned: false,
  };
}
function writeVarint64ZigZag(bb: ByteBuffer, value: Long): void {
let low = value.low;
let high = value.high;
let flip = high >> 31;
// ref: src/google/protobuf/wire_format_lite.h
writeVarint64(bb, {
low: (low << 1) ^ flip,
high: ((high << 1) | (low >>> 31)) ^ flip,
unsigned: false,
});
} | the_stack |
// This is the "new" UI rendered in newDesktopFrame.html
// Runs inside a frame: it receives "message" events from the parent window
// and reports back through the global `poster` helper. `$` (jQuery),
// `fabric` (Office UI Fabric JS), `poster`, `mhaStrings`, and `HeaderModel`
// are assumed to be loaded by the host page — TODO confirm against the host markup.
(function () {
    "use strict";
    // Fabric component instances used to show/hide the busy state.
    let overlay = null;
    let spinner = null;
    // Forwards an error to the parent frame; the error object is
    // JSON-stringified for postMessage transport.
    function postError(error, message) {
        poster.postMessageToParent("LogError", { error: JSON.stringify(error), message: message });
    }
    // Instantiates the Fabric components present in the static markup and
    // wires up the original-headers toggle and the nav-bar view switching.
    function initializeFabric() {
        const overlayComponent = document.querySelector(".ms-Overlay");
        // Override click so user can't dismiss overlay
        overlayComponent.addEventListener("click", function (e) {
            e.preventDefault();
            e.stopImmediatePropagation();
        });
        overlay = new fabric["Overlay"](overlayComponent);
        const spinnerElement = document.querySelector(".ms-Spinner");
        spinner = new fabric["Spinner"](spinnerElement);
        spinner.stop();
        const commandBarElements = document.querySelectorAll(".ms-CommandBar");
        let i;
        for (i = 0; i < commandBarElements.length; i++) {
            new fabric["CommandBar"](commandBarElements[i]);
        }
        const commandButtonElements = document.querySelectorAll(".ms-CommandButton");
        for (i = 0; i < commandButtonElements.length; i++) {
            new fabric["CommandButton"](commandButtonElements[i]);
        }
        // Toggle button for the raw original-headers panel; the Add/Remove
        // icon doubles as the expanded/collapsed state marker.
        const buttonElement = document.querySelector("#orig-header-btn");
        new fabric["Button"](buttonElement, function () {
            const btnIcon = $(this).find(".ms-Icon");
            if (btnIcon.hasClass("ms-Icon--Add")) {
                $("#original-headers").show();
                btnIcon.removeClass("ms-Icon--Add").addClass("ms-Icon--Remove");
            } else {
                $("#original-headers").hide();
                btnIcon.removeClass("ms-Icon--Remove").addClass("ms-Icon--Add");
            }
        });
        // Show summary by default
        $(".header-view[data-content='summary-view']").show();
        // Wire up click events for nav buttons
        $("#nav-bar .ms-CommandButton").click(function () {
            // Remove active from current active
            $("#nav-bar .is-active").removeClass("is-active");
            // Add active class to clicked button
            $(this).addClass("is-active");
            // Get content marker
            const content = $(this).attr("data-content");
            // Hide sub-views
            $(".header-view").hide();
            $(".header-view[data-content='" + content + "']").show();
        });
    }
    // Shows the blocking overlay + spinner with a status message.
    function updateStatus(message) {
        $(".status-message").text(message);
        overlay.show();
        spinner.start();
    }
    // Wraps text in a semibold span for use with .html().
    function makeBold(text) {
        return '<span class="ms-fontWeight-semibold">' + text + "</span>";
    }
    // Appends a "Name: value" line to the callout; skipped when value is falsy.
    function addCalloutEntry(name, value, parent) {
        if (value) {
            $("<p/>")
                .addClass("ms-Callout-subText")
                .html(makeBold(name + ": ") + value)
                .appendTo(parent);
        }
    }
    // Parses the raw headers through HeaderModel and renders all four
    // sub-views (summary, received hops, antispam reports, other headers).
    function buildViews(headers) {
        const viewModel = HeaderModel(headers);
        // Build summary view
        const summaryList = $(".summary-list");
        let headerVal;
        let pre;
        let i;
        for (i = 0; i < viewModel.summary.summaryRows.length; i++) {
            if (viewModel.summary.summaryRows[i].value) {
                $("<div/>")
                    .addClass("ms-font-s")
                    .addClass("ms-fontWeight-semibold")
                    .text(viewModel.summary.summaryRows[i].label)
                    .appendTo(summaryList);
                headerVal = $("<div/>")
                    .addClass("code-box")
                    .appendTo(summaryList);
                pre = $("<pre/>").appendTo(headerVal);
                $("<code/>")
                    .text(viewModel.summary.summaryRows[i].value)
                    .appendTo(pre);
            }
        }
        // Save original headers and show ui
        $("#original-headers code").text(viewModel.originalHeaders);
        if (viewModel.originalHeaders) {
            $(".orig-header-ui").show();
        }
        // Build received view
        const receivedList = $(".received-list");
        if (viewModel.receivedHeaders.receivedRows.length > 0) {
            const list = $("<ul/>")
                .addClass("ms-List")
                .appendTo(receivedList);
            for (i = 0; i < viewModel.receivedHeaders.receivedRows.length; i++) {
                const listItem = $("<li/>")
                    .addClass("ms-ListItem")
                    .addClass("ms-ListItem--document")
                    .appendTo(list);
                if (i === 0) {
                    // First hop: show origin directly, no delay indicator.
                    $("<span/>")
                        .addClass("ms-ListItem-primaryText")
                        .html(makeBold("From: ") + viewModel.receivedHeaders.receivedRows[i].from)
                        .appendTo(listItem);
                    $("<span/>")
                        .addClass("ms-ListItem-secondaryText")
                        .html(makeBold("To: ") + viewModel.receivedHeaders.receivedRows[i].by)
                        .appendTo(listItem);
                } else {
                    // Subsequent hops: down-arrow plus a progress bar whose
                    // width visualizes this hop's share of the total delay.
                    const wrap = $("<div/>")
                        .addClass("progress-icon")
                        .appendTo(listItem);
                    const iconbox = $("<div/>")
                        .addClass("ms-font-xxl")
                        .addClass("down-icon")
                        .appendTo(wrap);
                    $("<i/>")
                        .addClass("ms-Icon")
                        .addClass("ms-Icon--Down")
                        .appendTo(iconbox);
                    const delay = $("<div/>")
                        .addClass("ms-ProgressIndicator")
                        .appendTo(wrap);
                    const bar = $("<div/>")
                        .addClass("ms-ProgressIndicator-itemProgress")
                        .appendTo(delay);
                    $("<div/>")
                        .addClass("ms-ProgressIndicator-progressTrack")
                        .appendTo(bar);
                    // percent scaled to a pixel width (jQuery treats a bare
                    // number as px).
                    const width = 1.8 * viewModel.receivedHeaders.receivedRows[i].percent;
                    $("<div/>")
                        .addClass("ms-ProgressIndicator-progressBar")
                        .css("width", width)
                        .appendTo(bar);
                    $("<div/>")
                        .addClass("ms-ProgressIndicator-itemDescription")
                        .text(viewModel.receivedHeaders.receivedRows[i].delay)
                        .appendTo(delay);
                    $("<span/>")
                        .addClass("ms-ListItem-secondaryText")
                        .html(makeBold("To: ") + viewModel.receivedHeaders.receivedRows[i].by)
                        .appendTo(listItem);
                }
                $("<div/>")
                    .addClass("ms-ListItem-selectionTarget")
                    .appendTo(listItem);
                // Callout
                // Per-hop detail popup, shown by the Fabric Callout wired below.
                const callout = $("<div/>")
                    .addClass("ms-Callout is-hidden")
                    .appendTo(listItem);
                const calloutMain = $("<div/>")
                    .addClass("ms-Callout-main")
                    .appendTo(callout);
                const calloutHeader = $("<div/>")
                    .addClass("ms-Callout-header")
                    .appendTo(calloutMain);
                $("<p/>")
                    .addClass("ms-Callout-title")
                    .text("Hop Details")
                    .appendTo(calloutHeader);
                const calloutInner = $("<div/>")
                    .addClass("ms-Callout-inner")
                    .appendTo(calloutMain);
                const calloutContent = $("<div/>")
                    .addClass("ms-Callout-content")
                    .appendTo(calloutInner);
                addCalloutEntry("From", viewModel.receivedHeaders.receivedRows[i].from, calloutContent);
                addCalloutEntry("To", viewModel.receivedHeaders.receivedRows[i].by, calloutContent);
                addCalloutEntry("Time", viewModel.receivedHeaders.receivedRows[i].date, calloutContent);
                addCalloutEntry("Type", viewModel.receivedHeaders.receivedRows[i].with, calloutContent);
                addCalloutEntry("ID", viewModel.receivedHeaders.receivedRows[i].id, calloutContent);
                addCalloutEntry("For", viewModel.receivedHeaders.receivedRows[i].for, calloutContent);
                addCalloutEntry("Via", viewModel.receivedHeaders.receivedRows[i].via, calloutContent);
            }
        }
        // Build antispam view
        const antispamList = $(".antispam-list");
        // Forefront
        let tbody;
        let table;
        let row;
        if (viewModel.forefrontAntiSpamReport.forefrontAntiSpamRows.length > 0) {
            $("<div/>")
                .addClass("ms-font-m")
                .text("Forefront Antispam Report")
                .appendTo(antispamList);
            $("<hr/>").appendTo(antispamList);
            table = $("<table/>")
                .addClass("ms-Table")
                .addClass("ms-Table--fixed")
                .addClass("spam-report")
                .appendTo(antispamList);
            tbody = $("<tbody/>")
                .appendTo(table);
            for (i = 0; i < viewModel.forefrontAntiSpamReport.forefrontAntiSpamRows.length; i++) {
                if (viewModel.forefrontAntiSpamReport.forefrontAntiSpamRows[i].value) {
                    row = $("<tr/>").appendTo(tbody);
                    $("<td/>")
                        .text(viewModel.forefrontAntiSpamReport.forefrontAntiSpamRows[i].label)
                        .appendTo(row);
                    $("<td/>")
                        .html(viewModel.forefrontAntiSpamReport.forefrontAntiSpamRows[i].valueUrl)
                        .appendTo(row);
                }
            }
        }
        // Microsoft
        if (viewModel.antiSpamReport.antiSpamRows.length > 0) {
            $("<div/>")
                .addClass("ms-font-m")
                .text("Microsoft Antispam Report")
                .appendTo(antispamList);
            $("<hr/>").appendTo(antispamList);
            table = $("<table/>")
                .addClass("ms-Table")
                .addClass("ms-Table--fixed")
                .addClass("spam-report")
                .appendTo(antispamList);
            tbody = $("<tbody/>")
                .appendTo(table);
            for (i = 0; i < viewModel.antiSpamReport.antiSpamRows.length; i++) {
                if (viewModel.antiSpamReport.antiSpamRows[i].value) {
                    row = $("<tr/>").appendTo(tbody);
                    $("<td/>")
                        .text(viewModel.antiSpamReport.antiSpamRows[i].label)
                        .appendTo(row);
                    $("<td/>")
                        .html(viewModel.antiSpamReport.antiSpamRows[i].valueUrl)
                        .appendTo(row);
                }
            }
        }
        // Build other view
        const otherList = $(".other-list");
        for (i = 0; i < viewModel.otherHeaders.otherRows.length; i++) {
            if (viewModel.otherHeaders.otherRows[i].value) {
                const headerName = $("<div/>")
                    .addClass("ms-font-s")
                    .addClass("ms-fontWeight-semibold")
                    .text(viewModel.otherHeaders.otherRows[i].header)
                    .appendTo(otherList);
                // Prefer the linked form of the header name when one exists.
                if (viewModel.otherHeaders.otherRows[i].url) {
                    headerName.html(viewModel.otherHeaders.otherRows[i].url);
                }
                headerVal = $("<div/>")
                    .addClass("code-box")
                    .appendTo(otherList);
                pre = $("<pre/>").appendTo(headerVal);
                $("<code/>")
                    .text(viewModel.otherHeaders.otherRows[i].value)
                    .appendTo(pre);
            }
        }
        // Initialize any fabric lists added
        const listElements = document.querySelectorAll(".ms-List");
        for (i = 0; i < listElements.length; i++) {
            new fabric["List"](listElements[i]);
        }
        const listItemElements = document.querySelectorAll(".ms-ListItem");
        for (i = 0; i < listItemElements.length; i++) {
            new fabric["ListItem"](listItemElements[i]);
            // Init corresponding callout
            const calloutElement = listItemElements[i].querySelector(".ms-Callout");
            new fabric["Callout"](calloutElement, listItemElements[i], "right");
        }
    }
    // Hides the busy overlay + spinner shown by updateStatus().
    function hideStatus() {
        spinner.stop();
        overlay.hide();
    }
    // Clears every view, then rebuilds the UI from the given raw headers.
    function renderItem(headers) {
        // Empty data
        $(".summary-list").empty();
        $("#original-headers code").empty();
        $(".orig-header-ui").hide();
        $(".received-list").empty();
        $(".antispam-list").empty();
        $(".other-list").empty();
        $("#error-display .ms-MessageBar-text").empty();
        $("#error-display").hide();
        // Load new itemDescription
        updateStatus(mhaStrings.mhaLoading);
        buildViews(headers);
        hideStatus();
    }
    // Handles rendering of an error.
    // Does not log the error - caller is responsible for calling PostError
    // (Only `message` is displayed; the `error` argument is intentionally unused.)
    function showError(error, message) {
        $("#error-display .ms-MessageBar-text").text(message);
        $("#error-display").show();
    }
    // Dispatches postMessage commands from the parent; messages from any
    // other origin are ignored.
    function eventListener(event) {
        if (!event || event.origin !== poster.site()) return;
        if (event.data) {
            switch (event.data.eventName) {
                case "showError":
                    showError(JSON.parse(event.data.data.error), event.data.data.message);
                    break;
                case "updateStatus":
                    updateStatus(event.data.data);
                    break;
                case "renderItem":
                    renderItem(event.data.data);
                    break;
            }
        }
    }
    // Boot: set up the UI, start listening for parent messages, and tell the
    // parent this frame is ready to receive data.
    $(document).ready(function () {
        try {
            initializeFabric();
            updateStatus(mhaStrings.mhaLoading);
            window.addEventListener("message", eventListener, false);
            poster.postMessageToParent("frameActive");
        }
        catch (e) {
            postError(e, "Failed initializing frame");
            showError(e, "Failed initializing frame");
        }
    });
})();
import * as Types from "./";
import {
Client,
InvokeApiResult
} from "@web3api/core-js";
// Scalar aliases for the Web3API schema's built-in types. Every integer
// alias maps onto the JavaScript `number`; note that `BigInt` and `Json`
// travel as plain strings across the invoke boundary.
export type UInt = number;
export type UInt8 = number;
export type UInt16 = number;
export type UInt32 = number;
export type Int = number;
export type Int8 = number;
export type Int16 = number;
export type Int32 = number;
export type Bytes = Uint8Array;
export type BigInt = string;
export type Json = string;
export type String = string;
export type Boolean = boolean;
// Object type declared by this package's own schema.
export interface Object {
  u: UInt;
  array: Array<Boolean>;
  bytes?: Bytes | null;
}
/// Imported Objects START ///
// Shapes of the objects exchanged with the imported Ethereum plugin.
// `?` + `| null` marks fields the schema declares as nullable.
/* URI: "ens/ethereum.web3api.eth" */
export interface Ethereum_Connection {
  node?: String | null;
  networkNameOrChainId?: String | null;
}
/* URI: "ens/ethereum.web3api.eth" */
export interface Ethereum_TxOverrides {
  gasLimit?: BigInt | null;
  gasPrice?: BigInt | null;
  value?: BigInt | null;
}
/* URI: "ens/ethereum.web3api.eth" */
export interface Ethereum_StaticTxResult {
  result: String;
  error: Boolean;
}
/* URI: "ens/ethereum.web3api.eth" */
export interface Ethereum_TxRequest {
  to?: String | null;
  from?: String | null;
  nonce?: UInt32 | null;
  gasLimit?: BigInt | null;
  gasPrice?: BigInt | null;
  data?: String | null;
  value?: BigInt | null;
  chainId?: UInt32 | null;
  type?: UInt32 | null;
}
/* URI: "ens/ethereum.web3api.eth" */
export interface Ethereum_TxReceipt {
  to: String;
  from: String;
  contractAddress: String;
  transactionIndex: UInt32;
  root?: String | null;
  gasUsed: BigInt;
  logsBloom: String;
  transactionHash: String;
  logs: Array<Types.Ethereum_Log>;
  blockNumber: BigInt;
  blockHash: String;
  confirmations: UInt32;
  cumulativeGasUsed: BigInt;
  effectiveGasPrice: BigInt;
  byzantium: Boolean;
  type: UInt32;
  status?: UInt32 | null;
}
/* URI: "ens/ethereum.web3api.eth" */
export interface Ethereum_Log {
  blockNumber: BigInt;
  blockHash: String;
  transactionIndex: UInt32;
  removed: Boolean;
  address: String;
  data: String;
  topics: Array<String>;
  transactionHash: String;
  logIndex: UInt32;
}
/* URI: "ens/ethereum.web3api.eth" */
export interface Ethereum_EventNotification {
  data: String;
  address: String;
  log: Types.Ethereum_Log;
}
/* URI: "ens/ethereum.web3api.eth" */
export interface Ethereum_Network {
  name: String;
  chainId: Int;
  ensAddress?: String | null;
}
/* URI: "ens/ethereum.web3api.eth" */
export interface Ethereum_TxResponse {
  hash: String;
  to?: String | null;
  from: String;
  nonce: UInt32;
  gasLimit: BigInt;
  gasPrice?: BigInt | null;
  data: String;
  value: BigInt;
  chainId: UInt32;
  blockNumber?: BigInt | null;
  blockHash?: String | null;
  timestamp?: UInt32 | null;
  confirmations: UInt32;
  raw?: String | null;
  r?: String | null;
  s?: String | null;
  v?: UInt32 | null;
  type?: UInt32 | null;
  accessList?: Array<Types.Ethereum_Access> | null;
}
/* URI: "ens/ethereum.web3api.eth" */
export interface Ethereum_Access {
  address: String;
  storageKeys: Array<String>;
}
/// Imported Objects END ///
/// Imported Queries START ///
// Input argument shapes for each imported "query" method; one interface per
// method, matching its schema arguments one-to-one.
/* URI: "ens/ethereum.web3api.eth" */
interface Ethereum_Query_Input_callContractView extends Record<string, unknown> {
  address: String;
  method: String;
  args?: Array<String> | null;
  connection?: Types.Ethereum_Connection | null;
}
/* URI: "ens/ethereum.web3api.eth" */
interface Ethereum_Query_Input_callContractStatic extends Record<string, unknown> {
  address: String;
  method: String;
  args?: Array<String> | null;
  connection?: Types.Ethereum_Connection | null;
  txOverrides?: Types.Ethereum_TxOverrides | null;
}
/* URI: "ens/ethereum.web3api.eth" */
interface Ethereum_Query_Input_encodeParams extends Record<string, unknown> {
  types: Array<String>;
  values: Array<String>;
}
/* URI: "ens/ethereum.web3api.eth" */
interface Ethereum_Query_Input_encodeFunction extends Record<string, unknown> {
  method: String;
  args?: Array<String> | null;
}
/* URI: "ens/ethereum.web3api.eth" */
interface Ethereum_Query_Input_getSignerAddress extends Record<string, unknown> {
  connection?: Types.Ethereum_Connection | null;
}
/* URI: "ens/ethereum.web3api.eth" */
interface Ethereum_Query_Input_getSignerBalance extends Record<string, unknown> {
  blockTag?: BigInt | null;
  connection?: Types.Ethereum_Connection | null;
}
/* URI: "ens/ethereum.web3api.eth" */
interface Ethereum_Query_Input_getSignerTransactionCount extends Record<string, unknown> {
  blockTag?: BigInt | null;
  connection?: Types.Ethereum_Connection | null;
}
/* URI: "ens/ethereum.web3api.eth" */
interface Ethereum_Query_Input_getGasPrice extends Record<string, unknown> {
  connection?: Types.Ethereum_Connection | null;
}
/* URI: "ens/ethereum.web3api.eth" */
interface Ethereum_Query_Input_estimateTransactionGas extends Record<string, unknown> {
  tx: Types.Ethereum_TxRequest;
  connection?: Types.Ethereum_Connection | null;
}
/* URI: "ens/ethereum.web3api.eth" */
interface Ethereum_Query_Input_estimateContractCallGas extends Record<string, unknown> {
  address: String;
  method: String;
  args?: Array<String> | null;
  connection?: Types.Ethereum_Connection | null;
  txOverrides?: Types.Ethereum_TxOverrides | null;
}
/* URI: "ens/ethereum.web3api.eth" */
interface Ethereum_Query_Input_checkAddress extends Record<string, unknown> {
  address: String;
}
/* URI: "ens/ethereum.web3api.eth" */
interface Ethereum_Query_Input_toWei extends Record<string, unknown> {
  eth: String;
}
/* URI: "ens/ethereum.web3api.eth" */
interface Ethereum_Query_Input_toEth extends Record<string, unknown> {
  wei: BigInt;
}
/* URI: "ens/ethereum.web3api.eth" */
interface Ethereum_Query_Input_awaitTransaction extends Record<string, unknown> {
  txHash: String;
  confirmations: UInt32;
  timeout: UInt32;
  connection?: Types.Ethereum_Connection | null;
}
/* URI: "ens/ethereum.web3api.eth" */
interface Ethereum_Query_Input_waitForEvent extends Record<string, unknown> {
  address: String;
  event: String;
  args?: Array<String> | null;
  timeout?: UInt32 | null;
  connection?: Types.Ethereum_Connection | null;
}
/* URI: "ens/ethereum.web3api.eth" */
interface Ethereum_Query_Input_getNetwork extends Record<string, unknown> {
  connection?: Types.Ethereum_Connection | null;
}
/**
 * Invokes one method of the "query" module of the imported
 * "ens/ethereum.web3api.eth" API. Shared by every wrapper below; only the
 * method name and the expected result type vary, and `input` is forwarded
 * untouched.
 */
const invokeEthereumQuery = <TData>(
  method: string,
  input: Record<string, unknown>,
  client: Client
): Promise<InvokeApiResult<TData>> =>
  client.invoke<TData>({
    uri: "ens/ethereum.web3api.eth",
    module: "query",
    method,
    input
  });

/** Typed client-side wrappers for the imported "query" module. */
export const Ethereum_Query = {
  callContractView: async (
    input: Ethereum_Query_Input_callContractView,
    client: Client
  ): Promise<InvokeApiResult<String>> =>
    invokeEthereumQuery<String>("callContractView", input, client),

  callContractStatic: async (
    input: Ethereum_Query_Input_callContractStatic,
    client: Client
  ): Promise<InvokeApiResult<Types.Ethereum_StaticTxResult>> =>
    invokeEthereumQuery<Types.Ethereum_StaticTxResult>("callContractStatic", input, client),

  encodeParams: async (
    input: Ethereum_Query_Input_encodeParams,
    client: Client
  ): Promise<InvokeApiResult<String>> =>
    invokeEthereumQuery<String>("encodeParams", input, client),

  encodeFunction: async (
    input: Ethereum_Query_Input_encodeFunction,
    client: Client
  ): Promise<InvokeApiResult<String>> =>
    invokeEthereumQuery<String>("encodeFunction", input, client),

  getSignerAddress: async (
    input: Ethereum_Query_Input_getSignerAddress,
    client: Client
  ): Promise<InvokeApiResult<String>> =>
    invokeEthereumQuery<String>("getSignerAddress", input, client),

  getSignerBalance: async (
    input: Ethereum_Query_Input_getSignerBalance,
    client: Client
  ): Promise<InvokeApiResult<BigInt>> =>
    invokeEthereumQuery<BigInt>("getSignerBalance", input, client),

  getSignerTransactionCount: async (
    input: Ethereum_Query_Input_getSignerTransactionCount,
    client: Client
  ): Promise<InvokeApiResult<BigInt>> =>
    invokeEthereumQuery<BigInt>("getSignerTransactionCount", input, client),

  getGasPrice: async (
    input: Ethereum_Query_Input_getGasPrice,
    client: Client
  ): Promise<InvokeApiResult<BigInt>> =>
    invokeEthereumQuery<BigInt>("getGasPrice", input, client),

  estimateTransactionGas: async (
    input: Ethereum_Query_Input_estimateTransactionGas,
    client: Client
  ): Promise<InvokeApiResult<BigInt>> =>
    invokeEthereumQuery<BigInt>("estimateTransactionGas", input, client),

  estimateContractCallGas: async (
    input: Ethereum_Query_Input_estimateContractCallGas,
    client: Client
  ): Promise<InvokeApiResult<BigInt>> =>
    invokeEthereumQuery<BigInt>("estimateContractCallGas", input, client),

  checkAddress: async (
    input: Ethereum_Query_Input_checkAddress,
    client: Client
  ): Promise<InvokeApiResult<Boolean>> =>
    invokeEthereumQuery<Boolean>("checkAddress", input, client),

  toWei: async (
    input: Ethereum_Query_Input_toWei,
    client: Client
  ): Promise<InvokeApiResult<BigInt>> =>
    invokeEthereumQuery<BigInt>("toWei", input, client),

  toEth: async (
    input: Ethereum_Query_Input_toEth,
    client: Client
  ): Promise<InvokeApiResult<String>> =>
    invokeEthereumQuery<String>("toEth", input, client),

  awaitTransaction: async (
    input: Ethereum_Query_Input_awaitTransaction,
    client: Client
  ): Promise<InvokeApiResult<Types.Ethereum_TxReceipt>> =>
    invokeEthereumQuery<Types.Ethereum_TxReceipt>("awaitTransaction", input, client),

  waitForEvent: async (
    input: Ethereum_Query_Input_waitForEvent,
    client: Client
  ): Promise<InvokeApiResult<Types.Ethereum_EventNotification>> =>
    invokeEthereumQuery<Types.Ethereum_EventNotification>("waitForEvent", input, client),

  getNetwork: async (
    input: Ethereum_Query_Input_getNetwork,
    client: Client
  ): Promise<InvokeApiResult<Types.Ethereum_Network>> =>
    invokeEthereumQuery<Types.Ethereum_Network>("getNetwork", input, client)
}
// Input argument shapes for each imported "mutation" method; one interface
// per method, matching its schema arguments one-to-one.
/* URI: "ens/ethereum.web3api.eth" */
interface Ethereum_Mutation_Input_callContractMethod extends Record<string, unknown> {
  address: String;
  method: String;
  args?: Array<String> | null;
  connection?: Types.Ethereum_Connection | null;
  txOverrides?: Types.Ethereum_TxOverrides | null;
}
/* URI: "ens/ethereum.web3api.eth" */
interface Ethereum_Mutation_Input_callContractMethodAndWait extends Record<string, unknown> {
  address: String;
  method: String;
  args?: Array<String> | null;
  connection?: Types.Ethereum_Connection | null;
  txOverrides?: Types.Ethereum_TxOverrides | null;
}
/* URI: "ens/ethereum.web3api.eth" */
interface Ethereum_Mutation_Input_sendTransaction extends Record<string, unknown> {
  tx: Types.Ethereum_TxRequest;
  connection?: Types.Ethereum_Connection | null;
}
/* URI: "ens/ethereum.web3api.eth" */
interface Ethereum_Mutation_Input_sendTransactionAndWait extends Record<string, unknown> {
  tx: Types.Ethereum_TxRequest;
  connection?: Types.Ethereum_Connection | null;
}
/* URI: "ens/ethereum.web3api.eth" */
interface Ethereum_Mutation_Input_deployContract extends Record<string, unknown> {
  abi: String;
  bytecode: String;
  args?: Array<String> | null;
  connection?: Types.Ethereum_Connection | null;
}
/* URI: "ens/ethereum.web3api.eth" */
interface Ethereum_Mutation_Input_signMessage extends Record<string, unknown> {
  message: String;
  connection?: Types.Ethereum_Connection | null;
}
/* URI: "ens/ethereum.web3api.eth" */
interface Ethereum_Mutation_Input_sendRPC extends Record<string, unknown> {
  method: String;
  params: Array<String>;
  connection?: Types.Ethereum_Connection | null;
}
/**
 * Invokes one method of the "mutation" module of the imported
 * "ens/ethereum.web3api.eth" API; shared by every wrapper below.
 */
const invokeEthereumMutation = <TData>(
  method: string,
  input: Record<string, unknown>,
  client: Client
): Promise<InvokeApiResult<TData>> =>
  client.invoke<TData>({
    uri: "ens/ethereum.web3api.eth",
    module: "mutation",
    method,
    input
  });

/** Typed client-side wrappers for the imported "mutation" module. */
export const Ethereum_Mutation = {
  callContractMethod: async (
    input: Ethereum_Mutation_Input_callContractMethod,
    client: Client
  ): Promise<InvokeApiResult<Types.Ethereum_TxResponse>> =>
    invokeEthereumMutation<Types.Ethereum_TxResponse>("callContractMethod", input, client),

  callContractMethodAndWait: async (
    input: Ethereum_Mutation_Input_callContractMethodAndWait,
    client: Client
  ): Promise<InvokeApiResult<Types.Ethereum_TxReceipt>> =>
    invokeEthereumMutation<Types.Ethereum_TxReceipt>("callContractMethodAndWait", input, client),

  sendTransaction: async (
    input: Ethereum_Mutation_Input_sendTransaction,
    client: Client
  ): Promise<InvokeApiResult<Types.Ethereum_TxResponse>> =>
    invokeEthereumMutation<Types.Ethereum_TxResponse>("sendTransaction", input, client),

  sendTransactionAndWait: async (
    input: Ethereum_Mutation_Input_sendTransactionAndWait,
    client: Client
  ): Promise<InvokeApiResult<Types.Ethereum_TxReceipt>> =>
    invokeEthereumMutation<Types.Ethereum_TxReceipt>("sendTransactionAndWait", input, client),

  deployContract: async (
    input: Ethereum_Mutation_Input_deployContract,
    client: Client
  ): Promise<InvokeApiResult<String>> =>
    invokeEthereumMutation<String>("deployContract", input, client),

  signMessage: async (
    input: Ethereum_Mutation_Input_signMessage,
    client: Client
  ): Promise<InvokeApiResult<String>> =>
    invokeEthereumMutation<String>("signMessage", input, client),

  sendRPC: async (
    input: Ethereum_Mutation_Input_sendRPC,
    client: Client
  ): Promise<InvokeApiResult<String | null>> =>
    invokeEthereumMutation<String | null>("sendRPC", input, client)
}
/// Imported Queries END /// | the_stack |
import { Histogram } from "./histogram.js";
import { ColorRGBA64 } from "./color-rgba-64.js";
/**
 * Inserts newItem into a list that is already sorted by descending
 * sortPriority, keeping it sorted without a full re-sort. Higher priority
 * sorts toward index 0; on a tie the new item lands after the existing
 * items of equal priority.
 *
 * @public
 */
export function insertIntoSortedList(
  list: PixelBox[],
  newItem: PixelBox,
  sortPriority: (box: PixelBox) => number
): void {
  if (list.length === 0) {
    list.push(newItem);
    return;
  }
  const priority: number = sortPriority(newItem);
  if (priority > sortPriority(list[0])) {
    // New head — strictly higher priority than everything present.
    list.unshift(newItem);
  } else if (priority <= sortPriority(list[list.length - 1])) {
    // New tail — ties go after existing items of the same priority.
    list.push(newItem);
  } else {
    // Somewhere in the middle: insert before the first strictly-lower entry.
    let insertAt: number = 0;
    for (let idx: number = 0; idx < list.length; idx++) {
      if (priority > sortPriority(list[idx])) {
        insertAt = idx;
        break;
      }
    }
    list.splice(insertAt, 0, newItem);
  }
}
/**
* Represents a range of colors in RGB color space.
*
* @public
*/
export class PixelBox {
  // Scans the global histogram once over this box's inclusive RGB index
  // ranges, accumulating the total pixel weight and the pixel-weighted
  // average color of the box.
  constructor(
    globalHistogram: Histogram,
    minRed: number,
    maxRed: number,
    minGreen: number,
    maxGreen: number,
    minBlue: number,
    maxBlue: number
  ) {
    this.pixelCount = 0;
    this.globalHistogram = globalHistogram;
    this.minRed = minRed;
    this.maxRed = maxRed;
    this.minGreen = minGreen;
    this.maxGreen = maxGreen;
    this.minBlue = minBlue;
    this.maxBlue = maxBlue;
    // Ranges are inclusive on both ends, hence the +1.
    this.rangeRed = this.maxRed - this.minRed + 1;
    this.rangeGreen = this.maxGreen - this.minGreen + 1;
    this.rangeBlue = this.maxBlue - this.minBlue + 1;
    this.colorVolume = this.rangeRed * this.rangeGreen * this.rangeBlue;
    let redSum: number = 0;
    let greenSum: number = 0;
    let blueSum: number = 0;
    // The histogram keeps `significantBits` per channel; `factor` maps a
    // bucket index back onto the full 0-255 channel scale.
    const factor: number = 1 << (8 - this.globalHistogram.significantBits);
    for (let r: number = minRed; r <= maxRed; r++) {
      for (let g: number = minGreen; g <= maxGreen; g++) {
        for (let b: number = minBlue; b <= maxBlue; b++) {
          const histoValue: number = this.globalHistogram.getHistogramValue(
            r,
            g,
            b
          );
          this.pixelCount += histoValue;
          // +0.5 samples the center of each histogram bucket.
          redSum += histoValue * (r + 0.5) * factor;
          greenSum += histoValue * (g + 0.5) * factor;
          blueSum += histoValue * (b + 0.5) * factor;
        }
      }
    }
    if (this.pixelCount === 0) {
      // Empty box: fall back to the geometric midpoint of the range so the
      // average is still a sensible representative color.
      this.averageColor = new ColorRGBA64(
        (factor * ((minRed + maxRed + 1) / 2)) / 255,
        (factor * ((minGreen + maxGreen + 1) / 2)) / 255,
        (factor * ((minBlue + maxBlue + 1) / 2)) / 255,
        1
      );
    } else {
      this.averageColor = new ColorRGBA64(
        redSum / this.pixelCount / 255,
        greenSum / this.pixelCount / 255,
        blueSum / this.pixelCount / 255,
        1
      );
    }
  }
  public readonly globalHistogram: Histogram;
  // Total histogram weight (pixel count) falling inside this box.
  public readonly pixelCount: number;
  public readonly minRed: number;
  public readonly maxRed: number;
  public readonly rangeRed: number;
  public readonly minGreen: number;
  public readonly maxGreen: number;
  public readonly rangeGreen: number;
  public readonly minBlue: number;
  public readonly maxBlue: number;
  public readonly rangeBlue: number;
  // rangeRed * rangeGreen * rangeBlue — box volume in histogram buckets.
  public readonly colorVolume: number;
  // Pixel-weighted mean color, or the box midpoint when the box is empty.
  public readonly averageColor: ColorRGBA64;
/**
* Attempts to divide the range of colors represented by this PixelBox into two smaller PixelBox objects.
* This does not actually cut directly at the median, rather it finds the median then cuts halfway through the larger box on either side of that median. The result is that small areas of color are better represented in the final output.
* Based on the Modified Median Cut Quantization implementation from https://github.com/DanBloomberg/leptonica/blob/master/src/colorquant2.c
*/
public modifiedMedianCut = (): [PixelBox | null, PixelBox | null] => {
if (this.rangeRed === 1 && this.rangeGreen === 1 && this.rangeBlue === 1) {
// This box is already sliced as finely as possible
return [this, null];
}
// Cut along the channel with the widest range; ties resolve Red > Green > Blue.
enum CutAxis {
Red,
Green,
Blue,
}
let axis: CutAxis;
let axisRange: number;
if (this.rangeRed >= this.rangeGreen && this.rangeRed >= this.rangeBlue) {
axis = CutAxis.Red;
axisRange = this.rangeRed;
} else if (
this.rangeGreen >= this.rangeRed &&
this.rangeGreen >= this.rangeBlue
) {
axis = CutAxis.Green;
axisRange = this.rangeGreen;
} else {
axis = CutAxis.Blue;
axisRange = this.rangeBlue;
}
// partialSum[i]  = pixel count at or below slice i of the chosen axis (cumulative).
// lookAheadSum[i] = pixel count strictly above slice i (total - partialSum[i]).
const partialSum: number[] = new Array(axisRange);
const lookAheadSum: number[] = new Array(axisRange);
let retLeft: PixelBox | null = null;
let retRight: PixelBox | null = null;
let axisTotal: number = 0;
// This does not actually cut directly at the median, rather it finds the median then
// cuts halfway through the larger box on either side of that median
// The result is that small areas of color are better represented in the final output
// The three branches below are identical except for which channel is iterated on the
// outer loop and which pair of bounds the resulting child boxes split on.
if (axis === CutAxis.Red) {
// Calculate partial sums
for (let r: number = this.minRed; r <= this.maxRed; r++) {
let sum: number = 0;
for (let g: number = this.minGreen; g <= this.maxGreen; g++) {
for (let b: number = this.minBlue; b <= this.maxBlue; b++) {
sum += this.globalHistogram.getHistogramValue(r, g, b);
}
}
axisTotal += sum;
partialSum[r - this.minRed] = axisTotal;
}
for (let i: number = 0; i < partialSum.length; i++) {
lookAheadSum[i] = axisTotal - partialSum[i];
}
// Find the cut point based on partial sums vs total
for (let r: number = this.minRed; r <= this.maxRed; r++) {
if (partialSum[r - this.minRed] >= axisTotal / 2) {
const left: number = r - this.minRed;
const right: number = this.maxRed - r;
let cut: number;
if (left <= right) {
cut = Math.min(this.maxRed - 1, Math.floor(r + right / 2));
} else {
cut = Math.max(this.minRed, Math.floor(r - 1 - left / 2));
}
// Adjust the cut point if either side has 0 pixelCount
while (partialSum[cut - this.minRed] <= 0 && cut < this.maxRed - 1) {
cut++;
}
let lookAhead: number = lookAheadSum[cut - this.minRed];
while (
lookAhead === 0 &&
cut > this.minRed &&
partialSum[cut - this.minRed - 1] !== 0
) {
cut--;
lookAhead = lookAheadSum[cut - this.minRed];
}
// Left child keeps [minRed, cut]; right child gets [cut + 1, maxRed].
retLeft = new PixelBox(
this.globalHistogram,
this.minRed,
cut,
this.minGreen,
this.maxGreen,
this.minBlue,
this.maxBlue
);
retRight = new PixelBox(
this.globalHistogram,
cut + 1,
this.maxRed,
this.minGreen,
this.maxGreen,
this.minBlue,
this.maxBlue
);
break;
}
}
} else if (axis === CutAxis.Green) {
// Calculate partial sums
for (let g: number = this.minGreen; g <= this.maxGreen; g++) {
let sum: number = 0;
for (let r: number = this.minRed; r <= this.maxRed; r++) {
for (let b: number = this.minBlue; b <= this.maxBlue; b++) {
sum += this.globalHistogram.getHistogramValue(r, g, b);
}
}
axisTotal += sum;
partialSum[g - this.minGreen] = axisTotal;
}
for (let i: number = 0; i < partialSum.length; i++) {
lookAheadSum[i] = axisTotal - partialSum[i];
}
// Find the cut point based on partial sums vs total
for (let g: number = this.minGreen; g <= this.maxGreen; g++) {
if (partialSum[g - this.minGreen] >= axisTotal / 2) {
const left: number = g - this.minGreen;
const right: number = this.maxGreen - g;
let cut: number;
if (left <= right) {
cut = Math.min(this.maxGreen - 1, Math.floor(g + right / 2));
} else {
cut = Math.max(this.minGreen, Math.floor(g - 1 - left / 2));
}
// Adjust the cut point if either side has 0 pixelCount
while (
partialSum[cut - this.minGreen] <= 0 &&
cut < this.maxGreen - 1
) {
cut++;
}
let lookAhead: number = lookAheadSum[cut - this.minGreen];
while (
lookAhead === 0 &&
cut > this.minGreen &&
partialSum[cut - this.minGreen - 1] !== 0
) {
cut--;
lookAhead = lookAheadSum[cut - this.minGreen];
}
// Left child keeps [minGreen, cut]; right child gets [cut + 1, maxGreen].
retLeft = new PixelBox(
this.globalHistogram,
this.minRed,
this.maxRed,
this.minGreen,
cut,
this.minBlue,
this.maxBlue
);
retRight = new PixelBox(
this.globalHistogram,
this.minRed,
this.maxRed,
cut + 1,
this.maxGreen,
this.minBlue,
this.maxBlue
);
break;
}
}
} else {
// Calculate partial sums
for (let b: number = this.minBlue; b <= this.maxBlue; b++) {
let sum: number = 0;
for (let r: number = this.minRed; r <= this.maxRed; r++) {
for (let g: number = this.minGreen; g <= this.maxGreen; g++) {
sum += this.globalHistogram.getHistogramValue(r, g, b);
}
}
axisTotal += sum;
partialSum[b - this.minBlue] = axisTotal;
}
for (let i: number = 0; i < partialSum.length; i++) {
lookAheadSum[i] = axisTotal - partialSum[i];
}
// Find the cut point based on partial sums vs total
for (let b: number = this.minBlue; b <= this.maxBlue; b++) {
if (partialSum[b - this.minBlue] >= axisTotal / 2) {
const left: number = b - this.minBlue;
const right: number = this.maxBlue - b;
let cut: number;
if (left <= right) {
cut = Math.min(this.maxBlue - 1, Math.floor(b + right / 2));
} else {
cut = Math.max(this.minBlue, Math.floor(b - 1 - left / 2));
}
// Adjust the cut point if either side has 0 pixelCount
while (
partialSum[cut - this.minBlue] <= 0 &&
cut < this.maxBlue - 1
) {
cut++;
}
let lookAhead: number = lookAheadSum[cut - this.minBlue];
while (
lookAhead === 0 &&
cut > this.minBlue &&
partialSum[cut - this.minBlue - 1] !== 0
) {
cut--;
lookAhead = lookAheadSum[cut - this.minBlue];
}
// Left child keeps [minBlue, cut]; right child gets [cut + 1, maxBlue].
retLeft = new PixelBox(
this.globalHistogram,
this.minRed,
this.maxRed,
this.minGreen,
this.maxGreen,
this.minBlue,
cut
);
retRight = new PixelBox(
this.globalHistogram,
this.minRed,
this.maxRed,
this.minGreen,
this.maxGreen,
cut + 1,
this.maxBlue
);
break;
}
}
}
return [retLeft, retRight];
};
} | the_stack |
import * as PropTypes from 'prop-types';
import * as React from 'react';
import * as RN from 'react-native';
import * as RNW from 'react-native-windows';
import AccessibilityUtil, { ImportantForAccessibilityValue } from '../native-common/AccessibilityUtil';
import assert from '../common/assert';
import { Button as ButtonBase, ButtonContext as ButtonContextBase } from '../native-common/Button';
import EventHelpers from '../native-common/utils/EventHelpers';
import { applyFocusableComponentMixin, FocusManagerFocusableComponent } from '../native-desktop/utils/FocusManager';
import { Types } from '../common/Interfaces';
import UserInterface from '../native-common/UserInterface';
// Virtual key codes the focusable wrapper is asked to handle.
const KEY_CODE_ENTER = 13;
const KEY_CODE_SPACE = 32;
const KEY_CODE_F10 = 121;
// NOTE(review): 500 is not a standard virtual-key code — presumably a custom value the
// native layer emits for the Windows application/context-menu key; confirm.
const KEY_CODE_APP = 500;
// Keys handled on key-down (press + context menu triggers) vs key-up (SPACE press).
const DOWN_KEYCODES = [KEY_CODE_SPACE, KEY_CODE_ENTER, KEY_CODE_F10, KEY_CODE_APP];
const UP_KEYCODES = [KEY_CODE_SPACE];
const FocusableAnimatedView = RN.Animated.View; // RNW.createFocusableComponent(RN.Animated.View);
// Extra context fields this Button provides to its subtree (see getChildContext below).
export interface ButtonContext extends ButtonContextBase {
isRxParentAContextMenuResponder?: boolean;
isRxParentAFocusableInSameFocusManager?: boolean;
}
/**
 * Windows-specific Button.
 *
 * Extends the shared native Button with keyboard activation (ENTER on key-down,
 * SPACE on key-up), keyboard-triggered context menus (App key / Shift+F10),
 * focus-manager integration, and propagation of accessibility properties to the
 * native focusable host view.
 */
export class Button extends ButtonBase implements React.ChildContextProvider<ButtonContext>, FocusManagerFocusableComponent {
    // Context is provided by super - just re-typing here
    context!: ButtonContext;

    static childContextTypes: React.ValidationMap<any> = {
        isRxParentAContextMenuResponder: PropTypes.bool,
        isRxParentAFocusableInSameFocusManager: PropTypes.bool,
        ...ButtonBase.childContextTypes,
    };

    // True while the button is focused via keyboard navigation; drives hover visuals
    // so keyboard users get the same feedback as mouse users.
    private _isFocusedWithKeyboard = false;

    // Offset to show context menu using keyboard.
    protected _getContextMenuOffset() {
        return { x: 0, y: 0 };
    }

    /**
     * Wraps the children in a focusable animated view wired up with keyboard,
     * focus, and accessibility handlers. `onMount` receives the native element.
     */
    protected _render(internalProps: RN.ViewProps, onMount: (btn: any) => void): JSX.Element {
        // RNW.FocusableProps tabIndex: default is 0.
        // -1 has no special semantic similar to DOM.
        const tabIndex: number | undefined = this.getTabIndex();
        // RNW.FocusableProps windowsTabFocusable:
        // - true: keyboard focusable through any mean, receives keyboard input
        // - false: not focusable at all, doesn't receive keyboard input
        // The intermediate "focusable, but not in the tab order" case is not supported.
        const windowsTabFocusable: boolean = !this.props.disabled && tabIndex !== undefined && tabIndex >= 0;
        const importantForAccessibility: ImportantForAccessibilityValue | undefined = this.getImportantForAccessibility();
        // We don't use 'string' ref type inside ReactXP
        const originalRef = (internalProps as any).ref;
        assert(!(typeof originalRef === 'string'), 'Button: ReactXP must not use string refs internally');
        const componentRef = originalRef as Function;
        const focusableViewProps: RNW.FocusableWindowsProps<RN.ExtendedViewProps | RNW.AccessibilityEvents> = {
            ...internalProps,
            ref: onMount,
            componentRef: componentRef,
            onMouseEnter: this._onMouseEnter,
            onMouseLeave: this._onMouseLeave,
            isTabStop: windowsTabFocusable,
            tabIndex: tabIndex,
            importantForAccessibility: importantForAccessibility,
            disableSystemFocusVisuals: false,
            handledKeyDownKeys: DOWN_KEYCODES,
            handledKeyUpKeys: UP_KEYCODES,
            onKeyDown: this._onKeyDown,
            onKeyUp: this._onKeyUp,
            onFocus: this._onFocus,
            onBlur: this._onBlur,
            onAccessibilityTap: this._onAccessibilityTap,
        };
        return (
            <FocusableAnimatedView { ...focusableViewProps as any }>
                { this.props.children }
            </FocusableAnimatedView>
        );
    }

    /** Forwards focus to the underlying native element, if mounted. */
    focus() {
        if (this._buttonElement && this._buttonElement.focus) {
            this._buttonElement.focus();
        }
    }

    /** Forwards blur to the underlying native element, if mounted. */
    blur() {
        if (this._buttonElement && this._buttonElement.blur) {
            this._buttonElement.blur();
        }
    }

    setNativeProps(nativeProps: RN.ViewProps) {
        // Redirect to focusable component if present.
        if (this._buttonElement) {
            this._buttonElement.setNativeProps(nativeProps);
        } else {
            super.setNativeProps(nativeProps);
        }
    }

    getChildContext(): ButtonContext {
        const childContext: ButtonContext = super.getChildContext();
        // We use a context field to signal any component in the subtree to disable any system provided context menus.
        // This is not a bulletproof mechanism, context changes not being guaranteed to be detected by children, depending on factors
        // like shouldComponentUpdate methods on intermediate nodes, etc.
        // Fortunately press handlers are pretty stable.
        // This instance can be a responder (even when button is disabled). It may or may not have to invoke an onContextMenu handler, but
        // it will consume all corresponding touch events, so overwriting any parent-set value is the correct thing to do.
        childContext.isRxParentAContextMenuResponder = !!this.props.onContextMenu;
        // This button will hide other "accessible focusable" controls as part of being restricted/limited by a focus manager
        // (more detailed description is in windows/View.tsx)
        childContext.isRxParentAFocusableInSameFocusManager = true;
        return childContext;
    }

    // Screen-reader "invoke" gesture behaves like a press.
    private _onAccessibilityTap = (e: RN.NativeSyntheticEvent<any>): void => {
        if (!this.props.disabled && this.props.onPress) {
            this.props.onPress(e);
        }
    };

    // Key-down: forward to onKeyPress, trigger press on ENTER, and open the context
    // menu on App key or Shift+F10 (simulating a mouse event at the button's position).
    private _onKeyDown = (e: React.SyntheticEvent<any>): void => {
        if (!this.props.disabled) {
            const keyEvent = EventHelpers.toKeyboardEvent(e);
            if (this.props.onKeyPress) {
                this.props.onKeyPress(keyEvent);
            }
            if (this.props.onPress) {
                const key = keyEvent.keyCode;
                // ENTER triggers press on key down
                if (key === KEY_CODE_ENTER) {
                    this.props.onPress(keyEvent);
                }
            }
            if (this.props.onContextMenu) {
                const key = keyEvent.keyCode;
                if ((key === KEY_CODE_APP) || (key === KEY_CODE_F10 && keyEvent.shiftKey)) {
                    if (this._isMounted) {
                        UserInterface.measureLayoutRelativeToWindow(this).then( layoutInfo => {
                            // need to simulate the mouse event so that we
                            // can show the context menu in the right position
                            if (this._isMounted) {
                                const mouseEvent = EventHelpers.keyboardToMouseEvent(keyEvent, layoutInfo,
                                    this._getContextMenuOffset());
                                if (this.props.onContextMenu) {
                                    this.props.onContextMenu(mouseEvent);
                                }
                            }
                        }).catch(err => {
                            // Fixed: message previously said "measureKayoutRelativeToWindow"
                            // and the catch variable shadowed the handler's event parameter.
                            console.warn('Button measureLayoutRelativeToWindow exception: ' + JSON.stringify(err));
                        });
                    }
                }
            }
        }
    };

    // Key-up: SPACE triggers press (matching native Windows button behavior).
    private _onKeyUp = (e: React.SyntheticEvent<any>): void => {
        const keyEvent = EventHelpers.toKeyboardEvent(e);
        if (keyEvent.keyCode === KEY_CODE_SPACE && !this.props.disabled && this.props.onPress) {
            this.props.onPress(keyEvent);
        }
    };

    // When we get focus on an element, show the hover effect on the element.
    // This ensures that users using keyboard also get the similar experience as mouse users for accessibility.
    private _onFocus = (e: React.SyntheticEvent<any>): void => {
        if (e.currentTarget === e.target) {
            this.onFocus();
        }
        this._isFocusedWithKeyboard = UserInterface.isNavigatingWithKeyboard();
        this._onHoverStart(e);
        if (this.props.onFocus) {
            this.props.onFocus(EventHelpers.toFocusEvent(e));
        }
    };

    private _onBlur = (e: React.SyntheticEvent<any>): void => {
        this._isFocusedWithKeyboard = false;
        this._onHoverEnd(e);
        if (this.props.onBlur) {
            this.props.onBlur(EventHelpers.toFocusEvent(e));
        }
    };

    // Hover starts on mouse-over OR keyboard focus; guarded so it fires only once.
    protected _onHoverStart = (e: React.SyntheticEvent<any>) => {
        if (!this._isHoverStarted && (this._isMouseOver || this._isFocusedWithKeyboard)) {
            this._isHoverStarted = true;
            if (this.props.onHoverStart) {
                this.props.onHoverStart(e);
            }
        }
    };

    // Hover ends only when neither the mouse nor keyboard focus remains on the button.
    protected _onHoverEnd = (e: React.SyntheticEvent<any>) => {
        if (this._isHoverStarted && !this._isMouseOver && !this._isFocusedWithKeyboard) {
            this._isHoverStarted = false;
            if (this.props.onHoverEnd) {
                this.props.onHoverEnd(e);
            }
        }
    };

    // From FocusManagerFocusableComponent interface
    //
    onFocus() {
        // Focus Manager hook
    }

    getTabIndex(): number | undefined {
        // Button defaults to a tabIndex of 0
        // Focus Manager may override this
        return this.props.tabIndex || 0;
    }

    getImportantForAccessibility(): ImportantForAccessibilityValue | undefined {
        // Focus Manager may override this
        // We force a default of YES if no property is provided, consistent with the base class
        return AccessibilityUtil.importantForAccessibilityToString(this.props.importantForAccessibility,
            Types.ImportantForAccessibility.Yes);
    }

    /** Pushes current tabIndex/focusability/accessibility values to the native element. */
    updateNativeAccessibilityProps(): void {
        if (this._buttonElement) {
            const tabIndex: number | undefined = this.getTabIndex();
            const windowsTabFocusable: boolean = !this.props.disabled && tabIndex !== undefined && tabIndex >= 0;
            const importantForAccessibility: ImportantForAccessibilityValue | undefined = this.getImportantForAccessibility();
            this._buttonElement.setNativeProps({
                tabIndex: tabIndex,
                isTabStop: windowsTabFocusable,
                importantForAccessibility: importantForAccessibility,
            });
        }
    }
}
// Register Button with the focus manager so it can track focus and override
// tabIndex / importantForAccessibility via the FocusManagerFocusableComponent hooks.
applyFocusableComponentMixin(Button);
export default Button;
import React, { useState } from 'react';
import { I18nManager, Pressable, StatusBar, StyleSheet, Switch, Text, TextInput, View } from 'react-native';
import { SafeAreaProvider, SafeAreaView } from 'react-native-safe-area-context';
import { Slider } from '@sharcoux/slider';
import { PageScrollView } from 'pagescrollview';
import tinycolor from 'tinycolor2';
import { Shadow } from './src/index'; // Aliased in Sandbox in dev.
// Initial values for the sandbox controls below.
// NOTE(review): 'distace' is a typo for 'distance'; it is read as `defaults.distace`
// inside App, so both spots must be renamed together.
const defaults = {
distace: 50,
borderRadius: 30,
width: 200,
height: 200,
startColor: tinycolor('#00000020').toHex8String(),
finalColor: tinycolor('#0000').toHex8String(),
childColor: tinycolor('#fff').toHex8String(),
};
/**
 * Interactive sandbox for react-native-shadow-2: a settings panel of sliders,
 * switches, and color inputs on one side and a live <Shadow> preview on the other.
 */
export const App: React.FC = () => {
// Shadow geometry controls.
const [distance, setDistance] = useState(defaults.distace);
const [borderRadius, setBorderRadius] = useState(defaults.borderRadius);
const [offsetX, setOffsetX] = useState(0);
const [offsetY, setOffsetY] = useState(0);
const [paintInside, setPaintInside] = useState<boolean | undefined>(undefined);
const [getChildRadius, setGetChildRadius] = useState(true);
const [getViewStyleRadius, setGetViewStyleRadius] = useState(true);
// Either the explicit `size` prop is passed to Shadow (doUseSizeProp) or the
// child's own width/height are used and Shadow measures them.
const [size, setSize] = useState([defaults.width, defaults.height] as [number, number]);
const [doUseSizeProp, setDoUseSizeProp] = useState(true);
const [childWidth, setChildWidth] = useState(defaults.width);
const [childHeight, setChildHeight] = useState(defaults.height);
// Colors are kept as hex8 strings; the TextInputs only commit valid tinycolor input.
const [startColor, setStartColor] = useState(defaults.startColor);
const [finalColor, setFinalColor] = useState(defaults.finalColor);
const [childColor, setChildColor] = useState(defaults.childColor);
const [rtl, setRtl] = useState(false);
// const [inset, setInset] = useState(true);
return (
<SafeAreaProvider>
<SafeAreaView style={{ flex: 1 }}>
<StatusBar backgroundColor={'#222'}/>
<PageScrollView viewStyle={styles.container}>
<Text style={styles.title}>{`react-native-shadow-2 sandbox`}</Text>
<Text style={styles.subtitle}>{`By SrBrahma @ https://github.com/SrBrahma/react-native-shadow-2`}</Text>
<View style={styles.sandbox}>
{/** Can't get this scroll to work properly in web */}
<View style={styles.settings}>
{/** View necessary so the settings won't grow too large in width */}
<View>
<MySlider name='Size Width Prop' step={0.1} range={[0, 200]} value={size[0]} onValueChange={(v)=>setSize([v, size[1]])}/>
<MySlider name='Size Height Prop' step={0.1} range={[0, 200]} value={size[1]} onValueChange={(v)=>setSize([size[0], v])}/>
<MySwitch
name='Use Size Prop' value={doUseSizeProp} onValueChange={setDoUseSizeProp}
description={'True uses the size prop (width and\nheight above), else obtains the child size.'}
/>
<MySlider name='Child Width' step={0.1} range={[0, 200]} value={childWidth} onValueChange={setChildWidth}/>
<MySlider name='Child Height' step={0.1} range={[0, 200]} value={childHeight} onValueChange={setChildHeight}/>
<MySlider name='Distance' value={distance} onValueChange={setDistance}
range={[-10, 100]} step={0.1} // min -10 to show < 0 won't do anything
/>
<MySlider name='Border Radius' value={borderRadius} onValueChange={setBorderRadius}
range={[-10, 100]} step={0.1} // min -10 to show < 0 won't do anything
/>
<MySlider name='Offset X' range={[-20, 20]} value={offsetX} onValueChange={setOffsetX}/>
<MySlider name='Offset Y' range={[-20, 20]} value={offsetY} onValueChange={setOffsetY}/>
<NameValue name='Start Color' value={startColor} valueMonospace/>
<TextInput style={styles.textInput} defaultValue={defaults.startColor} autoCorrect={false} onChangeText={(text) => {
const color = tinycolor(text);
if (color.isValid()) // Only change if valid input
setStartColor(color.toHex8String());
}}/>
<NameValue name='Final Color' value={finalColor} valueMonospace/>
<TextInput style={styles.textInput} defaultValue={defaults.finalColor} autoCorrect={false} onChangeText={(text) => {
const color = tinycolor(text);
if (color.isValid())
setFinalColor(color.toHex8String());
}}/>
<NameValue name='Child Color' value={childColor} valueMonospace/>
<TextInput style={styles.textInput} defaultValue={defaults.childColor} autoCorrect={false} onChangeText={(text) => {
const color = tinycolor(text);
if (color.isValid())
setChildColor(color.toHex8String());
}}/>
<MySwitch
name='Use RTL' value={rtl} onValueChange={(v) => {setRtl(v); I18nManager.forceRTL(v);}}
/>
{/* <NameValue name='Paint Inside' value={paintInside}/> */}
{/* <RadioForm // this $%&# added animations to all views.
initial={undefined}
radio_props={[{ label: 'undefined', value: undefined }, { label: 'false', value: false }, { label: 'true', value: true }] as any}
onPress={(v) => setPaintInside(v)}
formHorizontal
labelHorizontal={false}
/> */}
</View>
</View>
{/* Max child width is 200 and max dist is 100. Total max is 400. */}
<View style={{ width: 420, height: 420, justifyContent: 'center', alignItems: 'center' }}>
<Shadow
distance={distance}
startColor={startColor}
finalColor={finalColor}
offset={(offsetX || offsetY) ? [offsetX, offsetY] : undefined} // To test paintInside default
paintInside={paintInside}
getChildRadius={getChildRadius}
getViewStyleRadius={getViewStyleRadius}
containerViewStyle={{ margin: 100 }}
size={doUseSizeProp ? size : undefined}
radius={getChildRadius ? undefined : borderRadius}
// TopEnd to check if it's supporting the Start/End combinations. When uncommenting this, also comment radius prop above.
// viewStyle={[doUseSizeProp && { backgroundColor: childColor }, { borderTopLeftRadius: 100, borderTopEndRadius: 10 }]}
viewStyle={[doUseSizeProp && { backgroundColor: childColor }]}
>
<View style={[
!doUseSizeProp && { width: childWidth, height: childHeight }, {
backgroundColor: childColor,
// If borderRadius change from a positive value to a negative one, it won't change the current radius.
// This is here just to avoid the slider causing it to happen, for fast movements. You can disable this line
// to see what I mean. Nothing to worry about in prod envs.
borderRadius: Math.max(borderRadius, 0),
},
]}/>
</Shadow>
</View>
</View>
</PageScrollView>
</SafeAreaView>
</SafeAreaProvider>
);
};
/** Single settings row: label on the left, formatted current value on the right. */
const NameValue: React.FC<{
  name: string; value: string | number | boolean | undefined; valueMonospace?: boolean;
}> = ({ name, value, valueMonospace = false }) => {
  // Numbers get one decimal place with a trailing ".0" stripped
  // (https://stackoverflow.com/a/5623195/10247962); everything else is stringified.
  let prettyValue: string;
  if (typeof value === 'number') {
    prettyValue = value.toFixed(1).replace(/[.,]0+$/, '');
  } else {
    prettyValue = String(value);
  }
  const valueStyle = { fontSize: 16, fontWeight: 'bold', fontFamily: valueMonospace ? 'monospace' : undefined } as const;
  return (
    <View style={{
      flexDirection: 'row',
      justifyContent: 'space-between',
      marginBottom: 2,
    }}>
      <Text style={{ fontSize: 16 }}>{name}</Text>
      <Text style={valueStyle}>{prettyValue}</Text>
    </View>
  );
};
/** Labeled slider with "-" and "+" buttons that nudge the value by one step. */
const MySlider: React.FC<{
  name: string;
  step?: number;
  range: [min: number, max: number];
  value: number;
  onValueChange: (value: number) => void;
}> = ({ name, step = 1, range, value, onValueChange }) => {
  const [min, max] = range;
  // Shared pieces for the two nudge buttons.
  const nudge = (delta: number) => () => onValueChange(value + delta);
  const buttonStyle = ({ pressed }: { pressed: boolean }) => [styles.decIncButton, pressed && { backgroundColor: '#bbb' }];
  const buttonTextStyle = { fontSize: 16, fontWeight: 'bold' } as const;
  return (
    <View style={{ marginBottom: 18 }}>
      <NameValue name={name} value={value}/>
      <View style={{ flexDirection: 'row' }}>
        <Pressable onPress={nudge(-step)} style={buttonStyle}>
          <Text selectable={false} style={buttonTextStyle}>{'-'}</Text>
        </Pressable>
        <Slider
          style={{ width: 140, marginHorizontal: 20 }}
          minimumValue={min}
          maximumValue={max}
          step={step}
          value={value}
          onValueChange={onValueChange}
        />
        <Pressable onPress={nudge(step)} style={buttonStyle}>
          <Text selectable={false} style={buttonTextStyle}>{'+'}</Text>
        </Pressable>
      </View>
    </View>
  );
};
/** Labeled switch with an optional multi-line description rendered below it. */
const MySwitch: React.FC<{
  name: string;
  value: boolean;
  description?: string;
  onValueChange: (value: boolean) => void;
}> = ({ name, onValueChange, value, description }) => {
  // Each description line becomes its own single-line <Text>, so long text cannot
  // widen the settings panel (workaround kept from the original implementation).
  const descriptionLines = description?.split('\n') ?? [];
  return (
    <View style={{ marginTop: 2, marginBottom: 18, flexShrink: 1 }}>
      <View style={{ flexDirection: 'row', justifyContent: 'space-between' }}>
        <Text style={{ fontSize: 16 }}>{name}</Text>
        <Switch value={value} onValueChange={onValueChange}/>
      </View>
      <View style={{ marginTop: 4, marginLeft: 8 }}>
        {descriptionLines.map((line) => (
          <Text style={styles.description} numberOfLines={1} key={line}>{line}</Text>
        ))}
      </View>
    </View>
  );
};
// Flex all the way up to settings ScrollView: https://necolas.github.io/react-native-web/docs/scroll-view/
// Shared styles for the sandbox screen.
const styles = StyleSheet.create({
container: {
flex: 1,
paddingHorizontal: 30,
backgroundColor: '#f0f0f0',
alignItems: 'center',
justifyContent: 'center',
},
title: {
textAlign: 'center',
fontWeight: 'bold',
fontSize: 30,
marginTop: 20,
marginBottom: 20,
},
subtitle: {
textAlign: 'center',
fontWeight: 'bold',
fontSize: 14,
color: '#444',
marginBottom: 10,
},
// Wrap-reverse keeps the shadow preview above the settings panel on small screens.
sandbox: {
flex: 1,
flexWrap: 'wrap-reverse', // to make it responsive, with the shadow component being above the settings on small screens
flexDirection: 'row',
alignItems: 'flex-end',
alignContent: 'space-between',
justifyContent: 'space-evenly',
paddingBottom: 40,
},
settings: {
borderRadius: 8,
backgroundColor: '#e5e5e5',
paddingHorizontal: 40,
paddingVertical: 30,
marginTop: 20,
alignItems: 'center',
justifyContent: 'center',
},
// Used for the per-line MySwitch description text.
description: {
color: '#222',
fontStyle: 'italic',
includeFontPadding: false,
},
button: {
paddingVertical: 10,
paddingHorizontal: 20,
backgroundColor: '#fff',
borderRadius: 4,
marginBottom: 18,
},
// Monospace input used for the hex color fields.
textInput: {
backgroundColor: '#fff',
borderColor: '#222',
borderRadius: 3,
paddingVertical: 3,
textAlign: 'center',
textAlignVertical: 'center',
paddingHorizontal: 8,
fontSize: 14,
borderWidth: StyleSheet.hairlineWidth,
marginBottom: 12,
fontFamily: 'monospace',
},
// "-" / "+" nudge buttons flanking each slider.
decIncButton: {
backgroundColor: '#fff',
padding: 5,
paddingHorizontal: 12,
borderRadius: 4,
},
});
import { IDataSet } from '../../src/base/engine';
import { pivot_dataset } from '../base/datasource.spec';
import { PivotView } from '../../src/pivotview/base/pivotview';
import { createElement, remove, EmitType, EventHandler, extend } from '@syncfusion/ej2-base';
import { GroupingBar } from '../../src/common/grouping-bar/grouping-bar';
import { FieldList } from '../../src/common/actions/field-list';
import { TreeView } from '@syncfusion/ej2-navigations';
import { Dialog } from '@syncfusion/ej2-popups';
import { AggregateEventArgs, ColumnRenderEventArgs } from '../../src/common/base/interface';
import * as util from '../utils.spec';
import { profile, inMB, getMemoryProfile } from '../common.spec';
/// Spec for Aggregate Cell Info Event ///
describe('Pivot Grid with AggregateCellInfo Event', () => {
// Guard: bail out when the environment lacks window.performance (needed by the
// memory-profiling helpers these specs use).
beforeAll(() => {
const isDef = (o: any) => o !== undefined && o !== null;
if (!isDef(window.performance)) {
console.log("Unsupported environment, window.performance.memory is unavailable");
// NOTE(review): this callback is an arrow function, so `this` is not the test
// context — `this.skip()` would throw if this branch ever ran. This suite uses
// Jasmine (not Chai, as the original comment claimed); consider `pending()` or
// a regular `function` callback. Confirm before changing.
this.skip(); //Skips test (in Chai)
return;
}
});
// Exercises the aggregateCellInfo event with the GroupingBar module injected:
// renders a PivotView, then drives sorting, filtering, button removal, and
// drag/drop through synthesized DOM events.
describe('- Grouping Bar with injected Module - ', () => {
let pivotGridObj: PivotView;
let elem: HTMLElement = createElement('div', { id: 'PivotGrid', styles: 'height:200px; width:500px' });
afterAll(() => {
if (pivotGridObj) {
pivotGridObj.destroy();
}
remove(elem);
});
beforeAll((done: Function) => {
if (!document.getElementById(elem.id)) {
document.body.appendChild(elem);
}
// dataBound fires once the grid has rendered; it completes the async setup.
let dataBound: EmitType<Object> = () => { done(); };
PivotView.Inject(GroupingBar);
pivotGridObj = new PivotView({
dataSourceSettings: {
dataSource: pivot_dataset as IDataSet[],
expandAll: true,
enableSorting: true,
allowLabelFilter: true,
allowValueFilter: true,
sortSettings: [{ name: 'company', order: 'Descending' }],
formatSettings: [{ name: 'balance', format: 'C' }, { name: 'date', format: 'dd/MM/yyyy-hh:mm', type: 'date' }],
drilledMembers: [{ name: 'product', items: ['Bike', 'Car'] }, { name: 'gender', items: ['male'] }],
filterSettings: [
{ name: 'date', type: 'Date', condition: 'Between', value1: new Date('02/16/2000'), value2: new Date('02/16/2002') },
{ name: 'age', type: 'Exclude', items: ['25'] },
{ name: 'product', type: 'Include', items: ['Flight', 'Tempo'] },
],
valueSortSettings: { sortOrder: 'Descending', headerText: 'female~false~balance', headerDelimiter: '~' },
rows: [{ name: 'product', caption: 'Items' }, { name: 'eyeColor' }],
columns: [{ name: 'gender', caption: 'Population' }, { name: 'isActive' }],
values: [{ name: 'balance' }, { name: 'quantity' }],
filters: [{ name: 'age' }],
},
enableValueSorting: true,
showGroupingBar: true,
groupingBarSettings: { showFilterIcon: false, showRemoveIcon: false, showSortIcon: false, showValueTypeIcon: false },
dataBound: dataBound,
gridSettings: {
columnRender: (args: ColumnRenderEventArgs) => {
args.columns[0].width = 200;
args.columns[1].allowReordering = true;
args.columns[1].allowResizing = true;
},
rowHeight: 90
},
// The event under test: overrides aggregate values and formatting for
// selected cells so the specs can observe its effect.
aggregateCellInfo: (args: AggregateEventArgs) => {
if (args.aggregateType === 'Avg') {
args.value = args.fieldName === 'balance' ? 225 : 5;
}
if (args.row.actualText === 'brown') {
args.skipFormatting = true;
args.value = args.fieldName === 'balance' ? 225 : 5;
}
if (args.fieldName === 'gender') {
args.skipFormatting = true;
args.value = args.cellSets[0][args.fieldName] as any;
}
}
});
pivotGridObj.appendTo('#PivotGrid');
});
// NOTE(review): 'persistdata' is declared but never used in this suite.
let persistdata: string;
it('check window resize with grouping bar', () => {
pivotGridObj.onWindowResize();
pivotGridObj.renderModule.updateGridSettings();
expect(true).toBeTruthy();
});
it('grouping bar render testing', () => {
// NOTE(review): `toBeTruthy` here lacks the call parentheses, so these two
// expectations never actually assert anything — confirm and fix upstream.
expect(pivotGridObj.element.children[0].classList.contains('e-grouping-bar')).toBeTruthy;
pivotGridObj.dataBind();
pivotGridObj.groupingBarSettings = { showFilterIcon: true, showRemoveIcon: true, showSortIcon: true };
expect(pivotGridObj.element.children[0].classList.contains('e-grouping-bar')).toBeTruthy;
});
it('check sorting order field', () => {
let pivotButtons: HTMLElement[] =
[].slice.call(pivotGridObj.element.querySelector('.e-columns').querySelectorAll('.e-pivot-button'));
expect(pivotButtons.length).toBeGreaterThan(0);
((pivotButtons[0]).querySelector('.e-sort') as HTMLElement).click();
expect(true).toBe(true);
});
it('sorting order after update', () => {
let pivotButtons: HTMLElement[] =
[].slice.call(pivotGridObj.element.querySelector('.e-columns').querySelectorAll('.e-pivot-button'));
expect(pivotButtons.length).toBeGreaterThan(0);
expect((pivotButtons[0]).querySelector('.e-descend')).toBeTruthy;
});
it('check filtering field', (done: Function) => {
let pivotButtons: HTMLElement[] =
[].slice.call(pivotGridObj.element.querySelector('.e-filters').querySelectorAll('.e-pivot-button'));
expect(pivotButtons.length).toBeGreaterThan(0);
((pivotButtons[0]).querySelector('.e-btn-filter') as HTMLElement).click();
jasmine.DEFAULT_TIMEOUT_INTERVAL = 10000;
// The filter dialog opens asynchronously; give it a second before asserting.
setTimeout(() => {
let filterDialog: Dialog = pivotGridObj.pivotCommon.filterDialog.dialogPopUp;
expect(filterDialog.element.classList.contains('e-popup-open')).toBe(true);
done();
}, 1000);
});
it('check all nodes on filter popup', () => {
let treeObj: TreeView = pivotGridObj.pivotCommon.filterDialog.allMemberSelect;
let memberTreeObj: TreeView = pivotGridObj.pivotCommon.filterDialog.memberTreeView;
let filterDialog: Dialog = pivotGridObj.pivotCommon.filterDialog.dialogPopUp;
let allNode: HTMLElement = treeObj.element.querySelector('.e-checkbox-wrapper');
let checkEle: Element[] = <Element[] & NodeListOf<Element>>memberTreeObj.element.querySelectorAll('.e-checkbox-wrapper');
expect(checkEle.length).toBeGreaterThan(0);
expect(allNode.classList.contains('e-small')).toBe(false);
// Simulate a full click (mousedown/mouseup/click) on the "select all" checkbox.
let args: MouseEvent = new MouseEvent("mousedown", { view: window, bubbles: true, cancelable: true });
allNode.querySelector('.e-frame').dispatchEvent(args);
args = new MouseEvent("mouseup", { view: window, bubbles: true, cancelable: true });
allNode.querySelector('.e-frame').dispatchEvent(args);
args = new MouseEvent("click", { view: window, bubbles: true, cancelable: true });
allNode.querySelector('.e-frame').dispatchEvent(args);
let checkedEle: Element[] = <Element[] & NodeListOf<Element>>memberTreeObj.element.querySelectorAll('.e-check');
expect(checkEle.length).toEqual(checkedEle.length);
expect(filterDialog.element.querySelector('.e-ok-btn').getAttribute('disabled')).toBe(null);
(filterDialog.element.querySelector('.e-ok-btn') as HTMLElement).click();
});
it('check filter state after update', () => {
let filterDialog: Dialog = pivotGridObj.pivotCommon.filterDialog.dialogPopUp;
expect(filterDialog).toBeUndefined;
});
it('check remove pivot button', (done: Function) => {
let pivotButton: HTMLElement =
(pivotGridObj.element.querySelector('.e-filters').querySelector('.e-pivot-button') as HTMLElement);
expect(pivotButton.id).toBe('age');
(pivotButton.querySelector('.e-remove') as HTMLElement).click();
jasmine.DEFAULT_TIMEOUT_INTERVAL = 10000;
setTimeout(() => {
pivotButton = (pivotGridObj.element.querySelector('.e-filters').querySelector('.e-pivot-button') as HTMLElement);
expect(pivotButton).toBeNull();
done();
}, 1000);
});
// Drags a pivot button from the column axis into the value axis via synthetic
// mouse events, including the drop-indicator mouseover/mouseleave updates.
it('check drag and drop pivot button', (done: Function) => {
let valueAxiscontent: HTMLElement = pivotGridObj.element.querySelector('.e-values');
let columnAxiscontent: HTMLElement = pivotGridObj.element.querySelector('.e-columns');
let pivotButton: HTMLElement[] = [].slice.call((columnAxiscontent).querySelectorAll('.e-pivot-button'));
expect(pivotButton.length).toEqual(2);
let dragElement: HTMLElement = pivotButton[0].querySelector('.e-content');
let mousedown: any =
util.getEventObject('MouseEvents', 'mousedown', dragElement, dragElement, 15, 10);
EventHandler.trigger(dragElement, 'mousedown', mousedown);
let mousemove: any =
util.getEventObject('MouseEvents', 'mousemove', dragElement, valueAxiscontent, 15, 70);
mousemove.srcElement = mousemove.target = mousemove.toElement = valueAxiscontent;
EventHandler.trigger(<any>(document), 'mousemove', mousemove);
mousemove = util.setMouseCordinates(mousemove, 15, 75);
EventHandler.trigger(<any>(document), 'mousemove', mousemove);
let mouseOverEventArgs: any = extend({}, mousemove, null, true);
mouseOverEventArgs.type = 'mouseover';
(pivotGridObj.groupingBarModule as any).dropIndicatorUpdate(mouseOverEventArgs);
let mouseLeaveEventArgs: any = extend({}, mousemove, null, true);
mouseLeaveEventArgs.type = 'mouseleave';
(pivotGridObj.groupingBarModule as any).dropIndicatorUpdate(mouseLeaveEventArgs);
let mouseUp: any = util.getEventObject('MouseEvents', 'mouseup', dragElement, valueAxiscontent);
mouseUp.type = 'mouseup';
mouseUp.srcElement = mouseUp.target = mouseUp.toElement = valueAxiscontent;
EventHandler.trigger(<any>(document), 'mouseup', mouseUp);
jasmine.DEFAULT_TIMEOUT_INTERVAL = 10000;
setTimeout(() => {
pivotButton = [].slice.call((valueAxiscontent).querySelectorAll('.e-pivot-button'));
expect(pivotButton.length).toEqual(3);
done();
}, 1000);
});
it('destroy common event handlers', () => {
pivotGridObj.commonModule.destroy();
expect(true).toBeTruthy();
});
it('pivotgrid destroy', () => {
pivotGridObj.destroy();
expect(true).toBeTruthy();
});
it('pivotgrid destroy expect', () => {
expect(pivotGridObj.element.innerHTML).toBe('');
});
});
// Integration suite: PivotView with both the GroupingBar and FieldList
// modules injected. Exercises rendering, sorting, filtering, pivot-button
// drag/drop between axes, and RTL toggling, then tears everything down.
describe('- Field List with injected Module - ', () => {
    let pivotGridObj: PivotView;
    // Host element for the grid; removed again in afterAll.
    let elem: HTMLElement = createElement('div', { id: 'PivotGrid', styles: 'height:200px; width:500px' });
    afterAll(() => {
        if (pivotGridObj) {
            pivotGridObj.destroy();
        }
        remove(elem);
    });
    beforeAll((done: Function) => {
        if (!document.getElementById(elem.id)) {
            document.body.appendChild(elem);
        }
        // Resolve the async setup once the grid has finished binding data.
        let dataBound: EmitType<Object> = () => { done(); };
        PivotView.Inject(GroupingBar, FieldList);
        pivotGridObj = new PivotView({
            dataSourceSettings: {
                dataSource: pivot_dataset as IDataSet[],
                expandAll: true,
                enableSorting: true,
                allowLabelFilter: true,
                allowValueFilter: true,
                sortSettings: [{ name: 'company', order: 'Descending' }],
                formatSettings: [{ name: 'balance', format: 'C' }, { name: 'date', format: 'dd/MM/yyyy-hh:mm', type: 'date' }],
                drilledMembers: [{ name: 'product', items: ['Bike', 'Car'] }, { name: 'gender', items: ['male'] }],
                filterSettings: [
                    { name: 'date', type: 'Date', condition: 'Between', value1: new Date('02/16/2000'), value2: new Date('02/16/2002') },
                    { name: 'age', type: 'Exclude', items: ['25'] },
                    { name: 'product', type: 'Include', items: ['Flight', 'Tempo'] },
                ],
                valueSortSettings: { sortOrder: 'Descending', headerText: 'female~false~balance', headerDelimiter: '~' },
                rows: [{ name: 'product', caption: 'Items' }, { name: 'eyeColor' }],
                columns: [{ name: 'gender', caption: 'Population' }, { name: 'isActive' }],
                values: [{ name: 'balance' }, { name: 'quantity' }],
                filters: [{ name: 'age' }],
            },
            enableValueSorting: true,
            showGroupingBar: true,
            showFieldList: true,
            // All grouping-bar button icons are hidden for this suite.
            groupingBarSettings: { showFilterIcon: false, showRemoveIcon: false, showSortIcon: false, showValueTypeIcon: false },
            dataBound: dataBound,
            gridSettings: {
                columnRender: (args: ColumnRenderEventArgs) => {
                    args.columns[0].width = 200;
                    args.columns[1].allowReordering = true;
                    args.columns[1].allowResizing = true;
                },
                rowHeight: 90
            },
            // Override selected aggregate cells to fixed values so later
            // assertions are deterministic.
            aggregateCellInfo: (args: AggregateEventArgs) => {
                if (args.aggregateType === 'Avg') {
                    args.value = args.fieldName === 'balance' ? 225 : 5;
                }
                if (args.row.actualText === 'brown') {
                    args.skipFormatting = true;
                    args.value = args.fieldName === 'balance' ? 225 : 5;
                }
                if (args.fieldName === 'gender') {
                    args.skipFormatting = true;
                    args.value = args.cellSets[0][args.fieldName] as any;
                }
            }
        });
        pivotGridObj.appendTo('#PivotGrid');
        // Disable dialog animation so specs don't have to wait for transitions.
        util.disableDialogAnimation(pivotGridObj.pivotFieldListModule.dialogRenderer.fieldListDialog);
    });
    it('check window resize with grouping bar', () => {
        pivotGridObj.onWindowResize();
        pivotGridObj.renderModule.updateGridSettings();
        expect(true).toBeTruthy();
    });
    it('grouping bar render testing', () => {
        pivotGridObj.dataBind();
        expect(pivotGridObj.element.querySelector('.e-grouping-bar')).toBeTruthy;
    });
    it('field list render testing', () => {
        pivotGridObj.dataBind();
        expect(pivotGridObj.pivotFieldListModule).not.toBeUndefined;
    });
    it('check open field list popup', () => {
        (pivotGridObj.pivotFieldListModule.element.querySelector('.e-toggle-field-list') as HTMLElement).click();
        expect(true).toBe(true);
    });
    // Click the sort icon of the first column pivot button, then verify the
    // descending indicator in the next spec.
    it('check sorting order field', () => {
        let pivotButtons: HTMLElement[] =
            [].slice.call(pivotGridObj.element.querySelector('.e-columns').querySelectorAll('.e-pivot-button'));
        expect(pivotButtons.length).toBeGreaterThan(0);
        ((pivotButtons[0]).querySelector('.e-sort') as HTMLElement).click();
        expect(true).toBe(true);
    });
    it('sorting order after update', () => {
        let pivotButtons: HTMLElement[] =
            [].slice.call(pivotGridObj.element.querySelector('.e-columns').querySelectorAll('.e-pivot-button'));
        expect(pivotButtons.length).toBeGreaterThan(0);
        expect((pivotButtons[0]).querySelector('.e-descend')).toBeTruthy;
    });
    // Open the member filter dialog from the filter-axis pivot button; the
    // timeout allows the popup to finish opening.
    it('check filtering field', (done: Function) => {
        let pivotButtons: HTMLElement[] =
            [].slice.call(pivotGridObj.element.querySelector('.e-filters').querySelectorAll('.e-pivot-button'));
        expect(pivotButtons.length).toBeGreaterThan(0);
        ((pivotButtons[0]).querySelector('.e-btn-filter') as HTMLElement).click();
        jasmine.DEFAULT_TIMEOUT_INTERVAL = 10000;
        setTimeout(() => {
            let filterDialog: Dialog = pivotGridObj.pivotCommon.filterDialog.dialogPopUp;
            expect(filterDialog.element.classList.contains('e-popup-open')).toBe(true);
            done();
        }, 1000);
    });
    // Toggle the "All" checkbox via synthetic mouse events and confirm every
    // member node follows it, then accept the dialog.
    it('check all nodes on filter popup', () => {
        let treeObj: TreeView = pivotGridObj.pivotCommon.filterDialog.allMemberSelect;
        let memberTreeObj: TreeView = pivotGridObj.pivotCommon.filterDialog.memberTreeView;
        let filterDialog: Dialog = pivotGridObj.pivotCommon.filterDialog.dialogPopUp;
        let allNode: HTMLElement = treeObj.element.querySelector('.e-checkbox-wrapper');
        let checkEle: Element[] = <Element[] & NodeListOf<Element>>memberTreeObj.element.querySelectorAll('.e-checkbox-wrapper');
        expect(checkEle.length).toBeGreaterThan(0);
        expect(allNode.classList.contains('e-small')).toBe(false);
        let args: MouseEvent = new MouseEvent("mousedown", { view: window, bubbles: true, cancelable: true });
        allNode.querySelector('.e-frame').dispatchEvent(args);
        args = new MouseEvent("mouseup", { view: window, bubbles: true, cancelable: true });
        allNode.querySelector('.e-frame').dispatchEvent(args);
        args = new MouseEvent("click", { view: window, bubbles: true, cancelable: true });
        allNode.querySelector('.e-frame').dispatchEvent(args);
        let checkedEle: Element[] = <Element[] & NodeListOf<Element>>memberTreeObj.element.querySelectorAll('.e-check');
        expect(checkEle.length).toEqual(checkedEle.length);
        expect(filterDialog.element.querySelector('.e-ok-btn').getAttribute('disabled')).toBe(null);
        (filterDialog.element.querySelector('.e-ok-btn') as HTMLElement).click();
    });
    it('check filter state after update', () => {
        let filterDialog: Dialog = pivotGridObj.pivotCommon.filterDialog.dialogPopUp;
        expect(filterDialog).toBeUndefined;
    });
    // Removing the 'age' pivot button should delete it from the filter axis.
    it('check remove pivot button', (done: Function) => {
        let pivotButton: HTMLElement =
            (pivotGridObj.element.querySelector('.e-filters').querySelector('.e-pivot-button') as HTMLElement);
        expect(pivotButton.id).toBe('age');
        (pivotButton.querySelector('.e-remove') as HTMLElement).click();
        jasmine.DEFAULT_TIMEOUT_INTERVAL = 10000;
        setTimeout(() => {
            pivotButton = (pivotGridObj.element.querySelector('.e-filters').querySelector('.e-pivot-button') as HTMLElement);
            expect(pivotButton).toBeNull();
            done();
        }, 1000);
    });
    // Simulate dragging the first column pivot button into the values axis;
    // afterwards the values axis should own one extra button (2 + 1 = 3).
    it('check drag and drop pivot button', (done: Function) => {
        let valueAxiscontent: HTMLElement = pivotGridObj.element.querySelector('.e-values');
        let columnAxiscontent: HTMLElement = pivotGridObj.element.querySelector('.e-columns');
        let pivotButton: HTMLElement[] = [].slice.call((columnAxiscontent).querySelectorAll('.e-pivot-button'));
        expect(pivotButton.length).toEqual(2);
        let dragElement: HTMLElement = pivotButton[0].querySelector('.e-draggable');
        let mousedown: any =
            util.getEventObject('MouseEvents', 'mousedown', dragElement, dragElement, 15, 10);
        EventHandler.trigger(dragElement, 'mousedown', mousedown);
        let mousemove: any =
            util.getEventObject('MouseEvents', 'mousemove', dragElement, valueAxiscontent, 15, 70);
        mousemove.srcElement = mousemove.target = mousemove.toElement = valueAxiscontent;
        EventHandler.trigger(<any>(document), 'mousemove', mousemove);
        mousemove = util.setMouseCordinates(mousemove, 15, 75);
        EventHandler.trigger(<any>(document), 'mousemove', mousemove);
        let mouseUp: any = util.getEventObject('MouseEvents', 'mouseup', dragElement, valueAxiscontent);
        mouseUp.type = 'mouseup';
        mouseUp.srcElement = mouseUp.target = mouseUp.toElement = valueAxiscontent;
        EventHandler.trigger(<any>(document), 'mouseup', mouseUp);
        jasmine.DEFAULT_TIMEOUT_INTERVAL = 10000;
        setTimeout(() => {
            pivotButton = [].slice.call((valueAxiscontent).querySelectorAll('.e-pivot-button'));
            expect(pivotButton.length).toEqual(3);
            done();
        }, 1000);
    });
    it('set rtl property', (done: Function) => {
        pivotGridObj.enableRtl = true;
        jasmine.DEFAULT_TIMEOUT_INTERVAL = 10000;
        setTimeout(() => {
            expect(pivotGridObj.element.classList.contains('e-rtl')).toBeTruthy;
            done();
        }, 1000);
    });
    it('remove rtl property', (done: Function) => {
        pivotGridObj.enableRtl = false;
        jasmine.DEFAULT_TIMEOUT_INTERVAL = 10000;
        setTimeout(() => {
            expect(pivotGridObj.element.classList.contains('e-rtl')).not.toBeTruthy;
            done();
        }, 1000);
    });
    it('destroy common event handlers', () => {
        pivotGridObj.commonModule.destroy();
        expect(true).toBeTruthy();
    });
    it('pivotgrid destroy', () => {
        pivotGridObj.destroy();
        expect(true).toBeTruthy();
    });
    it('pivotgrid destroy expect', () => {
        expect(pivotGridObj.element.innerHTML).toBe('');
    });
});
it('memory leak', function () {
    // Take a fresh sample before inspecting the profiler numbers.
    profile.sample();
    const average: any = inMB(profile.averageChange);
    // Average change in memory samples should stay under 10MB.
    //expect(average).toBeLessThan(10);
    const memory: any = inMB(getMemoryProfile());
    // Final memory usage should be close to the very first sample if
    // everything was deallocated properly.
    expect(memory).toBeLessThan(profile.samples[0] + 0.25);
});
}); | the_stack |
import {
BufferAttribute,
BufferGeometry,
Float32BufferAttribute,
InterleavedBuffer,
InterleavedBufferAttribute,
TriangleFanDrawMode,
TriangleStripDrawMode,
TrianglesDrawMode,
Vector3,
Mesh,
Line,
Points,
Material,
SkinnedMesh,
MeshStandardMaterial,
} from 'three'
import { getWithKey } from '../types/helpers'
import { TypedArrayConstructors, TypedArray } from '../types/shared'
/**
 * Merges a list of geometries into a single BufferGeometry.
 *
 * All geometries must be compatible: either all indexed or all non-indexed,
 * sharing the same attribute and morph-attribute names, and using a
 * consistent `morphTargetsRelative` flag.
 *
 * @param {Array<BufferGeometry>} geometries
 * @param {Boolean} useGroups - when true, one draw group is added per source
 *   geometry (group materialIndex = the geometry's position in the input).
 * @return {BufferGeometry} the merged geometry, or null when the inputs are
 *   incompatible.
 */
export const mergeBufferGeometries = (geometries: BufferGeometry[], useGroups?: boolean): BufferGeometry | null => {
  const isIndexed = geometries[0].index !== null
  const attributesUsed = new Set(Object.keys(geometries[0].attributes))
  const morphAttributesUsed = new Set(Object.keys(geometries[0].morphAttributes))
  const attributes: { [key: string]: Array<InterleavedBufferAttribute | BufferAttribute> } = {}
  const morphAttributes: { [key: string]: Array<BufferAttribute | InterleavedBufferAttribute>[] } = {}
  const morphTargetsRelative = geometries[0].morphTargetsRelative
  const mergedGeometry = new BufferGeometry()
  let offset = 0
  // BUGFIX: this validation loop must be a plain for-loop, not
  // Array#forEach. Inside a forEach callback `return null` only skips the
  // current geometry (the callback's return value is discarded), so
  // incompatible inputs were logged but a bogus merged geometry was still
  // returned. With a for-loop the `return null` statements abort the merge.
  for (let i = 0; i < geometries.length; ++i) {
    const geom = geometries[i]
    let attributesCount = 0
    // ensure that all geometries are indexed, or none
    if (isIndexed !== (geom.index !== null)) {
      console.error(
        'THREE.BufferGeometryUtils: .mergeBufferGeometries() failed with geometry at index ' +
          i +
          '. All geometries must have compatible attributes; make sure index attribute exists among all geometries, or in none of them.',
      )
      return null
    }
    // gather attributes, exit early if they're different
    for (let name in geom.attributes) {
      if (!attributesUsed.has(name)) {
        console.error(
          'THREE.BufferGeometryUtils: .mergeBufferGeometries() failed with geometry at index ' +
            i +
            '. All geometries must have compatible attributes; make sure "' +
            name +
            '" attribute exists among all geometries, or in none of them.',
        )
        return null
      }
      if (attributes[name] === undefined) {
        attributes[name] = []
      }
      attributes[name].push(geom.attributes[name])
      attributesCount++
    }
    // ensure geometries have the same number of attributes
    if (attributesCount !== attributesUsed.size) {
      console.error(
        'THREE.BufferGeometryUtils: .mergeBufferGeometries() failed with geometry at index ' +
          i +
          '. Make sure all geometries have the same number of attributes.',
      )
      return null
    }
    // gather morph attributes, exit early if they're different
    if (morphTargetsRelative !== geom.morphTargetsRelative) {
      console.error(
        'THREE.BufferGeometryUtils: .mergeBufferGeometries() failed with geometry at index ' +
          i +
          '. .morphTargetsRelative must be consistent throughout all geometries.',
      )
      return null
    }
    for (let name in geom.morphAttributes) {
      if (!morphAttributesUsed.has(name)) {
        console.error(
          'THREE.BufferGeometryUtils: .mergeBufferGeometries() failed with geometry at index ' +
            i +
            '. .morphAttributes must be consistent throughout all geometries.',
        )
        return null
      }
      if (morphAttributes[name] === undefined) morphAttributes[name] = []
      morphAttributes[name].push(geom.morphAttributes[name])
    }
    // gather .userData so nothing is lost in the merge
    mergedGeometry.userData.mergedUserData = mergedGeometry.userData.mergedUserData || []
    mergedGeometry.userData.mergedUserData.push(geom.userData)
    if (useGroups) {
      let count
      if (geom.index) {
        count = geom.index.count
      } else if (geom.attributes.position !== undefined) {
        count = geom.attributes.position.count
      } else {
        console.error(
          'THREE.BufferGeometryUtils: .mergeBufferGeometries() failed with geometry at index ' +
            i +
            '. The geometry must have either an index or a position attribute',
        )
        return null
      }
      mergedGeometry.addGroup(offset, count, i)
      offset += count
    }
  }
  // merge indices (shifting each geometry's indices past the vertices of
  // the geometries that precede it)
  if (isIndexed) {
    let indexOffset = 0
    const mergedIndex: number[] = []
    geometries.forEach((geom) => {
      const index = geom.index as BufferAttribute
      for (let j = 0; j < index.count; ++j) {
        mergedIndex.push(index.getX(j) + indexOffset)
      }
      indexOffset += geom.attributes.position.count
    })
    mergedGeometry.setIndex(mergedIndex)
  }
  // merge attributes
  for (let name in attributes) {
    const mergedAttribute = mergeBufferAttributes(attributes[name] as BufferAttribute[])
    if (!mergedAttribute) {
      console.error(
        'THREE.BufferGeometryUtils: .mergeBufferGeometries() failed while trying to merge the ' + name + ' attribute.',
      )
      return null
    }
    mergedGeometry.setAttribute(name, mergedAttribute)
  }
  // merge morph attributes, target by target
  for (let name in morphAttributes) {
    const numMorphTargets = morphAttributes[name][0].length
    if (numMorphTargets === 0) break
    mergedGeometry.morphAttributes = mergedGeometry.morphAttributes || {}
    mergedGeometry.morphAttributes[name] = []
    for (let i = 0; i < numMorphTargets; ++i) {
      const morphAttributesToMerge = []
      for (let j = 0; j < morphAttributes[name].length; ++j) {
        morphAttributesToMerge.push(morphAttributes[name][j][i])
      }
      const mergedMorphAttribute = mergeBufferAttributes(morphAttributesToMerge as BufferAttribute[])
      if (!mergedMorphAttribute) {
        console.error(
          'THREE.BufferGeometryUtils: .mergeBufferGeometries() failed while trying to merge the ' +
            name +
            ' morphAttribute.',
        )
        return null
      }
      mergedGeometry.morphAttributes[name].push(mergedMorphAttribute)
    }
  }
  return mergedGeometry
}
/**
 * Concatenates a list of BufferAttributes into one BufferAttribute.
 *
 * All inputs must share the same typed-array class, itemSize and normalized
 * flag.
 *
 * @param {Array<BufferAttribute>} attributes
 * @return {BufferAttribute} the merged attribute; null when the inputs are
 *   inconsistent; undefined when the input list is empty.
 */
export const mergeBufferAttributes = (attributes: BufferAttribute[]): BufferAttribute | null | undefined => {
  let TypedArray: TypedArrayConstructors | undefined = undefined
  let itemSize: number | undefined = undefined
  let normalized: boolean | undefined = undefined
  let arrayLength = 0
  // BUGFIX: this must be a plain loop, not Array#forEach. Inside a forEach
  // callback `return null` is discarded, so inconsistent attributes were
  // logged but a wrong merged attribute was still returned. With a for-of
  // loop the `return null` statements abort the merge as documented.
  for (const attr of attributes) {
    if (TypedArray === undefined) {
      TypedArray = attr.array.constructor
    }
    if (TypedArray !== attr.array.constructor) {
      console.error(
        'THREE.BufferGeometryUtils: .mergeBufferAttributes() failed. BufferAttribute.array must be of consistent array types across matching attributes.',
      )
      return null
    }
    if (itemSize === undefined) itemSize = attr.itemSize
    if (itemSize !== attr.itemSize) {
      console.error(
        'THREE.BufferGeometryUtils: .mergeBufferAttributes() failed. BufferAttribute.itemSize must be consistent across matching attributes.',
      )
      return null
    }
    if (normalized === undefined) normalized = attr.normalized
    if (normalized !== attr.normalized) {
      console.error(
        'THREE.BufferGeometryUtils: .mergeBufferAttributes() failed. BufferAttribute.normalized must be consistent across matching attributes.',
      )
      return null
    }
    arrayLength += attr.array.length
  }
  if (TypedArray && itemSize) {
    // @ts-expect-error this works in JS and TS is complaining but it's such a tiny thing I can live with the guilt
    const array = new TypedArray(arrayLength)
    let offset = 0
    // copy each source array into its slot of the merged buffer
    for (const attr of attributes) {
      array.set(attr.array, offset)
      offset += attr.array.length
    }
    return new BufferAttribute(array, itemSize, normalized)
  }
}
/**
 * Interleaves the provided attributes into one InterleavedBuffer and returns
 * one InterleavedBufferAttribute view per input attribute.
 *
 * All inputs must use the same typed-array class; their itemSizes may differ
 * (the buffer stride is the sum of all itemSizes).
 *
 * @param {Array<BufferAttribute>} attributes
 * @return {Array<InterleavedBufferAttribute>} the interleaved views, or null
 *   when the input arrays are of mixed typed-array classes.
 */
export const interleaveAttributes = (attributes: BufferAttribute[]): InterleavedBufferAttribute[] | null => {
  // Interleaves the provided attributes into an InterleavedBuffer and returns
  // a set of InterleavedBufferAttributes for each attribute
  let TypedArray: TypedArrayConstructors | undefined = undefined
  let arrayLength = 0
  let stride = 0
  // calculate the length and type of the interleavedBuffer
  for (let i = 0, l = attributes.length; i < l; ++i) {
    const attribute = attributes[i]
    if (TypedArray === undefined) TypedArray = attribute.array.constructor
    if (TypedArray !== attribute.array.constructor) {
      console.error('AttributeBuffers of different types cannot be interleaved')
      return null
    }
    arrayLength += attribute.array.length
    stride += attribute.itemSize
  }
  // Create the set of buffer attributes
  // @ts-expect-error this works in JS and TS is complaining but it's such a tiny thing I can live with the guilt
  const interleavedBuffer = new InterleavedBuffer(new TypedArray(arrayLength), stride)
  let offset = 0
  const res = []
  // component accessors, indexed by component position (x/y/z/w)
  const getters = ['getX', 'getY', 'getZ', 'getW']
  const setters = ['setX', 'setY', 'setZ', 'setW']
  for (let j = 0, l = attributes.length; j < l; j++) {
    const attribute = attributes[j]
    const itemSize = attribute.itemSize
    const count = attribute.count
    // each view starts `offset` components into the shared stride
    const iba = new InterleavedBufferAttribute(interleavedBuffer, itemSize, offset, attribute.normalized)
    res.push(iba)
    offset += itemSize
    // Move the data for each attribute into the new interleavedBuffer
    // at the appropriate offset
    for (let c = 0; c < count; c++) {
      for (let k = 0; k < itemSize; k++) {
        const set = getWithKey(iba, setters[k] as keyof InterleavedBufferAttribute) as InterleavedBufferAttribute[
          | 'setX'
          | 'setY'
          | 'setZ'
          | 'setW']
        const get = getWithKey(attribute, getters[k] as keyof BufferAttribute) as BufferAttribute[
          | 'getX'
          | 'getY'
          | 'getZ'
          | 'getW']
        set(c, get(c))
      }
    }
  }
  return res
}
/**
 * Returns the estimated memory used by a geometry, in bytes.
 *
 * The estimate covers every vertex attribute plus the index buffer, and is
 * computed as count * itemSize * BYTES_PER_ELEMENT so that
 * InterleavedBufferAttributes are accounted for as well.
 *
 * @param {Array<BufferGeometry>} geometry
 * @return {number}
 */
export function estimateBytesUsed(geometry: BufferGeometry): number {
  let total = 0
  for (const name of Object.keys(geometry.attributes)) {
    const attribute = geometry.getAttribute(name)
    total += attribute.count * attribute.itemSize * (attribute.array as TypedArray).BYTES_PER_ELEMENT
  }
  const index = geometry.getIndex()
  if (index) {
    total += index.count * index.itemSize * (index.array as TypedArray).BYTES_PER_ELEMENT
  }
  return total
}
/**
 * Deduplicates vertices whose attribute values agree within `tolerance`,
 * producing an indexed geometry. Generates an index buffer if the geometry
 * doesn't have one, or optimizes the existing one.
 *
 * @param {BufferGeometry} geometry
 * @param {number} tolerance - vertices whose attribute components round to
 *   the same value at this precision are merged (assumed to be in the same
 *   units as the attribute data).
 * @return {BufferGeometry} a new, indexed geometry (the input is not mutated).
 */
export function mergeVertices(geometry: BufferGeometry, tolerance = 1e-4): BufferGeometry {
  tolerance = Math.max(tolerance, Number.EPSILON)
  // Generate an index buffer if the geometry doesn't have one, or optimize it
  // if it's already available.
  // Maps the per-vertex hash string to its index in the deduplicated arrays.
  const hashToIndex: {
    [key: string]: number
  } = {}
  const indices = geometry.getIndex()
  const positions = geometry.getAttribute('position')
  const vertexCount = indices ? indices.count : positions.count
  // next value for triangle indices
  let nextIndex = 0
  // attributes and new attribute arrays
  const attributeNames = Object.keys(geometry.attributes)
  const attrArrays: {
    [key: string]: []
  } = {}
  const morphAttrsArrays: {
    [key: string]: Array<Array<BufferAttribute | InterleavedBufferAttribute>>
  } = {}
  const newIndices = []
  const getters = ['getX', 'getY', 'getZ', 'getW']
  // initialize the arrays: one output array per attribute, and one per morph
  // target per morph attribute
  for (let i = 0, l = attributeNames.length; i < l; i++) {
    const name = attributeNames[i]
    attrArrays[name] = []
    const morphAttr = geometry.morphAttributes[name]
    if (morphAttr) {
      morphAttrsArrays[name] = new Array(morphAttr.length).fill(0).map(() => [])
    }
  }
  // convert the error tolerance to an amount of decimal places to truncate to
  const decimalShift = Math.log10(1 / tolerance)
  const shiftMultiplier = Math.pow(10, decimalShift)
  for (let i = 0; i < vertexCount; i++) {
    const index = indices ? indices.getX(i) : i
    // Generate a hash for the vertex attributes at the current index 'i':
    // every component of every attribute, truncated to the tolerance.
    let hash = ''
    for (let j = 0, l = attributeNames.length; j < l; j++) {
      const name = attributeNames[j]
      const attribute = geometry.getAttribute(name)
      const itemSize = attribute.itemSize
      for (let k = 0; k < itemSize; k++) {
        // double tilde truncates the decimal value
        // @ts-ignore no
        hash += `${~~(attribute[getters[k]](index) * shiftMultiplier)},`
      }
    }
    // Add another reference to the vertex if it's already
    // used by another index
    if (hash in hashToIndex) {
      newIndices.push(hashToIndex[hash])
    } else {
      // copy data to the new index in the attribute arrays
      for (let j = 0, l = attributeNames.length; j < l; j++) {
        const name = attributeNames[j]
        const attribute = geometry.getAttribute(name)
        const morphAttr = geometry.morphAttributes[name]
        const itemSize = attribute.itemSize
        const newarray = attrArrays[name]
        const newMorphArrays = morphAttrsArrays[name]
        for (let k = 0; k < itemSize; k++) {
          const getterFunc = getters[k]
          // @ts-ignore
          newarray.push(attribute[getterFunc](index))
          if (morphAttr) {
            for (let m = 0, ml = morphAttr.length; m < ml; m++) {
              // @ts-ignore
              newMorphArrays[m].push(morphAttr[m][getterFunc](index))
            }
          }
        }
      }
      hashToIndex[hash] = nextIndex
      newIndices.push(nextIndex)
      nextIndex++
    }
  }
  // Generate typed arrays from new attribute arrays and update
  // the attributeBuffers
  const result = geometry.clone()
  for (let i = 0, l = attributeNames.length; i < l; i++) {
    const name = attributeNames[i]
    const oldAttribute = geometry.getAttribute(name)
    //@ts-expect-error something to do with functions and constructors and new
    const buffer = new (oldAttribute.array as TypedArray).constructor(attrArrays[name])
    const attribute = new BufferAttribute(buffer, oldAttribute.itemSize, oldAttribute.normalized)
    result.setAttribute(name, attribute)
    // Update the attribute arrays
    if (name in morphAttrsArrays) {
      for (let j = 0; j < morphAttrsArrays[name].length; j++) {
        const oldMorphAttribute = geometry.morphAttributes[name][j]
        //@ts-expect-error something to do with functions and constructors and new
        const buffer = new (oldMorphAttribute.array as TypedArray).constructor(morphAttrsArrays[name][j])
        const morphAttribute = new BufferAttribute(buffer, oldMorphAttribute.itemSize, oldMorphAttribute.normalized)
        result.morphAttributes[name][j] = morphAttribute
      }
    }
  }
  // indices
  result.setIndex(newIndices)
  return result
}
/**
 * Converts a geometry authored for TRIANGLE_FAN or TRIANGLE_STRIP rendering
 * into an equivalent indexed triangle-list geometry.
 *
 * @param {BufferGeometry} geometry
 * @param {number} drawMode - one of TrianglesDrawMode, TriangleFanDrawMode,
 *   TriangleStripDrawMode.
 * @return {BufferGeometry} a new geometry with a triangle-list index, or the
 *   input geometry unchanged when no conversion is possible/needed.
 */
export function toTrianglesDrawMode(geometry: BufferGeometry, drawMode: number): BufferGeometry {
  // Already a triangle list — nothing to convert.
  if (drawMode === TrianglesDrawMode) {
    console.warn('THREE.BufferGeometryUtils.toTrianglesDrawMode(): Geometry already defined as triangles.')
    return geometry
  }
  // Guard clause: reject anything that isn't a fan or a strip.
  if (drawMode !== TriangleFanDrawMode && drawMode !== TriangleStripDrawMode) {
    console.error('THREE.BufferGeometryUtils.toTrianglesDrawMode(): Unknown draw mode:', drawMode)
    return geometry
  }
  let index = geometry.getIndex()
  // generate a sequential index if the geometry has none
  if (index === null) {
    const position = geometry.getAttribute('position')
    if (position === undefined) {
      console.error(
        'THREE.BufferGeometryUtils.toTrianglesDrawMode(): Undefined position attribute. Processing not possible.',
      )
      return geometry
    }
    const sequential = []
    for (let i = 0; i < position.count; i++) {
      sequential.push(i)
    }
    geometry.setIndex(sequential)
    index = geometry.getIndex()
  }
  // A fan/strip of N indexed vertices encodes N - 2 triangles.
  const numberOfTriangles = (index as BufferAttribute).count - 2
  const newIndices: number[] = []
  if (index) {
    if (drawMode === TriangleFanDrawMode) {
      // gl.TRIANGLE_FAN: every triangle shares the first vertex
      for (let i = 1; i <= numberOfTriangles; i++) {
        newIndices.push(index.getX(0), index.getX(i), index.getX(i + 1))
      }
    } else {
      // gl.TRIANGLE_STRIP: winding alternates, so flip every odd triangle
      for (let i = 0; i < numberOfTriangles; i++) {
        if (i % 2 === 0) {
          newIndices.push(index.getX(i), index.getX(i + 1), index.getX(i + 2))
        } else {
          newIndices.push(index.getX(i + 2), index.getX(i + 1), index.getX(i))
        }
      }
    }
  }
  if (newIndices.length / 3 !== numberOfTriangles) {
    console.error('THREE.BufferGeometryUtils.toTrianglesDrawMode(): Unable to generate correct amount of triangles.')
  }
  // build final geometry
  const converted = geometry.clone()
  converted.setIndex(newIndices)
  converted.clearGroups()
  return converted
}
/**
 * Result of computeMorphedAttributes: the original position/normal
 * attributes of the object paired with copies that have the current morph
 * target influences (and skinning, for SkinnedMesh) baked in.
 * Helpful for Raytracing or Decals.
 */
export type ComputedMorphedAttribute = {
  positionAttribute: BufferAttribute | InterleavedBufferAttribute
  normalAttribute: BufferAttribute | InterleavedBufferAttribute
  morphedPositionAttribute: Float32BufferAttribute
  morphedNormalAttribute: Float32BufferAttribute
}
export function computeMorphedAttributes(object: Mesh | Line | Points): ComputedMorphedAttribute | null {
if (object.geometry.isBufferGeometry !== true) {
console.error('THREE.BufferGeometryUtils: Geometry is not of type BufferGeometry.')
return null
}
const _vA = new Vector3()
const _vB = new Vector3()
const _vC = new Vector3()
const _tempA = new Vector3()
const _tempB = new Vector3()
const _tempC = new Vector3()
const _morphA = new Vector3()
const _morphB = new Vector3()
const _morphC = new Vector3()
function _calculateMorphedAttributeData(
object: Mesh | Line | Points,
material: Material,
attribute: BufferAttribute | InterleavedBufferAttribute,
morphAttribute: (BufferAttribute | InterleavedBufferAttribute)[],
morphTargetsRelative: boolean,
a: number,
b: number,
c: number,
modifiedAttributeArray: Float32Array,
): void {
_vA.fromBufferAttribute(attribute, a)
_vB.fromBufferAttribute(attribute, b)
_vC.fromBufferAttribute(attribute, c)
const morphInfluences = object.morphTargetInfluences
if ((material as MeshStandardMaterial).morphTargets && morphAttribute && morphInfluences) {
_morphA.set(0, 0, 0)
_morphB.set(0, 0, 0)
_morphC.set(0, 0, 0)
for (let i = 0, il = morphAttribute.length; i < il; i++) {
const influence = morphInfluences[i]
const morph = morphAttribute[i]
if (influence === 0) continue
_tempA.fromBufferAttribute(morph, a)
_tempB.fromBufferAttribute(morph, b)
_tempC.fromBufferAttribute(morph, c)
if (morphTargetsRelative) {
_morphA.addScaledVector(_tempA, influence)
_morphB.addScaledVector(_tempB, influence)
_morphC.addScaledVector(_tempC, influence)
} else {
_morphA.addScaledVector(_tempA.sub(_vA), influence)
_morphB.addScaledVector(_tempB.sub(_vB), influence)
_morphC.addScaledVector(_tempC.sub(_vC), influence)
}
}
_vA.add(_morphA)
_vB.add(_morphB)
_vC.add(_morphC)
}
if ((object as SkinnedMesh).isSkinnedMesh) {
// @ts-expect-error – https://github.com/three-types/three-ts-types/issues/37
object.boneTransform(a, _vA)
// @ts-expect-error – https://github.com/three-types/three-ts-types/issues/37
object.boneTransform(b, _vB)
// @ts-expect-error – https://github.com/three-types/three-ts-types/issues/37
object.boneTransform(c, _vC)
}
modifiedAttributeArray[a * 3 + 0] = _vA.x
modifiedAttributeArray[a * 3 + 1] = _vA.y
modifiedAttributeArray[a * 3 + 2] = _vA.z
modifiedAttributeArray[b * 3 + 0] = _vB.x
modifiedAttributeArray[b * 3 + 1] = _vB.y
modifiedAttributeArray[b * 3 + 2] = _vB.z
modifiedAttributeArray[c * 3 + 0] = _vC.x
modifiedAttributeArray[c * 3 + 1] = _vC.y
modifiedAttributeArray[c * 3 + 2] = _vC.z
}
const geometry = object.geometry
const material = object.material
let a, b, c
const index = geometry.index
const positionAttribute = geometry.attributes.position
const morphPosition = geometry.morphAttributes.position
const morphTargetsRelative = geometry.morphTargetsRelative
const normalAttribute = geometry.attributes.normal
const morphNormal = geometry.morphAttributes.position
const groups = geometry.groups
const drawRange = geometry.drawRange
let i, j, il, jl
let group, groupMaterial
let start, end
const modifiedPosition = new Float32Array(positionAttribute.count * positionAttribute.itemSize)
const modifiedNormal = new Float32Array(normalAttribute.count * normalAttribute.itemSize)
if (index !== null) {
// indexed buffer geometry
if (Array.isArray(material)) {
for (i = 0, il = groups.length; i < il; i++) {
group = groups[i]
groupMaterial = material[group.materialIndex as number]
start = Math.max(group.start, drawRange.start)
end = Math.min(group.start + group.count, drawRange.start + drawRange.count)
for (j = start, jl = end; j < jl; j += 3) {
a = index.getX(j)
b = index.getX(j + 1)
c = index.getX(j + 2)
_calculateMorphedAttributeData(
object,
groupMaterial,
positionAttribute,
morphPosition,
morphTargetsRelative,
a,
b,
c,
modifiedPosition,
)
_calculateMorphedAttributeData(
object,
groupMaterial,
normalAttribute,
morphNormal,
morphTargetsRelative,
a,
b,
c,
modifiedNormal,
)
}
}
} else {
start = Math.max(0, drawRange.start)
end = Math.min(index.count, drawRange.start + drawRange.count)
for (i = start, il = end; i < il; i += 3) {
a = index.getX(i)
b = index.getX(i + 1)
c = index.getX(i + 2)
_calculateMorphedAttributeData(
object,
material,
positionAttribute,
morphPosition,
morphTargetsRelative,
a,
b,
c,
modifiedPosition,
)
_calculateMorphedAttributeData(
object,
material,
normalAttribute,
morphNormal,
morphTargetsRelative,
a,
b,
c,
modifiedNormal,
)
}
}
} else if (positionAttribute !== undefined) {
// non-indexed buffer geometry
if (Array.isArray(material)) {
for (i = 0, il = groups.length; i < il; i++) {
group = groups[i]
groupMaterial = material[group.materialIndex as number]
start = Math.max(group.start, drawRange.start)
end = Math.min(group.start + group.count, drawRange.start + drawRange.count)
for (j = start, jl = end; j < jl; j += 3) {
a = j
b = j + 1
c = j + 2
_calculateMorphedAttributeData(
object,
groupMaterial,
positionAttribute,
morphPosition,
morphTargetsRelative,
a,
b,
c,
modifiedPosition,
)
_calculateMorphedAttributeData(
object,
groupMaterial,
normalAttribute,
morphNormal,
morphTargetsRelative,
a,
b,
c,
modifiedNormal,
)
}
}
} else {
start = Math.max(0, drawRange.start)
end = Math.min(positionAttribute.count, drawRange.start + drawRange.count)
for (i = start, il = end; i < il; i += 3) {
a = i
b = i + 1
c = i + 2
_calculateMorphedAttributeData(
object,
material,
positionAttribute,
morphPosition,
morphTargetsRelative,
a,
b,
c,
modifiedPosition,
)
_calculateMorphedAttributeData(
object,
material,
normalAttribute,
morphNormal,
morphTargetsRelative,
a,
b,
c,
modifiedNormal,
)
}
}
}
const morphedPositionAttribute = new Float32BufferAttribute(modifiedPosition, 3)
const morphedNormalAttribute = new Float32BufferAttribute(modifiedNormal, 3)
return {
positionAttribute: positionAttribute,
normalAttribute: normalAttribute,
morphedPositionAttribute: morphedPositionAttribute,
morphedNormalAttribute: morphedNormalAttribute,
}
} | the_stack |
import type {SetupWorkerApi} from 'msw'
import preval from 'preval.macro'
import React from 'react'
import ReactDOM from 'react-dom'
import {createBrowserHistory} from 'history'
import {setup as setupServer} from './server'
import {renderReactApp} from './react-app'
import type {
FileInfo,
LazyComponents,
Imports,
Backend,
DynamicImportFn,
DefaultDynamicImportFn,
} from './types'
// Inject the base stylesheet (normalize.css + workshop styles) into <head>
// before anything renders. The CSS text is inlined at build time via preval.
const styleTag = document.createElement('style')
const requiredStyles = [
  preval`module.exports = require('../other/css-file-to-string')('normalize.css/normalize.css')`,
  preval`module.exports = require('../other/css-file-to-string')('./other/workshop-app-styles.css')`,
  // this will happen when running the regular app and embedding the example
  // in an iframe.
  // pretty sure the types are wrong on this one... (It's been fixed in TS 4.2)
  // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition
  window.frameElement
    ? `#root{display:grid;place-items:center;height:100vh;}`
    : '',
].join('\n')
styleTag.appendChild(document.createTextNode(requiredStyles))
// prepend (not append) so app/user styles loaded later can still override
document.head.prepend(styleTag)
// inline style used to center standalone example screens
const fillScreenCenter = `padding:30px;min-height:100vh;display:grid;align-items:center;justify-content:center;`
// kept so the document can be restored after examples replace it
const originalDocumentElement = document.documentElement
// teardown callback for whatever renderer mounted the current app, if any
let unmount: ((el: HTMLElement) => void) | undefined
/**
 * Wires up a KCD workshop app: registers a lazy component for every exercise
 * file, optionally starts an MSW mock backend, and — driven by the browser
 * history — renders either the full React app or a single "isolated" exercise.
 */
function makeKCDWorkshopApp({
  imports,
  filesInfo,
  projectTitle,
  backend,
  options = {},
  ...otherWorkshopOptions
}: {
  imports: Imports
  filesInfo: Array<FileInfo>
  projectTitle: string
  backend?: Backend
  options?: {
    concurrentMode?: boolean
  }
} & {
  gitHubRepoUrl: string
}) {
  const lazyComponents: LazyComponents = {}

  // Only files with these extensions are renderable as components/instructions.
  const componentExtensions = ['.js', '.md', '.mdx', '.tsx', '.ts']

  for (const {ext, filePath} of filesInfo) {
    if (componentExtensions.includes(ext)) {
      lazyComponents[filePath] = React.lazy(
        moduleWithDefaultExport(imports, filePath),
      )
    }
  }

  if (backend) {
    const {
      handlers,
      quiet = true,
      serviceWorker = {url: '/mockServiceWorker.js'},
      ...rest
    } = backend
    // Don't start the MSW service worker during tests.
    if (process.env.NODE_ENV !== 'test') {
      const server = setupServer({handlers}) as SetupWorkerApi
      void server.start({
        quiet,
        serviceWorker,
        ...rest,
      })
    }
  }

  const history = createBrowserHistory()
  let previousLocation = history.location
  let previousIsIsolated: boolean | null = null

  // Mounts the given element into #root, tearing down any previous root first.
  function render(ui: React.ReactElement) {
    const rootEl = document.getElementById('root')
    if (rootEl) {
      unmount?.(rootEl)
    } else {
      // eslint-disable-next-line no-alert
      window.alert(
        'This document has no div with the ID of "root." Please add one... Or bug Kent about it...',
      )
      return
    }
    if (options.concurrentMode) {
      /* eslint-disable */
      // @ts-expect-error I don't care enough to be type safe here
      const root = (ReactDOM.unstable_createRoot || ReactDOM.createRoot)(rootEl)
      root.render(ui)
      unmount = () => root.unmount()
      /* eslint-enable */
    } else {
      ReactDOM.render(ui, rootEl)
      unmount = () => ReactDOM.unmountComponentAtNode(rootEl)
    }
  }

  function escapeForClassList(name: string) {
    // classList methods don't allow space or `/` characters
    return encodeURIComponent(name.replace(/\//g, '_'))
  }

  // Responds to every history change: syncs the body class, document title,
  // and decides whether to render an isolated exercise or the full app.
  function handleLocationChange(location = history.location) {
    const {pathname} = location
    // add location pathname to classList of the body
    // avoid the dev-tools flash of update by not updating the class name unnecessarily
    const prevClassName = escapeForClassList(previousLocation.pathname)
    const newClassName = escapeForClassList(pathname)
    if (document.body.classList.contains(prevClassName)) {
      document.body.classList.remove(
        escapeForClassList(previousLocation.pathname),
      )
    }
    if (!document.body.classList.contains(newClassName)) {
      document.body.classList.add(escapeForClassList(pathname))
    }
    // set the title to have info for the exercise
    const isIsolated = pathname.startsWith('/isolated')
    let info: FileInfo | undefined
    if (isIsolated) {
      // e.g. /isolated/exercise/01.js -> src/exercise/01.js
      const filePath = pathname.replace('/isolated', 'src')
      info = filesInfo.find(i => i.filePath === filePath)
    } else {
      // e.g. /1 -> instruction number 1
      const number = Number(pathname.split('/').slice(-1)[0])
      info = filesInfo.find(
        i => i.type === 'instruction' && i.number === number,
      )
    }
    if (isIsolated && !info) {
      document.body.innerHTML = `
        <div style="${fillScreenCenter}">
          <div>
            Sorry... nothing here. To open one of the exercises, go to
            <code>\`/exerciseNumber\`</code>, for example:
            <a href="/1"><code>/1</code></a>
          </div>
        </div>
      `
      return
    }
    // I honestly have no clue why, but there appears to be some kind of
    // race condition here with the title. It seems to get reset to the
    // title that's defined in the index.html after we set it :shrugs:
    setTimeout(() => {
      const title = [
        info
          ? [
              info.number ? `${info.number}. ` : '',
              info.title || info.filename,
            ].join('')
          : null,
        projectTitle,
      ]
        .filter(Boolean)
        .join(' | ')
      // the dev-tools flash the title as changed on HMR even
      // if it's not actually changed, so we'll only change it
      // when it's necessary:
      if (document.title !== title) {
        document.title = title
      }
    }, 20)
    if (isIsolated && info) {
      renderIsolated(moduleWithDefaultExport(imports, info.filePath))
    } else if (previousIsIsolated !== isIsolated) {
      // if we aren't going from isolated to the app, then we don't need
      // to bother rendering react anew. The app will handle that.
      renderReact()
    }
    previousLocation = location
    previousIsIsolated = isIsolated
  }

  // Renders an isolated exercise module. Function default exports are treated
  // as React components; string default exports are treated as raw HTML that
  // replaces the whole document (with scripts re-created so they execute).
  function renderIsolated(isolatedModuleImport: DynamicImportFn) {
    void isolatedModuleImport().then(async ({default: defaultExport}) => {
      if (history.location !== previousLocation) {
        // location has changed while we were getting the module
        // so don't bother doing anything... Let the next event handler
        // deal with it
        return
      }
      if (typeof defaultExport === 'function') {
        if (defaultExport === DO_NOT_RENDER) {
          return
        }
        // regular react component.
        render(React.createElement(defaultExport))
      } else if (typeof defaultExport === 'string') {
        // HTML file
        const domParser = new DOMParser()
        const newDocument = domParser.parseFromString(
          defaultExport,
          'text/html',
        )
        document.documentElement.replaceWith(newDocument.documentElement)

        // to get all the scripts to actually run, you have to create new script
        // elements, and no, cloneElement doesn't work unfortunately.
        // Apparently, scripts will only get loaded/run if you use createElement.
        const scripts = Array.from(document.querySelectorAll('script'))
        const loadingScriptsQueue = []
        for (const script of scripts) {
          // if we're dealing with an inline script, we need to wait for all other
          // scripts to finish loading before we run it
          if (!script.hasAttribute('src')) {
            // eslint-disable-next-line no-await-in-loop
            await Promise.all(loadingScriptsQueue)
          }
          // replace the script
          const newScript = document.createElement('script')
          for (const attrName of script.getAttributeNames()) {
            newScript.setAttribute(
              attrName,
              script.getAttribute(attrName) ?? '',
            )
          }
          newScript.innerHTML = script.innerHTML
          script.parentNode?.insertBefore(newScript, script)
          script.parentNode?.removeChild(script)

          // if the new script has a src, add it to the queue
          if (script.hasAttribute('src')) {
            loadingScriptsQueue.push(
              new Promise(resolve => {
                newScript.onload = resolve
              }),
            )
          }
        }
        // now make sure all src scripts are loaded before continuing
        await Promise.all(loadingScriptsQueue)

        // Babel will call this when the DOMContentLoaded event fires
        // but because the content has already loaded, that event will never
        // fire, so we'll run it ourselves
        if (window.Babel) {
          window.Babel.transformScriptTags()
        }
      }
      // otherwise we'll just expect that the file ran the thing it was supposed
      // to run and doesn't need any help.
    })
  }

  // Restores the original document (if an isolated HTML file replaced it)
  // and renders the full workshop React app.
  function renderReact() {
    if (document.documentElement !== originalDocumentElement) {
      document.documentElement.replaceWith(originalDocumentElement)
    }
    renderReactApp({
      history,
      projectTitle,
      filesInfo,
      lazyComponents,
      render,
      ...otherWorkshopOptions,
    })
  }

  history.listen(handleLocationChange)
  // kick it off to get us started
  handleLocationChange()
}
// React.lazy *requires* that you pass it a promise that resolves to a default export
// of a function that returns JSX.Element. But we want to be able to dynamically
// import a function that we don't actually render (because that file will render itself manually)
// so we use this as the fallback for that situation and explicitly do not bother rendering it
// Sentinel component: renders nothing. Compared by identity elsewhere to
// decide whether a lazily imported default export should be rendered at all.
function DO_NOT_RENDER() {
  return <React.Fragment />
}
/**
 * Adapts a raw dynamic import into one that always resolves with a renderable
 * `default` export. HTML imports pass through untouched; JS/TS/MD imports fall
 * back from `module.App` to `module.default` to the DO_NOT_RENDER sentinel,
 * and import failures resolve with a component that displays the error.
 */
function moduleWithDefaultExport(
  imports: Imports,
  filePath: string,
): DefaultDynamicImportFn {
  const load = imports[filePath]
  if (!load) throw new Error(`'${filePath}' does not exist in imports.`)

  // HTML modules already have the shape we need.
  if (filePath.endsWith('html')) return load as DefaultDynamicImportFn

  return () =>
    load().then(
      module => {
        // Markdown instruction files get their links target-blankified.
        if (/\.mdx?$/.test(filePath)) targetBlankifyInstructionLinks()
        return {default: module.App ?? module.default ?? DO_NOT_RENDER}
      },
      error => {
        console.error('Error importing a JS file', filePath, error)
        return {default: () => <div>{(error as Error).message}</div>}
      },
    )
}
// this is a pain, but we need to add target="_blank" to all the links
// in the markdown and even though I tried with useEffect, I couldn't
// get my useEffect to run *after* the markdown was rendered, so we're
// pulling this hack together 🙄
// Patches every anchor inside the rendered instruction markdown to open in a
// new tab. Delayed so the markdown has a chance to land in the DOM first.
function targetBlankifyInstructionLinks() {
  setTimeout(() => {
    const container = document.querySelector('.instruction-container')
    // this shouldn't happen, but it could...
    if (!container) return
    container.querySelectorAll('a').forEach(anchor => {
      anchor.setAttribute('target', '_blank')
      anchor.setAttribute('rel', 'noopener noreferrer nofollow')
    })
  }, 200)
}
export {makeKCDWorkshopApp}
/*
eslint
babel/no-unused-expressions: "off",
@typescript-eslint/no-explicit-any: "off",
@typescript-eslint/prefer-regexp-exec: "off",
react/jsx-no-useless-fragment: "off",
no-void: "off"
*/ | the_stack |
// IMPORTANT
// This file was generated by https://github.com/Bolisov/google-api-typings-generator. Please do not edit it manually.
// In case of any problems please post issue to https://github.com/Bolisov/google-api-typings-generator
// Generated from: https://translation.googleapis.com/$discovery/rest?version=v2
/// <reference types="gapi.client" />
declare namespace gapi.client {
  /** Load Google Cloud Translation API v2 */
  function load(name: "translate", version: "v2"): PromiseLike<void>;
  function load(name: "translate", version: "v2", callback: () => any): void;

  // Resource singletons, available once the "translate" client has loaded.
  const detections: translate.DetectionsResource;
  const languages: translate.LanguagesResource;
  const translations: translate.TranslationsResource;

  namespace translate {
    interface DetectLanguageRequest {
      /**
       * The input text upon which to perform language detection. Repeat this
       * parameter to perform language detection on multiple text inputs.
       */
      q?: string[];
    }
    interface DetectionsListResponse {
      /** Detection results, one entry per input text. */
      detections?: any[];
    }
    interface GetSupportedLanguagesRequest {
      /**
       * The language to use to return localized, human readable names of supported
       * languages.
       */
      target?: string;
    }
    interface LanguagesListResponse {
      /**
       * List of source/target languages supported by the translation API. If target parameter is unspecified, the list is sorted by the ASCII code point order
       * of the language code. If target parameter is specified, the list is sorted by the collation order of the language name in the target language.
       */
      languages?: LanguagesResource[];
    }
    // NOTE(review): the generator emits `LanguagesResource` twice (here as a
    // data shape, below as a request resource); TypeScript merges the two
    // declarations. Do not rename either one.
    interface LanguagesResource {
      /**
       * Supported language code, generally consisting of its ISO 639-1
       * identifier. (E.g. 'en', 'ja'). In certain cases, BCP-47 codes including
       * language + region identifiers are returned (e.g. 'zh-TW' and 'zh-CH')
       */
      language?: string;
      /** Human readable name of the language localized to the target language. */
      name?: string;
    }
    interface TranslateTextRequest {
      /**
       * The format of the source text, in either HTML (default) or plain-text. A
       * value of "html" indicates HTML and a value of "text" indicates plain-text.
       */
      format?: string;
      /**
       * The `model` type requested for this translation. Valid values are
       * listed in public documentation.
       */
      model?: string;
      /**
       * The input text to translate. Repeat this parameter to perform translation
       * operations on multiple text inputs.
       */
      q?: string[];
      /**
       * The language of the source text, set to one of the language codes listed in
       * Language Support. If the source language is not specified, the API will
       * attempt to identify the source language automatically and return it within
       * the response.
       */
      source?: string;
      /**
       * The language to use for translation of the input text, set to one of the
       * language codes listed in Language Support.
       */
      target?: string;
    }
    interface TranslationsListResponse {
      /** Translation results, one entry per input text. */
      translations?: TranslationsResource[];
    }
    // NOTE(review): `TranslationsResource` is also declared twice (data shape
    // here, request resource below) and merged by TypeScript.
    interface TranslationsResource {
      /**
       * The source language of the initial request, detected automatically, if
       * no source language was passed within the initial request. If the
       * source language was passed, auto-detection of the language will not
       * occur and this field will be empty.
       */
      detectedSourceLanguage?: string;
      /**
       * The `model` type used for this translation. Valid values are
       * listed in public documentation. Can be different from requested `model`.
       * Present only if specific model type was explicitly requested.
       */
      model?: string;
      /** Text translated into the target language. */
      translatedText?: string;
    }
    interface DetectionsResource {
      /** Detects the language of text within a request. */
      detect(request: {
        /** V1 error format. */
        "$.xgafv"?: string;
        /** OAuth access token. */
        access_token?: string;
        /** Data format for response. */
        alt?: string;
        /** OAuth bearer token. */
        bearer_token?: string;
        /** JSONP */
        callback?: string;
        /** Selector specifying which fields to include in a partial response. */
        fields?: string;
        /** API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. */
        key?: string;
        /** OAuth 2.0 token for the current user. */
        oauth_token?: string;
        /** Pretty-print response. */
        pp?: boolean;
        /** Returns response with indentations and line breaks. */
        prettyPrint?: boolean;
        /**
         * Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
         * Overrides userIp if both are provided.
         */
        quotaUser?: string;
        /** Legacy upload protocol for media (e.g. "media", "multipart"). */
        uploadType?: string;
        /** Upload protocol for media (e.g. "raw", "multipart"). */
        upload_protocol?: string;
      }): Request<DetectionsListResponse>;
      /** Detects the language of text within a request. */
      list(request: {
        /** V1 error format. */
        "$.xgafv"?: string;
        /** OAuth access token. */
        access_token?: string;
        /** Data format for response. */
        alt?: string;
        /** OAuth bearer token. */
        bearer_token?: string;
        /** JSONP */
        callback?: string;
        /** Selector specifying which fields to include in a partial response. */
        fields?: string;
        /** API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. */
        key?: string;
        /** OAuth 2.0 token for the current user. */
        oauth_token?: string;
        /** Pretty-print response. */
        pp?: boolean;
        /** Returns response with indentations and line breaks. */
        prettyPrint?: boolean;
        /**
         * The input text upon which to perform language detection. Repeat this
         * parameter to perform language detection on multiple text inputs.
         */
        q: string;
        /**
         * Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
         * Overrides userIp if both are provided.
         */
        quotaUser?: string;
        /** Legacy upload protocol for media (e.g. "media", "multipart"). */
        uploadType?: string;
        /** Upload protocol for media (e.g. "raw", "multipart"). */
        upload_protocol?: string;
      }): Request<DetectionsListResponse>;
    }
    interface LanguagesResource {
      /** Returns a list of supported languages for translation. */
      list(request: {
        /** V1 error format. */
        "$.xgafv"?: string;
        /** OAuth access token. */
        access_token?: string;
        /** Data format for response. */
        alt?: string;
        /** OAuth bearer token. */
        bearer_token?: string;
        /** JSONP */
        callback?: string;
        /** Selector specifying which fields to include in a partial response. */
        fields?: string;
        /** API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. */
        key?: string;
        /** The model type for which supported languages should be returned. */
        model?: string;
        /** OAuth 2.0 token for the current user. */
        oauth_token?: string;
        /** Pretty-print response. */
        pp?: boolean;
        /** Returns response with indentations and line breaks. */
        prettyPrint?: boolean;
        /**
         * Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
         * Overrides userIp if both are provided.
         */
        quotaUser?: string;
        /**
         * The language to use to return localized, human readable names of supported
         * languages.
         */
        target?: string;
        /** Legacy upload protocol for media (e.g. "media", "multipart"). */
        uploadType?: string;
        /** Upload protocol for media (e.g. "raw", "multipart"). */
        upload_protocol?: string;
      }): Request<LanguagesListResponse>;
    }
    interface TranslationsResource {
      /** Translates input text, returning translated text. */
      list(request: {
        /** V1 error format. */
        "$.xgafv"?: string;
        /** OAuth access token. */
        access_token?: string;
        /** Data format for response. */
        alt?: string;
        /** OAuth bearer token. */
        bearer_token?: string;
        /** JSONP */
        callback?: string;
        /** The customization id for translate */
        cid?: string;
        /** Selector specifying which fields to include in a partial response. */
        fields?: string;
        /**
         * The format of the source text, in either HTML (default) or plain-text. A
         * value of "html" indicates HTML and a value of "text" indicates plain-text.
         */
        format?: string;
        /** API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. */
        key?: string;
        /**
         * The `model` type requested for this translation. Valid values are
         * listed in public documentation.
         */
        model?: string;
        /** OAuth 2.0 token for the current user. */
        oauth_token?: string;
        /** Pretty-print response. */
        pp?: boolean;
        /** Returns response with indentations and line breaks. */
        prettyPrint?: boolean;
        /**
         * The input text to translate. Repeat this parameter to perform translation
         * operations on multiple text inputs.
         */
        q: string;
        /**
         * Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
         * Overrides userIp if both are provided.
         */
        quotaUser?: string;
        /**
         * The language of the source text, set to one of the language codes listed in
         * Language Support. If the source language is not specified, the API will
         * attempt to identify the source language automatically and return it within
         * the response.
         */
        source?: string;
        /**
         * The language to use for translation of the input text, set to one of the
         * language codes listed in Language Support.
         */
        target: string;
        /** Legacy upload protocol for media (e.g. "media", "multipart"). */
        uploadType?: string;
        /** Upload protocol for media (e.g. "raw", "multipart"). */
        upload_protocol?: string;
      }): Request<TranslationsListResponse>;
      /** Translates input text, returning translated text. */
      translate(request: {
        /** V1 error format. */
        "$.xgafv"?: string;
        /** OAuth access token. */
        access_token?: string;
        /** Data format for response. */
        alt?: string;
        /** OAuth bearer token. */
        bearer_token?: string;
        /** JSONP */
        callback?: string;
        /** Selector specifying which fields to include in a partial response. */
        fields?: string;
        /** API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. */
        key?: string;
        /** OAuth 2.0 token for the current user. */
        oauth_token?: string;
        /** Pretty-print response. */
        pp?: boolean;
        /** Returns response with indentations and line breaks. */
        prettyPrint?: boolean;
        /**
         * Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
         * Overrides userIp if both are provided.
         */
        quotaUser?: string;
        /** Legacy upload protocol for media (e.g. "media", "multipart"). */
        uploadType?: string;
        /** Upload protocol for media (e.g. "raw", "multipart"). */
        upload_protocol?: string;
      }): Request<TranslationsListResponse>;
    }
  }
}
import {
DinoRouter,
IRouterConfig,
ObjectUtility,
DinoParser,
DinoUtility
} from '../../index';
describe('modules.router.dino.router.spec', () => {
// DinoRouter.create should simply delegate to the constructor and hand back
// a DinoRouter instance.
it('static_create.invoke_constructor', () => {
    const config = { routerCb: () => null } as any as IRouterConfig;
    const instance = DinoRouter.create(config);
    expect(instance instanceof DinoRouter).toBeTruthy();
});
// resolve() should hand the middleware straight to the DI container and,
// when task context is disabled, return the container's result untouched.
it('resolve.enableTaskContext_false', () => {
    let config: IRouterConfig = {
        diContainer: {
            resolve: m => {
                expect(m).toBe(String);
                return 'resolved';
            }
        },
        enableTaskContext: false,
        routerCb: () => null
    } as any;
    spyOn(ObjectUtility, 'replaceObjectReferences')
        .and.callFake(() => 'replaced');
    let dinoRouter = new DinoRouter(config);
    let o = dinoRouter.resolve(String, { context: 'test' });
    expect(o).toBe('resolved');
    // Context replacement must be skipped entirely when disabled.
    expect(ObjectUtility.replaceObjectReferences).toHaveBeenCalledTimes(0);
});
// With task context enabled, the resolved instance is post-processed by
// ObjectUtility.replaceObjectReferences and that result is returned.
it('resolve.enableTaskContext_true', () => {
    let middleware;
    let config: IRouterConfig = {
        diContainer: {
            resolve: m => {
                middleware = m;
                return 'resolved';
            }
        },
        enableTaskContext: true,
        routerCb: () => null
    } as any;
    spyOn(ObjectUtility, 'replaceObjectReferences')
        .and.callFake(() => 'replaced');
    let dinoRouter = new DinoRouter(config);
    let o = dinoRouter.resolve(String, { context: 'test' });
    expect(o).toBe('replaced');
    expect(middleware).toBe(String);
});
// expressRouter() should return whatever routerCb produced (this.router).
// Fixed: removed the unused `middleware` local (copy-paste leftover from the
// resolve tests above).
it('expressRouter.return_this.router', () => {
    let config: IRouterConfig = { routerCb: () => 45 } as any;
    let dinoRouter = new DinoRouter(config);
    let o = dinoRouter.expressRouter();
    expect(o).toBe(45);
});
// A sync middleware provider gets parsed, resolved per-request (with the
// dino object from res.locals), and its invoke() receives (req, res, next, data).
it('registerMiddleware.when_isSyncMiddleWare', () => {
    let invoked = false;
    let callback;
    let request = { path: 'test' };
    let config: IRouterConfig = {
        routerCb: () => {
            return { use: cb => callback = cb };
        }
    } as any;
    let provider = { useClass: Function, data: 'sampledata' };
    let res = { locals: { dino: 45 } };
    spyOn(DinoParser, 'parseMiddlewareProvider').and.callFake(a => a);
    spyOn(DinoUtility, 'isSyncMiddleWare').and.callFake(() => true);
    let dinoRouter = new DinoRouter(config);
    // Spy on the same object method itself
    spyOn(dinoRouter, 'resolve')
        .and.callFake((middleware, dino) => {
            expect(middleware).toBe(Function);
            expect(dino).toBe(res.locals.dino);
            return {
                invoke: (req, resp, next, data) => {
                    // Should invoke all expects
                    expect(req).toBe(request);
                    expect(resp).toBe(res);
                    expect(next()).toBe('invoked');
                    expect(data).toBe(provider.data);
                    invoked = true;
                }
            };
        });
    dinoRouter.registerMiddleware(provider);
    callback(request, res, () => 'invoked');
    expect(DinoUtility.isSyncMiddleWare).toHaveBeenCalledTimes(1);
    expect(DinoParser.parseMiddlewareProvider).toHaveBeenCalledTimes(1);
    expect(dinoRouter.resolve).toHaveBeenCalledTimes(1);
    expect(invoked).toBeTruthy();
});
// Same contract for async middleware; the handler is awaited.
it('registerMiddleware.when_isAsyncMiddleWare', async () => {
    let callback;
    let request = { path: 'test' };
    let invoked = false;
    let config: IRouterConfig = {
        routerCb: () => {
            return { use: cb => callback = cb };
        }
    } as any;
    let provider = { useClass: Function, data: 'sampledata' };
    let res = { locals: { dino: 45 } };
    spyOn(DinoParser, 'parseMiddlewareProvider').and.callFake(a => a);
    spyOn(DinoUtility, 'isSyncMiddleWare').and.callFake(() => false);
    spyOn(DinoUtility, 'isAsyncMiddleWare').and.callFake(() => true);
    let dinoRouter = new DinoRouter(config);
    // Spy on the same object method itself
    spyOn(dinoRouter, 'resolve')
        .and.callFake((middleware, dino) => {
            expect(middleware).toBe(Function);
            expect(dino).toBe(res.locals.dino);
            return {
                invoke: (req, resp, next, data) => {
                    // Should invoke all expects
                    expect(req).toBe(request);
                    expect(resp).toBe(res);
                    expect(next()).toBe('invoked');
                    expect(data).toBe(provider.data);
                    invoked = true;
                }
            };
        });
    dinoRouter.registerMiddleware(provider);
    await callback(request, res, () => 'invoked');
    expect(DinoUtility.isSyncMiddleWare).toHaveBeenCalledTimes(1);
    expect(DinoUtility.isAsyncMiddleWare).toHaveBeenCalledTimes(1);
    expect(DinoParser.parseMiddlewareProvider).toHaveBeenCalledTimes(1);
    expect(dinoRouter.resolve).toHaveBeenCalledTimes(1);
    expect(invoked).toBeTruthy();
});
// An async middleware that throws should forward the error via next(err).
it('registerMiddleware.throwsError_when_isAsyncMiddleWare', async () => {
    let callback;
    let err;
    let request = { path: 'test' };
    let config: IRouterConfig = {
        routerCb: () => {
            return { use: cb => callback = cb };
        }
    } as any;
    let provider = { useClass: Function, data: 'sampledata' };
    let res = { locals: { dino: 45 } };
    spyOn(DinoParser, 'parseMiddlewareProvider').and.callFake(a => a);
    spyOn(DinoUtility, 'isSyncMiddleWare').and.callFake(() => false);
    spyOn(DinoUtility, 'isAsyncMiddleWare').and.callFake(() => true);
    let dinoRouter = new DinoRouter(config);
    // Spy on the same object method itself
    spyOn(dinoRouter, 'resolve')
        .and.callFake((middleware, dino) => {
            expect(middleware).toBe(Function);
            expect(dino).toBe(res.locals.dino);
            return {
                invoke: (req, resp, next, data) => {
                    expect(req).toBe(request);
                    expect(resp).toBe(res);
                    expect(data).toBe(provider.data);
                    throw new Error('TestError');
                }
            };
        });
    dinoRouter.registerMiddleware(provider);
    await callback(request, res, e => err = e);
    expect(DinoUtility.isSyncMiddleWare).toHaveBeenCalledTimes(1);
    expect(DinoUtility.isAsyncMiddleWare).toHaveBeenCalledTimes(1);
    expect(DinoParser.parseMiddlewareProvider).toHaveBeenCalledTimes(1);
    expect(dinoRouter.resolve).toHaveBeenCalledTimes(1);
    expect(err).toEqual(new Error('TestError'));
});
// When the provider is neither sync nor async middleware, nothing is
// registered on the router (use() is never called).
it('registerMiddleware.when_not_a_middleware', () => {
    let callback;
    let config: IRouterConfig = {
        routerCb: () => {
            return { use: cb => callback = cb };
        }
    } as any;
    spyOn(DinoParser, 'parseMiddlewareProvider').and.callFake(a => a);
    spyOn(DinoUtility, 'isSyncMiddleWare').and.callFake(() => false);
    spyOn(DinoUtility, 'isAsyncMiddleWare').and.callFake(() => false);
    let dinoRouter = new DinoRouter(config);
    dinoRouter.registerMiddleware(String);
    expect(DinoUtility.isSyncMiddleWare).toHaveBeenCalledTimes(1);
    expect(DinoUtility.isAsyncMiddleWare).toHaveBeenCalledTimes(1);
    expect(DinoParser.parseMiddlewareProvider).toHaveBeenCalledTimes(1);
    expect(callback).toBeUndefined();
});
// A sync begin-action-filter gets resolved per-request and its
// beforeExecution() receives (req, res, next, data).
it('registerBeginActionFilter.when_isSyncActionFilter', () => {
    let invoked = false;
    let callback;
    let request = { path: 'test' };
    let config: IRouterConfig = {
        routerCb: () => {
            return { use: cb => callback = cb };
        }
    } as any;
    let provider = { useClass: Function, data: 'sampledata' };
    let res = { locals: { dino: 45 } };
    spyOn(DinoParser, 'parseMiddlewareProvider').and.callFake(a => a);
    spyOn(DinoUtility, 'isSyncActionFilter').and.callFake(() => true);
    let dinoRouter = new DinoRouter(config);
    // Spy on the same object method itself
    spyOn(dinoRouter, 'resolve')
        .and.callFake((middleware, dino) => {
            expect(middleware).toBe(Function);
            expect(dino).toBe(res.locals.dino);
            return {
                beforeExecution: (req, resp, next, data) => {
                    expect(req).toBe(request);
                    expect(resp).toBe(res);
                    expect(next()).toBe('invoked');
                    expect(data).toBe(provider.data);
                    invoked = true;
                }
            };
        });
    dinoRouter.registerBeginActionFilter(provider);
    callback(request, res, () => 'invoked');
    expect(DinoUtility.isSyncActionFilter).toHaveBeenCalledTimes(1);
    expect(DinoParser.parseMiddlewareProvider).toHaveBeenCalledTimes(1);
    expect(invoked).toBeTruthy();
});
// Same contract for an async begin-action-filter; the handler is awaited.
it('registerBeginActionFilter.when_isAsyncActionFilter', async () => {
    let callback;
    let invoked = false;
    let request = { path: 'test' };
    let config: IRouterConfig = {
        routerCb: () => {
            return { use: cb => callback = cb };
        }
    } as any;
    let provider = { useClass: Function, data: 'sampledata' };
    let res = { locals: { dino: 45 } };
    spyOn(DinoParser, 'parseMiddlewareProvider').and.callFake(a => a);
    spyOn(DinoUtility, 'isSyncActionFilter').and.callFake(() => false);
    spyOn(DinoUtility, 'isAsyncActionFilter').and.callFake(() => true);
    let dinoRouter = new DinoRouter(config);
    // Spy on the same object method itself
    spyOn(dinoRouter, 'resolve')
        .and.callFake((middleware, dino) => {
            expect(middleware).toBe(Function);
            expect(dino).toBe(res.locals.dino);
            return {
                beforeExecution: (req, resp, next, data) => {
                    expect(req).toBe(request);
                    expect(resp).toBe(res);
                    expect(next()).toBe('invoked');
                    expect(data).toBe(provider.data);
                    invoked = true;
                }
            };
        });
    dinoRouter.registerBeginActionFilter(provider);
    await callback(request, res, () => 'invoked');
    expect(DinoUtility.isSyncActionFilter).toHaveBeenCalledTimes(1);
    expect(DinoUtility.isAsyncActionFilter).toHaveBeenCalledTimes(1);
    expect(DinoParser.parseMiddlewareProvider).toHaveBeenCalledTimes(1);
    expect(invoked).toBeTruthy();
});
// An async begin-action-filter that throws should forward the error via next(err).
it('registerBeginActionFilter.throwsError_when_isAsyncActionFilter', async () => {
    let callback;
    let err;
    let request = { path: 'test' };
    let config: IRouterConfig = {
        routerCb: () => {
            return { use: cb => callback = cb };
        }
    } as any;
    let provider = { useClass: Function, data: 'sampledata' };
    let res = { locals: { dino: 45 } };
    spyOn(DinoParser, 'parseMiddlewareProvider').and.callFake(a => a);
    spyOn(DinoUtility, 'isSyncActionFilter').and.callFake(() => false);
    spyOn(DinoUtility, 'isAsyncActionFilter').and.callFake(() => true);
    let dinoRouter = new DinoRouter(config);
    // Spy on the same object method itself
    spyOn(dinoRouter, 'resolve')
        .and.callFake((middleware, dino) => {
            expect(middleware).toBe(Function);
            expect(dino).toBe(res.locals.dino);
            return {
                beforeExecution: (req, resp, next, data) => {
                    expect(req).toBe(request);
                    expect(resp).toBe(res);
                    // expect(c) not invoked because it should invoke next(err)
                    expect(data).toBe(provider.data);
                    throw new Error('TestError');
                }
            };
        });
    dinoRouter.registerBeginActionFilter(provider);
    await callback(request, res, e => err = e);
    expect(DinoUtility.isSyncActionFilter).toHaveBeenCalledTimes(1);
    expect(DinoUtility.isAsyncActionFilter).toHaveBeenCalledTimes(1);
    expect(DinoParser.parseMiddlewareProvider).toHaveBeenCalledTimes(1);
    expect(err).toEqual(new Error('TestError'));
});
// When the provider is not an action filter at all, nothing is registered
// on the router (use() is never called).
it('registerBeginActionFilter.when_not_an_actionFilter', () => {
    let callback;
    let config: IRouterConfig = {
        routerCb: () => {
            return { use: cb => callback = cb };
        }
    } as any;
    spyOn(DinoParser, 'parseMiddlewareProvider').and.callFake(a => a);
    spyOn(DinoUtility, 'isSyncActionFilter').and.callFake(() => false);
    spyOn(DinoUtility, 'isAsyncActionFilter').and.callFake(() => false);
    let dinoRouter = new DinoRouter(config);
    dinoRouter.registerBeginActionFilter(String);
    expect(DinoUtility.isSyncActionFilter).toHaveBeenCalledTimes(1);
    expect(DinoUtility.isAsyncActionFilter).toHaveBeenCalledTimes(1);
    expect(DinoParser.parseMiddlewareProvider).toHaveBeenCalledTimes(1);
    expect(callback).toBeUndefined();
});
// A sync after-action-filter gets resolved per-request and its
// afterExecution() receives (req, res, next, result, data) where result is
// res.locals.dino.result.
// Fixed: the response assertion compared `res` to itself (`expect(res).toBe(res)`,
// always true); it must assert on the `resp` argument like the sibling
// sync/async filter tests do.
it('registerAfterActionFilter.when_isSyncActionFilter', () => {
    let invoked = false;
    let callback;
    let request = { path: 'test' };
    let config: IRouterConfig = {
        routerCb: () => {
            return { use: cb => callback = cb };
        }
    } as any;
    let res = { locals: { dino: { result: 45 } } };
    let provider = { useClass: Function, data: 'sampledata' };
    spyOn(DinoParser, 'parseMiddlewareProvider').and.callFake(a => a);
    spyOn(DinoUtility, 'isSyncActionFilter').and.callFake(() => true);
    let dinoRouter = new DinoRouter(config);
    // Spy on the same object method itself
    spyOn(dinoRouter, 'resolve')
        .and.callFake((middleware, dino) => {
            expect(middleware).toBe(Function);
            expect(dino).toBe(res.locals.dino);
            return {
                afterExecution: (req, resp, next, result, data) => {
                    expect(req).toBe(request);
                    expect(resp).toBe(res);
                    expect(next()).toBe('invoked');
                    expect(result).toBe(res.locals.dino.result);
                    expect(data).toBe(provider.data);
                    invoked = true;
                }
            };
        });
    dinoRouter.registerAfterActionFilter(provider);
    callback(request, res, () => 'invoked');
    expect(DinoUtility.isSyncActionFilter).toHaveBeenCalledTimes(1);
    expect(DinoParser.parseMiddlewareProvider).toHaveBeenCalledTimes(1);
    expect(invoked).toBeTruthy();
});
// Same contract for an async after-action-filter; the handler is awaited.
it('registerAfterActionFilter.when_isAsyncActionFilter', async () => {
    let callback;
    let request = { path: 'test' };
    let invoked = false;
    let config: IRouterConfig = {
        routerCb: () => {
            return { use: cb => callback = cb };
        }
    } as any;
    let provider = { useClass: Function, data: 'sampledata' };
    let res = { locals: { dino: { result: 45 } } };
    spyOn(DinoParser, 'parseMiddlewareProvider').and.callFake(a => a);
    spyOn(DinoUtility, 'isSyncActionFilter').and.callFake(() => false);
    spyOn(DinoUtility, 'isAsyncActionFilter').and.callFake(() => true);
    let dinoRouter = new DinoRouter(config);
    // Spy on the same object method itself
    spyOn(dinoRouter, 'resolve')
        .and.callFake((middleware, dino) => {
            expect(middleware).toBe(Function);
            expect(dino).toBe(res.locals.dino);
            return {
                afterExecution: (req, resp, next, result, data) => {
                    expect(req).toBe(request);
                    expect(resp).toBe(res);
                    expect(next()).toBe('invoked');
                    expect(result).toBe(res.locals.dino.result);
                    expect(data).toBe(provider.data);
                    invoked = true;
                }
            };
        });
    dinoRouter.registerAfterActionFilter(provider);
    await callback(request, res, () => 'invoked');
    expect(DinoUtility.isSyncActionFilter).toHaveBeenCalledTimes(1);
    expect(DinoUtility.isAsyncActionFilter).toHaveBeenCalledTimes(1);
    expect(DinoParser.parseMiddlewareProvider).toHaveBeenCalledTimes(1);
    expect(invoked).toBeTruthy();
});
// An async after-action-filter that throws should forward the error via next(err).
it('registerAfterActionFilter.throwsError_when_isAsyncActionFilter', async () => {
    let callback;
    let err;
    let request = { path: 'test' };
    let config: IRouterConfig = {
        routerCb: () => {
            return { use: cb => callback = cb };
        }
    } as any;
    let provider = { useClass: Function, data: 'sampledata' };
    let res = { locals: { dino: { result: 45 } } };
    spyOn(DinoParser, 'parseMiddlewareProvider').and.callFake(a => a);
    spyOn(DinoUtility, 'isSyncActionFilter').and.callFake(() => false);
    spyOn(DinoUtility, 'isAsyncActionFilter').and.callFake(() => true);
    let dinoRouter = new DinoRouter(config);
    // Spy on the same object method itself
    spyOn(dinoRouter, 'resolve')
        .and.callFake((middleware, dino) => {
            expect(middleware).toBe(Function);
            expect(dino).toBe(res.locals.dino);
            return {
                afterExecution: (req, resp, next, result, data) => {
                    expect(req).toBe(request);
                    expect(resp).toBe(res);
                    // expect(c) not invoked because it should invoke next(err)
                    expect(result).toBe(res.locals.dino.result);
                    expect(data).toBe(provider.data);
                    throw new Error('TestError');
                }
            };
        });
    dinoRouter.registerAfterActionFilter(provider);
    await callback(request, res, e => err = e);
    expect(DinoUtility.isSyncActionFilter).toHaveBeenCalledTimes(1);
    expect(DinoUtility.isAsyncActionFilter).toHaveBeenCalledTimes(1);
    expect(DinoParser.parseMiddlewareProvider).toHaveBeenCalledTimes(1);
    expect(err).toEqual(new Error('TestError'));
});
it('registerAfterActionFilter.when_not_an_actionFilter', () => {
let callback;
let config: IRouterConfig = {
routerCb: () => {
return { use: cb => callback = cb };
}
} as any;
spyOn(DinoParser, 'parseMiddlewareProvider').and.callFake(a => a);
spyOn(DinoUtility, 'isSyncActionFilter').and.callFake(() => false);
spyOn(DinoUtility, 'isAsyncActionFilter').and.callFake(() => false);
let dinoRouter = new DinoRouter(config);
dinoRouter.registerAfterActionFilter(String);
expect(DinoUtility.isSyncActionFilter).toHaveBeenCalledTimes(1);
expect(DinoUtility.isAsyncActionFilter).toHaveBeenCalledTimes(1);
expect(DinoParser.parseMiddlewareProvider).toHaveBeenCalledTimes(1);
expect(callback).toBeUndefined();
});
// Verifies that a sync result filter is registered as router middleware and
// that its invoke hook receives (req, resp, next, result, data) intact.
it('registerResultFilter.when_isSyncResultFilter', () => {
    let invoked = false;
    let callback; // middleware captured from router.use()
    let request = { path: 'test' };
    let config: IRouterConfig = {
        routerCb: () => {
            return { use: cb => callback = cb };
        }
    } as any;
    let res = { locals: { dino: { result: 45 } } };
    let provider = { useClass: Function, data: 'sampledata' };
    spyOn(DinoParser, 'parseMiddlewareProvider').and.callFake(a => a);
    spyOn(DinoUtility, 'isSyncResultFilter').and.callFake(() => true);
    let dinoRouter = new DinoRouter(config);
    // Spy on the same object method itself
    spyOn(dinoRouter, 'resolve')
        .and.callFake((middleware, dino) => {
            expect(middleware).toBe(Function);
            expect(dino).toBe(res.locals.dino);
            return {
                invoke: (req, resp, next, result, data) => {
                    expect(req).toBe(request);
                    // Fixed: originally asserted expect(res).toBe(res), a
                    // tautology. The filter's response argument is what must
                    // be compared against the express response object.
                    expect(resp).toBe(res);
                    expect(next()).toBe('invoked');
                    expect(result).toBe(res.locals.dino.result);
                    expect(data).toBe(provider.data);
                    invoked = true;
                }
            };
        });
    dinoRouter.registerResultFilter(provider);
    callback(request, res, () => 'invoked');
    expect(DinoUtility.isSyncResultFilter).toHaveBeenCalledTimes(1);
    expect(DinoParser.parseMiddlewareProvider).toHaveBeenCalledTimes(1);
    expect(invoked).toBeTruthy();
});
// Verifies that an async result filter is registered as router middleware and
// that its invoke hook receives (req, resp, next, result, data) intact.
it('registerResultFilter.when_isAsyncResultFilter', async () => {
    let callback; // captures the middleware fn handed to router.use()
    let request = { path: 'test' };
    let invoked = false;
    let config: IRouterConfig = {
        routerCb: () => {
            return { use: cb => callback = cb };
        }
    } as any;
    let provider = { useClass: Function, data: 'sampledata' };
    let res = { locals: { dino: { result: 45 } } };
    spyOn(DinoParser, 'parseMiddlewareProvider').and.callFake(a => a);
    spyOn(DinoUtility, 'isSyncResultFilter').and.callFake(() => false);
    spyOn(DinoUtility, 'isAsyncResultFilter').and.callFake(() => true);
    let dinoRouter = new DinoRouter(config);
    // Spy on the same object method itself
    spyOn(dinoRouter, 'resolve')
        .and.callFake((middleware, dino) => {
            expect(middleware).toBe(Function);
            expect(dino).toBe(res.locals.dino);
            return {
                invoke: (req, resp, next, result, data) => {
                    expect(req).toBe(request);
                    expect(resp).toBe(res);
                    expect(next()).toBe('invoked');
                    expect(result).toBe(res.locals.dino.result);
                    expect(data).toBe(provider.data);
                    invoked = true;
                }
            };
        });
    dinoRouter.registerResultFilter(provider);
    // Async filters are awaited by the wrapper middleware.
    await callback(request, res, () => 'invoked');
    expect(DinoUtility.isSyncResultFilter).toHaveBeenCalledTimes(1);
    expect(DinoUtility.isAsyncResultFilter).toHaveBeenCalledTimes(1);
    expect(DinoParser.parseMiddlewareProvider).toHaveBeenCalledTimes(1);
    expect(invoked).toBeTruthy();
});
// Error path: when the async invoke hook throws, the wrapper middleware
// must forward the error to express via next(err).
it('registerResultFilter.throwsError_when_isAsyncResultFilter', async () => {
    let callback; // middleware captured from router.use()
    let err; // receives the error forwarded through next(err)
    let request = { path: 'test' };
    let config: IRouterConfig = {
        routerCb: () => {
            return { use: cb => callback = cb };
        }
    } as any;
    let provider = { useClass: Function, data: 'sampledata' };
    let res = { locals: { dino: { result: 45 } } };
    spyOn(DinoParser, 'parseMiddlewareProvider').and.callFake(a => a);
    spyOn(DinoUtility, 'isSyncResultFilter').and.callFake(() => false);
    spyOn(DinoUtility, 'isAsyncResultFilter').and.callFake(() => true);
    let dinoRouter = new DinoRouter(config);
    // Spy on the same object method itself
    spyOn(dinoRouter, 'resolve')
        .and.callFake((middleware, dino) => {
            expect(middleware).toBe(Function);
            expect(dino).toBe(res.locals.dino);
            return {
                invoke: (req, resp, next, result, data) => {
                    expect(req).toBe(request);
                    expect(resp).toBe(res);
                    // expect(c) not invoked because it should invoke next(err)
                    expect(result).toBe(res.locals.dino.result);
                    expect(data).toBe(provider.data);
                    throw new Error('TestError');
                }
            };
        });
    dinoRouter.registerResultFilter(provider);
    await callback(request, res, e => err = e);
    expect(DinoUtility.isSyncResultFilter).toHaveBeenCalledTimes(1);
    expect(DinoUtility.isAsyncResultFilter).toHaveBeenCalledTimes(1);
    expect(DinoParser.parseMiddlewareProvider).toHaveBeenCalledTimes(1);
    // The thrown error must surface through next(err).
    expect(err).toEqual(new Error('TestError'));
});
it('registerResultFilter.when_not_an_resultFilter', () => {
let callback;
let config: IRouterConfig = {
routerCb: () => {
return { use: cb => callback = cb };
}
} as any;
spyOn(DinoParser, 'parseMiddlewareProvider').and.callFake(a => a);
spyOn(DinoUtility, 'isSyncResultFilter').and.callFake(() => false);
spyOn(DinoUtility, 'isAsyncResultFilter').and.callFake(() => false);
let dinoRouter = new DinoRouter(config);
dinoRouter.registerResultFilter(String);
expect(DinoUtility.isSyncResultFilter).toHaveBeenCalledTimes(1);
expect(DinoUtility.isAsyncResultFilter).toHaveBeenCalledTimes(1);
expect(DinoParser.parseMiddlewareProvider).toHaveBeenCalledTimes(1);
expect(callback).toBeUndefined();
});
// Verifies a sync exception filter is mounted on the app at the route uri and
// that its invoke hook receives the express error signature (err, req, resp, next).
it('registerExceptionFilter.when_isSyncExceptionFilter', () => {
    let invoked = false;
    let callback; // error middleware captured from app.use(uri, cb)
    let request = { path: 'test' };
    let config: IRouterConfig = { routerCb: () => null } as any;
    let ruri = 'sampleuri';
    let app = {
        use: (uri, cb) => {
            expect(uri).toBe(ruri);
            callback = cb;
        }
    };
    let res = { locals: { dino: 45 } };
    let provider = { useClass: Function, data: 'sampledata' };
    spyOn(DinoParser, 'parseMiddlewareProvider').and.callFake(a => a);
    spyOn(DinoUtility, 'isSyncExceptionFilter').and.callFake(() => true);
    let dinoRouter = new DinoRouter(config);
    // Spy on the same object method itself
    spyOn(dinoRouter, 'resolve')
        .and.callFake((middleware, dino) => {
            expect(middleware).toBe(Function);
            expect(dino).toBe(res.locals.dino);
            return {
                invoke: (err, req, resp, next) => {
                    expect(err).toEqual(new Error('TestError'));
                    expect(req).toBe(request);
                    expect(resp).toBe(res);
                    expect(next()).toBe('invoked');
                    invoked = true;
                }
            };
        });
    dinoRouter.registerExceptionFilter(app as any, ruri, provider);
    callback(new Error('TestError'), request, res, () => 'invoked');
    expect(DinoUtility.isSyncExceptionFilter).toHaveBeenCalledTimes(1);
    expect(DinoParser.parseMiddlewareProvider).toHaveBeenCalledTimes(1);
    expect(invoked).toBeTruthy();
});
// Same as above, but the async filter variant: the wrapper awaits invoke().
it('registerExceptionFilter.when_isAsyncExceptionFilter', async () => {
    let invoked = false;
    let callback; // error middleware captured from app.use(uri, cb)
    let request = { path: 'test' };
    let config: IRouterConfig = { routerCb: () => null } as any;
    let ruri = 'sampleuri';
    let app = {
        use: (uri, cb) => {
            expect(uri).toBe(ruri);
            callback = cb;
        }
    };
    let res = { locals: { dino: 45 } };
    let provider = { useClass: Function, data: 'sampledata' };
    spyOn(DinoParser, 'parseMiddlewareProvider').and.callFake(a => a);
    spyOn(DinoUtility, 'isSyncExceptionFilter').and.callFake(() => false);
    spyOn(DinoUtility, 'isAsyncExceptionFilter').and.callFake(() => true);
    let dinoRouter = new DinoRouter(config);
    // Spy on the same object method itself
    spyOn(dinoRouter, 'resolve')
        .and.callFake((middleware, dino) => {
            expect(middleware).toBe(Function);
            expect(dino).toBe(res.locals.dino);
            return {
                invoke: (err, req, resp, next) => {
                    expect(err).toEqual(new Error('TestError'));
                    expect(req).toBe(request);
                    expect(resp).toBe(res);
                    expect(next()).toBe('invoked');
                    invoked = true;
                }
            };
        });
    dinoRouter.registerExceptionFilter(app as any, ruri, provider);
    await callback(new Error('TestError'), request, res, () => 'invoked');
    expect(DinoUtility.isSyncExceptionFilter).toHaveBeenCalledTimes(1);
    expect(DinoUtility.isAsyncExceptionFilter).toHaveBeenCalledTimes(1);
    expect(DinoParser.parseMiddlewareProvider).toHaveBeenCalledTimes(1);
    expect(invoked).toBeTruthy();
});
// Error path: when the async exception filter itself throws, the new error
// must be forwarded to express via next(err).
it('registerExceptionFilter.throwsError_when_isAsyncExceptionFilter', async () => {
    let invoked = false;
    let callback; // error middleware captured from app.use(uri, cb)
    let request = { path: 'test' };
    let err; // receives the error forwarded through next(err)
    let config: IRouterConfig = { routerCb: () => null } as any;
    let ruri = 'sampleuri';
    let app = {
        use: (uri, cb) => {
            expect(uri).toBe(ruri);
            callback = cb;
        }
    };
    let res = { locals: { dino: 45 } };
    let provider = { useClass: Function, data: 'sampledata' };
    spyOn(DinoParser, 'parseMiddlewareProvider').and.callFake(a => a);
    spyOn(DinoUtility, 'isSyncExceptionFilter').and.callFake(() => false);
    spyOn(DinoUtility, 'isAsyncExceptionFilter').and.callFake(() => true);
    let dinoRouter = new DinoRouter(config);
    // Spy on the same object method itself
    spyOn(dinoRouter, 'resolve')
        .and.callFake((middleware, dino) => {
            expect(middleware).toBe(Function);
            expect(dino).toBe(res.locals.dino);
            return {
                invoke: (err, req, resp, next) => {
                    expect(err).toEqual(new Error('TestError'));
                    expect(req).toBe(request);
                    expect(resp).toBe(res);
                    throw new Error('TestErrorThrown');
                }
            };
        });
    dinoRouter.registerExceptionFilter(app as any, ruri, provider);
    await callback(new Error('TestError'), request, res, e => err = e);
    expect(DinoUtility.isSyncExceptionFilter).toHaveBeenCalledTimes(1);
    expect(DinoUtility.isAsyncExceptionFilter).toHaveBeenCalledTimes(1);
    expect(DinoParser.parseMiddlewareProvider).toHaveBeenCalledTimes(1);
    // The filter's own thrown error (not the original) reaches next(err).
    expect(err).toEqual(new Error('TestErrorThrown'));
});
it('registerExceptionFilter.when_not_an_exceptionfilter', () => {
let callback;
let config: IRouterConfig = { routerCb: () => null } as any;
let app = {
use: (uri, cb) => {
callback = cb;
}
};
spyOn(DinoParser, 'parseMiddlewareProvider').and.callFake(a => a);
spyOn(DinoUtility, 'isSyncExceptionFilter').and.callFake(() => false);
spyOn(DinoUtility, 'isAsyncExceptionFilter').and.callFake(() => false);
let dinoRouter = new DinoRouter(config);
dinoRouter.registerExceptionFilter(app as any, 'test', String);
expect(DinoUtility.isSyncExceptionFilter).toHaveBeenCalledTimes(1);
expect(DinoUtility.isAsyncExceptionFilter).toHaveBeenCalledTimes(1);
expect(DinoParser.parseMiddlewareProvider).toHaveBeenCalledTimes(1);
expect(callback).toBeUndefined();
});
// Verifies registerExceptionFilters fans each provider out to
// registerExceptionFilter with the same app and uri.
it('registerExceptionFilters.verify_exceptionFilters', () => {
    let config: IRouterConfig = { routerCb: () => null } as any;
    let dinoRouter = new DinoRouter(config);
    let app = { express: true };
    let provider = [
        { useClass: Function, data: 'sampledata' },
        { useClass: String, data: 'sampledata' }
    ];
    let arr = []; // records the useClass of every delegated filter
    // Spy on the same object method itself
    spyOn(dinoRouter, 'registerExceptionFilter')
        .and.callFake((lapp, uri, filter) => {
            expect(lapp).toBe(app);
            expect(uri).toBe('testUri');
            arr.push(filter.useClass);
        });
    dinoRouter.registerExceptionFilters(app as any, 'testUri', provider);
    expect(arr.includes(Function)).toBeTruthy();
    expect(arr.includes(String)).toBeTruthy();
});
// Verifies registerMiddlewares delegates each provider to registerMiddleware.
// Removed the unused `app` local: registerMiddlewares takes no app argument.
it('registerMiddlewares.verify_middlewares', () => {
    let config: IRouterConfig = { routerCb: () => null } as any;
    let dinoRouter = new DinoRouter(config);
    let provider = [
        { useClass: Number, data: 'sampledata' },
        { useClass: String, data: 'sampledata' }
    ];
    let arr = []; // records the useClass of every delegated middleware
    // Spy on the same object method itself
    spyOn(dinoRouter, 'registerMiddleware')
        .and.callFake(filter => {
            arr.push(filter.useClass);
        });
    dinoRouter.registerMiddlewares(provider);
    expect(arr.includes(Number)).toBeTruthy();
    expect(arr.includes(String)).toBeTruthy();
});
// Verifies registerBeginActionFilters delegates each provider to
// registerBeginActionFilter.
// Removed the unused `app` local: this method takes no app argument.
it('registerBeginActionFilters.verify_beginActionFilters', () => {
    let config: IRouterConfig = { routerCb: () => null } as any;
    let dinoRouter = new DinoRouter(config);
    let provider = [
        { useClass: Object, data: 'sampledata' },
        { useClass: String, data: 'sampledata' }
    ];
    let arr = []; // records the useClass of every delegated filter
    // Spy on the same object method itself
    spyOn(dinoRouter, 'registerBeginActionFilter')
        .and.callFake(filter => {
            arr.push(filter.useClass);
        });
    dinoRouter.registerBeginActionFilters(provider);
    expect(arr.includes(Object)).toBeTruthy();
    expect(arr.includes(String)).toBeTruthy();
});
// Verifies registerAfterActionFilters delegates each provider to
// registerAfterActionFilter.
// Removed the unused `app` local: this method takes no app argument.
it('registerAfterActionFilters.verify_afterActionFilters', () => {
    let config: IRouterConfig = { routerCb: () => null } as any;
    let dinoRouter = new DinoRouter(config);
    let provider = [
        { useClass: Object, data: 'sampledata' },
        { useClass: Array, data: 'sampledata' }
    ];
    let arr = []; // records the useClass of every delegated filter
    // Spy on the same object method itself
    spyOn(dinoRouter, 'registerAfterActionFilter')
        .and.callFake(filter => {
            arr.push(filter.useClass);
        });
    dinoRouter.registerAfterActionFilters(provider);
    expect(arr.includes(Object)).toBeTruthy();
    expect(arr.includes(Array)).toBeTruthy();
});
// Verifies registerResultFilters delegates each provider to registerResultFilter.
// Removed the unused `app` local: this method takes no app argument.
it('registerResultFilters.verify_resultFilters', () => {
    let config: IRouterConfig = { routerCb: () => null } as any;
    let dinoRouter = new DinoRouter(config);
    let provider = [
        { useClass: Object, data: 'sampledata' },
        { useClass: Function, data: 'sampledata' }
    ];
    let arr = []; // records the useClass of every delegated filter
    // Spy on the same object method itself
    spyOn(dinoRouter, 'registerResultFilter')
        .and.callFake(filter => {
            arr.push(filter.useClass);
        });
    dinoRouter.registerResultFilters(provider);
    expect(arr.includes(Object)).toBeTruthy();
    expect(arr.includes(Function)).toBeTruthy();
});
}); | the_stack |
'use strict'
/// <reference path="../../client/typings/ycm.d.ts" />
import {
IPCMessageReader, IPCMessageWriter,
createConnection, IConnection, TextDocumentSyncKind,
TextDocuments, TextDocument, Diagnostic, DiagnosticSeverity,
InitializeParams, InitializeResult, TextDocumentPositionParams,
CompletionItem, CompletionItemKind, Hover, SignatureHelp,
Command, CodeActionParams, NotificationType
} from 'vscode-languageserver'
import Ycm, {Settings} from './ycm'
import * as _ from 'lodash'
import {logger, loggerInit, crossPlatformUri} from './utils'
// Last-resort guard: log any exception that escaped all handlers so the
// language-server process stays alive instead of crashing.
process.on('uncaughtException', err => {
    logger('!!!uncaughtException!!!', err)
})
// Create a connection for the server. The connection uses Node's IPC as a transport
let connection: IConnection = createConnection(new IPCMessageReader(process), new IPCMessageWriter(process))
// Create a simple text document manager. The text document manager
// supports full document sync only
let documents: TextDocuments = new TextDocuments()
// Make the text document manager listen on the connection
// for open, change and close text document events
documents.listen(connection)
// After the server has started the client sends an initialize request. The server
// receives in the passed params the rootPath of the workspace plus the client
// capabilities. Both globals below gate getYcm(), which polls until they are set.
let workspaceRoot: string
let workspaceConfiguration: Settings
// Handshake: record the workspace root and advertise this server's
// capabilities (full-document sync, completion, hover, definition, code actions).
connection.onInitialize((params): InitializeResult => {
    workspaceRoot = crossPlatformUri(params.rootUri)
    return {
        capabilities: {
            // Tell the client that the server works in FULL text document sync mode
            textDocumentSync: documents.syncKind,
            // Tell the client that the server support code complete
            completionProvider: {
                resolveProvider: true,
                triggerCharacters: ['.', ':', '<', '"', '=', '/', '>', '*', '&']
            },
            hoverProvider: true,
            definitionProvider: true,
            // signatureHelpProvider: {
            //     triggerCharacters: ['(']
            // }
            codeActionProvider: true
        }
    }
})
// Translate ycmd FixIt suggestions at the cursor into client commands.
connection.onCodeAction(async (param) => {
    logger('onCodeAction', JSON.stringify(param))
    try {
        const ycm = await getYcm()
        const fixes = await ycm.fixIt(param.textDocument.uri, param.range.start, documents)
        const commands: Command[] = fixes.map(fix => ({
            title: `Fix: ${fix.text}`,
            command: 'ycm.FixIt',
            arguments: [fix]
        }))
        return commands
    } catch (e) {
        // Failures are non-fatal; offer no code actions.
        logger('onCodeAction', e)
        return []
    }
})
// FixIt notifications are currently only logged; the edit itself is applied
// on the client side.
connection.onNotification<YcmFixIt, string>(new NotificationType<YcmFixIt, string>('FixIt'), async (args) => {
    logger('On FixIt', JSON.stringify(args))
})
// Hover: try getDoc first, then fall back to a matching completion's
// signature, then to getType. If every attempt fails, the handler resolves
// with undefined (no hover shown).
connection.onHover(async (event): Promise<Hover> => {
    const ycm = await getYcm()
    try {
        return await ycm.getDocHover(event.textDocument.uri, event.position, documents, workspaceConfiguration.ycmd.use_imprecise_get_type)
    }
    catch (getDocErr) {
        try {
            // libclang does not return anything for getDoc if there is no doxygen
            // documentation for an identifier, so we next try to see if we
            // can get a member function signature from a completion (since getType()
            // returns an unhelpful "<bound member function>" for them). This isn't
            // ideal since we won't be able to tell exactly which overload is being
            // called.
            let matchingCompletion = await ycm.getExactMatchingCompletion(event.textDocument.uri, event.position, documents)
            if ( matchingCompletion ) {
                return {
                    contents: {
                        language: documents.get(event.textDocument.uri).languageId,
                        // we put the signature at the top of the completion detail
                        value: matchingCompletion.documentation
                    }
                } as Hover
            }
            else {
                // We're either on a non-member function or a variable, so just use getType().
                // We called getDoc above with workspaceConfiguration.ycmd.use_imprecise_get_type
                // so, no reason to have ycmd parse the file again.
                return await ycm.getType(event.textDocument.uri, event.position, documents, true)
            }
        }
        catch (err) {
            logger(`onHover error`, err)
        }
    }
})
// Go-to-definition: delegate to ycmd's GoTo; errors resolve to undefined.
connection.onDefinition(async (event) => {
    const ycm = await getYcm()
    try {
        return await ycm.goTo(event.textDocument.uri, event.position, documents)
    } catch (err) {
        logger(`onDefinition error`, err)
    }
})
// The content of a text document has changed. This event is emitted
// when the text document first opened or when its content has changed.
documents.onDidChangeContent(async (change) => {
    logger(`onDidChangeContent ${JSON.stringify(change.document.uri)}`)
    // Ensure the ycmd instance is (lazily) started; the instance itself is
    // not needed here, so the previously unused `ycm` binding was removed.
    await getYcm()
})
// Lazily acquires the shared Ycm instance. Until the client has supplied both
// workspaceRoot (onInitialize) and workspaceConfiguration
// (onDidChangeConfiguration), this polls every 100ms.
async function getYcm(): Promise<Ycm> {
    if (!workspaceRoot || !workspaceConfiguration) {
        // Fixed: the retry promise previously never resolved — the executor
        // invoked getYcm() without passing its result to resolve(), so any
        // caller arriving before configuration was loaded awaited forever.
        return await new Promise<Ycm>((resolve) => setTimeout(() => resolve(getYcm()), 100))
    }
    try {
        return await Ycm.getInstance(workspaceRoot, workspaceConfiguration, connection.window)
    } catch (err) {
        logger('getYcm error', err)
        connection.window.showErrorMessage(`Ycm startup failed. Please check your ycmd or python path. Detail: ${err.message || err}`)
    }
}
// Runs ycmd diagnostics ("readyToParse") for a document and publishes the
// results to the client.
async function getIssues(document: TextDocument) {
    const ycm = await getYcm()
    connection.sendDiagnostics({
        uri: document.uri,
        diagnostics: await ycm.readyToParse(document.uri, documents)
    })
}
// connection.onSignatureHelp((event) => {
// logger(`onSignatureHelp: ${JSON.stringify(event)}`)
// return {
// signatures: [{
// label: 'test1',
// documentation: ' test1 test1 test1 test1 test1 test1 test1 test1',
// parameters: [{
// label: 'string',
// documentation: 'string string string'
// }, {
// label: 'int',
// documentation: 'int int int'
// }]
// }, {
// label: 'test2',
// documentation: ' test2 test2 test2 test2 test2 test2 test2 test2',
// parameters: [{
// label: 'int',
// documentation: 'string string string'
// }, {
// label: 'string',
// documentation: 'int int int'
// }]
// }]
// } as SignatureHelp
// // try {
// // // const ycm = await getYcm()
// // // await ycm.getDocQuick(event.textDocument.uri, event.position, documents)
// // } catch (err) {
// // logger('onSignatureHelp error', err)
// // }
// })
// The settings have changed. Is send on server activation
// as well.
connection.onDidChangeConfiguration(async (change) => {
    let settings = <Settings>change.settings
    loggerInit(settings.ycmd.debug)
    logger(`onDidChangeConfiguration settings`, JSON.stringify(settings))
    try {
        // Only adopt the new settings if they validate; otherwise keep the
        // previous configuration and surface the problem to the user.
        ensureValidConfiguration(settings)
        workspaceConfiguration = settings
    } catch (err) {
        connection.window.showErrorMessage(`[Ycm] ${err.message || err}`)
    }
    // Eagerly (re)start ycmd with the new configuration.
    await getYcm()
})
// Throws when the settings object lacks a usable ycmd path.
function ensureValidConfiguration(settings: Settings) {
    const hasYcmdPath = Boolean(settings.ycmd && settings.ycmd.path)
    if (!hasYcmdPath) {
        throw new Error('Invalid ycm path')
    }
}
// On open, ask ycmd to parse the file so diagnostics/completions are warm.
documents.onDidOpen(async (event) => {
    logger(`onDidOpen`, event.document.uri)
    const ycm = await getYcm()
    try {
        await ycm.getReady(event.document.uri, documents)
    } catch (err) {
        logger('onDidOpen error', err)
    }
})
// This handler provides the initial list of the completion items.
connection.onCompletion(async (textDocumentPosition: TextDocumentPositionParams): Promise<CompletionItem[]> => {
    logger(`onCompletion: ${textDocumentPosition.textDocument.uri}`)
    const ycm = await getYcm()
    // await ycm.insertLeave(documents.get(textDocumentPosition.textDocument.uri), documents)
    // await ycm.currentIdentifierFinished(documents.get(textDocumentPosition.textDocument.uri), documents)
    // await ycm.readyToParse(documents.get(textDocumentPosition.textDocument.uri), documents)
    try {
        const latestCompletions = await ycm.completion(textDocumentPosition.textDocument.uri, textDocumentPosition.position, documents)
        return latestCompletions
    } catch (err) {
        // Completion failures are non-fatal; null means "no suggestions".
        return null
    }
})
// Tear down all ycmd server instances when the client shuts down.
connection.onShutdown(async () => {
    logger('onShutdown')
    await Ycm.reset()
})
// connection.onExit(async () => {
//     logger('onExit')
//     Ycm.reset()
// })
// Resolve is a no-op: completion items already carry all of their detail,
// so the item selected in the completion list is returned unchanged.
connection.onCompletionResolve((item: CompletionItem): CompletionItem => item)
// connection.onDidOpenTextDocument((params) => {
// // A text document got opened in VSCode.
// // params.uri uniquely identifies the document. For documents store on disk this is a file URI.
// // params.text the initial full content of the document.
// ycm.readyToParse(documents.get(params.textDocument.uri))
// })
// connection.onDidChangeTextDocument((params) => {
// // The content of a text document did change in VSCode.
// // params.uri uniquely identifies the document.
// // params.contentChanges describe the content changes to the document.
// connection.logger(`onDidChangeTextDocument: ${JSON.stringify(params.textDocument.version)}`)
// })
/*
connection.onDidCloseTextDocument((params) => {
// A text document got closed in VSCode.
// params.uri uniquely identifies the document.
connection.logger(`${params.uri} closed.`);
});
*/
// Custom 'lint' notification from the client: compute and publish
// diagnostics for the given document URI.
connection.onNotification('lint', (uri) => {
    getIssues(documents.get(uri))
})
// Listen on the connection
connection.listen()
import { wrapped } from "../functions";
import { withPossibleRepresentations, FunctionMap, PossibleRepresentation, ProtocolConformance, ReifiedType } from "../reified";
import { addVariable, lookup, uniqueName, DeclarationFlags, Scope } from "../scope";
import { Function } from "../types";
import { concat, lookupForMap } from "../utils";
import { binary, call, callable, conditional, conformance, expr, expressionLiteralValue, functionValue, ignore, literal, logical, member, read, reuse, set, statements, tuple, unary, ArgGetter, Value } from "../values";
import { applyDefaultConformances, binaryBuiltin, resolveMethod, reuseArgs, updateBuiltin, voidType } from "./common";
import { blockStatement, identifier, returnStatement, updateExpression, whileStatement } from "@babel/types";
// Inclusive bounds of a numeric type, expressed as abstract (possibly
// non-constant) values in the target program.
interface NumericRange {
	min: Value;
	max: Value;
}
// Builds the inclusive range of a numeric type by reading its static
// `min`/`max` members via the type's conformance witness.
function rangeForNumericType(type: Value, scope: Scope): NumericRange {
	const boundOf = (name: string) => call(resolveMethod(type, name, scope), [], [], scope);
	return { min: boundOf("min"), max: boundOf("max") };
}
// True unless both maxima are compile-time numbers proving left's maximum
// cannot exceed right's (i.e. conservatively assume a check is needed).
function possiblyGreaterThan(left: NumericRange, right: NumericRange, scope: Scope): boolean {
	const leftMax = expressionLiteralValue(read(left.max, scope));
	const rightMax = expressionLiteralValue(read(right.max, scope));
	if (typeof leftMax === "number" && typeof rightMax === "number") {
		return leftMax > rightMax;
	}
	return true;
}
// True unless both minima are compile-time numbers proving left's minimum
// cannot go below right's (i.e. conservatively assume a check is needed).
function possiblyLessThan(left: NumericRange, right: NumericRange, scope: Scope): boolean {
	const leftMin = expressionLiteralValue(read(left.min, scope));
	const rightMin = expressionLiteralValue(read(right.min, scope));
	if (typeof leftMin === "number" && typeof rightMin === "number") {
		return leftMin < rightMin;
	}
	return true;
}
// Wraps `value` in a runtime range check against `dest`, emitting a call to
// Swift.(swift-to-js).numericRangeFailed() when the value falls outside
// [dest.min, dest.max]. Checks that can be statically proven unnecessary
// (via the source range or a literal value) are elided.
function integerRangeCheck(scope: Scope, value: Value, source: NumericRange, dest: NumericRange) {
	const requiresGreaterThanCheck = possiblyGreaterThan(source, dest, scope);
	const requiresLessThanCheck = possiblyLessThan(source, dest, scope);
	// The source range fits entirely inside the destination: no check needed.
	if (!requiresGreaterThanCheck && !requiresLessThanCheck) {
		return value;
	}
	const expression = read(value, scope);
	const constant = expressionLiteralValue(expression);
	const constantMin = expressionLiteralValue(read(dest.min, scope));
	const constantMax = expressionLiteralValue(read(dest.max, scope));
	// A literal value provably inside literal bounds also needs no check.
	if (typeof constant === "number" && typeof constantMin === "number" && typeof constantMax === "number" && constant >= constantMin && constant <= constantMax) {
		return expr(expression);
	}
	return reuse(expr(expression), scope, "integer", (reusableValue) => {
		let check;
		if (requiresGreaterThanCheck && requiresLessThanCheck) {
			// Out of range when below the minimum or above the maximum.
			check = logical(
				"||",
				binary("<", reusableValue, dest.min, scope),
				binary(">", reusableValue, dest.max, scope),
				scope,
			);
		} else if (requiresGreaterThanCheck) {
			check = binary(">", reusableValue, dest.max, scope);
		} else {
			check = binary("<", reusableValue, dest.min, scope);
		}
		const functionType: Function = { kind: "function", arguments: { kind: "tuple", types: [] }, return: voidType, throws: true, rethrows: false, attributes: [] };
		return conditional(
			check,
			call(functionValue("Swift.(swift-to-js).numericRangeFailed()", undefined, functionType), [], [], scope),
			reusableValue,
			scope,
		);
	});
}
export function buildIntegerType(globalScope: Scope, min: number, max: number, bitWidth: number, checked: boolean, wrap: (value: Value, scope: Scope) => Value): ReifiedType {
const range: NumericRange = { min: literal(min), max: literal(max) };
const widerHigh: NumericRange = checked ? { min: literal(min), max: literal(max + 1) } : range;
const widerLow: NumericRange = checked ? { min: literal(min - 1), max: literal(max) } : range;
const widerBoth: NumericRange = checked ? { min: literal(min - 1), max: literal(max + 1) } : range;
const integerTypeName = min < 0 ? "SignedInteger" : "UnsignedInteger";
function initExactly(outerScope: Scope, outerArg: ArgGetter): Value {
const destTypeArg = outerArg(1, "T");
return callable((scope: Scope, arg: ArgGetter) => {
const destIntConformance = conformance(destTypeArg, integerTypeName, scope);
const dest = rangeForNumericType(destIntConformance, scope);
const requiresGreaterThanCheck = possiblyGreaterThan(range, dest, scope);
const requiresLessThanCheck = possiblyLessThan(range, dest, scope);
if (!requiresGreaterThanCheck && !requiresLessThanCheck) {
return arg(0, "value");
}
return reuseArgs(arg, 0, scope, ["value"], (value) => {
let check;
if (requiresGreaterThanCheck && requiresLessThanCheck) {
check = logical(
"||",
binary(">", value, dest.min, scope),
binary("<", value, dest.max, scope),
scope,
);
} else if (requiresGreaterThanCheck) {
check = binary(">", value, dest.max, scope);
} else if (requiresLessThanCheck) {
check = binary("<", value, dest.min, scope);
} else {
return value;
}
return conditional(
check,
literal(null),
value,
scope,
);
});
}, "(Self) -> Self");
}
	// CustomStringConvertible: description delegates to JS String() conversion.
	const customStringConvertibleConformance: ProtocolConformance = {
		functions: {
			description: wrapped((scope, arg) => call(expr(identifier("String")), [arg(0, "self")], ["Self"], scope), "(Self) -> String"),
		},
		requirements: [],
	};
	// Hashable: integers hash to themselves.
	const hashableConformance: ProtocolConformance = {
		functions: {
			hashValue: wrapped((scope, arg) => arg(0, "self"), "(Self) -> Int"),
		},
		requirements: [],
	};
	// Equatable: strict JS identity comparison on the underlying numbers.
	const equatableConformance: ProtocolConformance = {
		functions: {
			"==": wrapped(binaryBuiltin("===", 0), "(Self, Self) -> Bool"),
			"!=": wrapped(binaryBuiltin("!==", 0), "(Self, Self) -> Bool"),
		},
		requirements: [],
	};
	// AdditiveArithmetic: + and - carry overflow checks against the widened
	// ranges when `checked` arithmetic is enabled.
	const additiveArithmeticConformance: ProtocolConformance = {
		functions: {
			"zero": wrapped(() => literal(0), "() -> Self"),
			"+": wrapped(binaryBuiltin("+", 0, (value, scope) => integerRangeCheck(scope, value, widerHigh, range)), "(Self, Self) -> Self"),
			"-": wrapped(binaryBuiltin("-", 0, (value, scope) => integerRangeCheck(scope, value, widerLow, range)), "(Self, Self) -> Self"),
		},
		requirements: [],
	};
	// Numeric: failable exact init plus checked multiplication.
	const numericConformance: ProtocolConformance = {
		functions: {
			"init(exactly:)": initExactly,
			"*": wrapped(binaryBuiltin("*", 0, (value, scope) => integerRangeCheck(scope, value, widerBoth, range)), "(Self, Self) -> Self"),
		},
		requirements: [],
	};
	// SignedNumeric: unary negation.
	const signedNumericConformance: ProtocolConformance = {
		functions: {
			"-": wrapped((scope, arg) => unary("-", arg(0, "value"), scope), "(Self) -> Self"),
		},
		requirements: [],
	};
	// Comparable: direct JS relational operators.
	const comparableConformance: ProtocolConformance = {
		functions: {
			"<": wrapped(binaryBuiltin("<", 0), "(Self, Self) -> Bool"),
			">": wrapped(binaryBuiltin(">", 0), "(Self, Self) -> Bool"),
			"<=": wrapped(binaryBuiltin("<=", 0), "(Self, Self) -> Bool"),
			">=": wrapped(binaryBuiltin(">=", 0), "(Self, Self) -> Bool"),
		},
		requirements: [],
	};
	// Strideable: checked +/-, identity ==, and a range operator.
	// NOTE(review): "..." materializes a range as a [start, end] tuple; its
	// declared type string "(Self, Self) -> Self" looks inaccurate — confirm
	// against how callers consume range values.
	const strideableConformance: ProtocolConformance = {
		functions: {
			"+": wrapped((scope, arg) => integerRangeCheck(scope, binary("+", arg(0, "lhs"), arg(1, "rhs"), scope), widerHigh, range), "(Self, Self) -> Self"),
			"-": wrapped((scope, arg) => integerRangeCheck(scope, binary("-", arg(0, "lhs"), arg(1, "rhs"), scope), widerLow, range), "(Self, Self) -> Self"),
			"...": wrapped((scope, arg) => {
				return tuple([arg(0, "start"), arg(1, "end")]);
			}, "(Self, Self) -> Self"),
			"==": wrapped(binaryBuiltin("===", 0), "(Self, Self) -> Bool"),
		},
		requirements: [],
	};
	// Conformance to Swift's BinaryInteger protocol: conversions between integer
	// types, range-checked arithmetic, bitwise operators and bit-level queries.
	const binaryIntegerConformance: ProtocolConformance = {
		functions: {
			"init(exactly:)": initExactly,
			// Truncating conversion: wrap the source value into this type's range.
			"init(truncatingIfNeeded:)": wrapped((scope: Scope, arg: ArgGetter) => {
				return wrap(arg(0, "source"), scope);
			}, "(T) -> Self"),
			// Clamping conversion: pin the value into the destination type's range,
			// emitting only the comparisons that can actually fail given the two
			// statically-known ranges.
			"init(clamping:)": (scope: Scope, arg: ArgGetter, name: string) => {
				const dest = rangeForNumericType(conformance(arg(1, "T"), integerTypeName, scope), scope);
				return callable((innerScope, innerArg) => {
					const requiresGreaterThanCheck = possiblyGreaterThan(range, dest, scope);
					const requiresLessThanCheck = possiblyLessThan(range, dest, scope);
					if (!requiresGreaterThanCheck && !requiresLessThanCheck) {
						// Source range fits entirely inside the destination: identity.
						return innerArg(0, "value");
					}
					return reuse(innerArg(0, "value"), innerScope, "value", (value) => {
						if (requiresGreaterThanCheck && requiresLessThanCheck) {
							// Clamp on both ends: value > max ? max : (value < min ? min : value)
							return conditional(
								binary(">", value, dest.max, innerScope),
								dest.max,
								conditional(
									binary("<", value, dest.min, innerScope),
									dest.min,
									value,
									innerScope,
								),
								innerScope,
							);
						} else if (requiresGreaterThanCheck) {
							return conditional(
								binary(">", value, dest.max, innerScope),
								dest.max,
								value,
								innerScope,
							);
						} else {
							return conditional(
								binary("<", value, dest.min, innerScope),
								dest.min,
								value,
								innerScope,
							);
						}
					});
				}, "(Self) -> Self");
			},
			// Integer division: "| 0" truncates the JS double quotient toward zero.
			"/": wrapped((scope, arg) => binary("|", binary("/", arg(0, "lhs"), arg(1, "rhs"), scope), literal(0), scope), "(Self, Self) -> Self"),
			"%": wrapped((scope, arg) => binary("%", arg(0, "lhs"), arg(1, "rhs"), scope), "(Self, Self) -> Self"),
			// Checked arithmetic: trap when the result can leave this type's range.
			"+": wrapped((scope, arg) => integerRangeCheck(scope, binary("+", arg(0, "lhs"), arg(1, "rhs"), scope), widerHigh, range), "(Self, Self) -> Self"),
			"-": wrapped((scope, arg) => integerRangeCheck(scope, binary("-", arg(0, "lhs"), arg(1, "rhs"), scope), widerLow, range), "(Self, Self) -> Self"),
			"*": wrapped((scope, arg) => integerRangeCheck(scope, binary("*", arg(0, "lhs"), arg(1, "rhs"), scope), widerBoth, range), "(Self, Self) -> Self"),
			// Bitwise NOT, re-wrapped into this type's range.
			"~": wrapped((scope, arg) => wrap(unary("~", arg(0, "self"), scope), scope), "(Self) -> Self"),
			">>": wrapped((scope, arg) => binary(">>", arg(0, "lhs"), arg(1, "rhs"), scope), "(Self, Self) -> Self"),
			"<<": wrapped((scope, arg) => binary("<<", arg(0, "lhs"), arg(1, "rhs"), scope), "(Self, Self) -> Self"),
			"<": wrapped(binaryBuiltin("<", 0), "(Self, Self) -> Bool"),
			">": wrapped(binaryBuiltin(">", 0), "(Self, Self) -> Bool"),
			"<=": wrapped(binaryBuiltin("<=", 0), "(Self, Self) -> Bool"),
			">=": wrapped(binaryBuiltin(">=", 0), "(Self, Self) -> Bool"),
			"&": wrapped(binaryBuiltin("&", 0), "(Self, Self) -> Self"),
			"|": wrapped(binaryBuiltin("|", 0), "(Self, Self) -> Self"),
			"^": wrapped(binaryBuiltin("^", 0), "(Self, Self) -> Self"),
			// (quotient, remainder) pair; quotient is truncated via "| 0".
			"quotientAndRemainder(dividingBy:)": wrapped((scope, arg) => {
				return reuseArgs(arg, 0, scope, ["lhs", "rhs"], (lhs, rhs) => {
					return tuple([
						binary("|", binary("/", lhs, rhs, scope), literal(0), scope),
						binary("%", lhs, rhs, scope),
					]);
				});
			}, "(Self, Self) -> (Self, Self)"),
			// Three-way signum for signed types; unsigned types only need the > 0 test
			// since negative values are unrepresentable.
			"signum": wrapped((scope, arg) => {
				return reuseArgs(arg, 0, scope, ["self"], (int) => {
					if (min < 0) {
						return conditional(
							binary(">", int, literal(0), scope),
							literal(1),
							conditional(
								binary("<", int, literal(0), scope),
								literal(-1),
								int,
								scope,
							),
							scope,
						);
					} else {
						return conditional(
							binary(">", int, literal(0), scope),
							literal(1),
							int,
							scope,
						);
					}
				});
			}, "(Self) -> Self"),
			// Signedness is known at compile time from the type's minimum value.
			"isSigned": wrapped((scope, arg) => {
				return literal(min < 0);
			}, "() -> Bool"),
		},
		requirements: [],
	};
	// Reverses the byte order of a value. For each destination byte offset i
	// (0, 8, ...) the byte at bit offset (bitWidth - 8 - i) is shifted into
	// offset i and OR'd into the result.
	const byteSwapped = wrapped((scope, arg) => {
		if (bitWidth <= 8) {
			// Single-byte types are their own byte swap.
			return arg(0, "value");
		}
		return reuseArgs(arg, 0, scope, ["value"], (self) => {
			let result: Value = literal(0);
			for (let i = 0; i < bitWidth; i += 8) {
				const shiftAmount = bitWidth - 8 - i * 2;
				const shifted = binary(shiftAmount > 0 ? ">>" : "<<", self, literal(shiftAmount > 0 ? shiftAmount : -shiftAmount), scope);
				result = binary("|",
					result,
					// Shifting into the top byte of a 32-bit value (shiftAmount -24)
					// needs no mask; JS 32-bit shifts already discard the higher bits.
					shiftAmount !== -24 ? binary("&", shifted, literal(0xFF << i), scope) : shifted,
					scope,
				);
			}
			return result;
		});
	}, "(Self) -> Self");
	// Conformance to Swift's FixedWidthInteger protocol: bit-width constants,
	// endianness helpers, wrapping (&) operators and overflow-reporting arithmetic.
	const fixedWidthIntegerConformance: ProtocolConformance = {
		functions: {
			// Parse a string in the given radix via parseInt; parseInt yields NaN on
			// failure and the NaN !== NaN self-comparison maps that to null (Self?).
			"init(_:radix:)": wrapped((scope, arg) => {
				const input = read(arg(0, "text"), scope);
				const result = uniqueName(scope, "integer");
				return statements([
					addVariable(scope, result, "Int", call(expr(identifier("parseInt")), [
						expr(input),
						arg(1, "radix"),
					], ["String", "Int"], scope), DeclarationFlags.Const),
					returnStatement(
						read(conditional(
							binary("!==",
								lookup(result, scope),
								lookup(result, scope),
								scope,
							),
							literal(null),
							lookup(result, scope),
							scope,
						), scope),
					),
				]);
			}, "(String, Int) -> Self?"),
			"min": wrapped((scope, arg) => literal(min), "() -> Self"),
			"max": wrapped((scope, arg) => literal(max), "() -> Self"),
			// littleEndian is the identity while bigEndian/byteSwapped reverse bytes;
			// presumably the native representation is taken to be little-endian —
			// JS numbers expose no observable byte order.
			"littleEndian": wrapped((scope, arg) => arg(0, "self"), "(Self) -> Self"),
			"bigEndian": byteSwapped,
			"byteSwapped": byteSwapped,
			"bitWidth": wrapped((scope, arg) => literal(bitWidth), "() -> Self"),
			// Wrapping (masking) arithmetic: compute in doubles, then wrap into range.
			"&+": wrapped(binaryBuiltin("+", 0, wrap), "(Self, Self) -> Self"),
			"&*": wrapped(binaryBuiltin("*", 0, wrap), "(Self, Self) -> Self"),
			"&-": wrapped(binaryBuiltin("-", 0, wrap), "(Self, Self) -> Self"),
			"&<<": wrapped(binaryBuiltin("<<", 0, wrap), "(Self, Self) -> Self"),
			"&>>": wrapped(binaryBuiltin(">>", 0, wrap), "(Self, Self) -> Self"),
			// reportingOverflow family: compute the full result, truncate it into
			// range, and report overflow as (truncated !== full).
			"addingReportingOverflow(_:)": wrapped((scope, arg) => reuse(binary("+", arg(0, "lhs"), arg(1, "rhs"), scope), scope, "full", (full) => {
				return reuse(wrap(full, scope), scope, "truncated", (truncated) => {
					return tuple([truncated, binary("!==", truncated, full, scope)]);
				});
			}), "(Self, Self) -> (Self, Bool)"),
			"subtractingReportingOverflow(_:)": wrapped((scope, arg) => reuse(binary("-", arg(0, "lhs"), arg(1, "rhs"), scope), scope, "full", (full) => {
				return reuse(wrap(full, scope), scope, "truncated", (truncated) => {
					return tuple([truncated, binary("!==", truncated, full, scope)]);
				});
			}), "(Self, Self) -> (Self, Bool)"),
			"multipliedReportingOverflow(by:)": wrapped((scope, arg) => reuse(binary("*", arg(0, "lhs"), arg(1, "rhs"), scope), scope, "full", (full) => {
				return reuse(wrap(full, scope), scope, "truncated", (truncated) => {
					return tuple([truncated, binary("!==", truncated, full, scope)]);
				});
			}), "(Self, Self) -> (Self, Bool)"),
			"dividedReportingOverflow(by:)": wrapped((scope, arg) => reuse(binary("|", binary("/", arg(0, "lhs"), arg(1, "rhs"), scope), literal(0), scope), scope, "full", (full) => {
				return reuse(wrap(full, scope), scope, "truncated", (truncated) => {
					return tuple([truncated, binary("!==", truncated, full, scope)]);
				});
			}), "(Self, Self) -> (Self, Bool)"),
			"remainderReportingOverflow(dividingBy:)": wrapped((scope, arg) => reuse(binary("%", arg(0, "lhs"), arg(1, "rhs"), scope), scope, "full", (full) => {
				return reuse(wrap(full, scope), scope, "truncated", (truncated) => {
					return tuple([truncated, binary("!==", truncated, full, scope)]);
				});
			}), "(Self, Self) -> (Self, Bool)"),
			// Population count: constant-folded for literals (Kernighan's clear-lowest-
			// set-bit method), otherwise emitted as the equivalent runtime loop.
			"nonzeroBitCount": wrapped((scope, arg) => reuse(arg(0, "value"), scope, "value", (value, literalValue) => {
				if (typeof literalValue === "number") {
					// Population count of a literal
					let count: number = 0;
					let current = literalValue;
					while (current) {
						count++;
						current &= current - 1;
					}
					return literal(count);
				}
				// Population count at runtime
				const currentName = uniqueName(scope, "current");
				const currentDeclaration = addVariable(scope, currentName, "Self", value);
				const countName = uniqueName(scope, "count");
				const countDeclaration = addVariable(scope, countName, "Self", literal(0));
				return statements([
					currentDeclaration,
					countDeclaration,
					whileStatement(
						identifier(currentName),
						blockStatement(concat(
							ignore(set(
								lookup(countName, scope),
								literal(1),
								scope,
								"+=",
							), scope),
							ignore(set(
								lookup(currentName, scope),
								binary("-", lookup(currentName, scope), literal(1), scope),
								scope,
								"&=",
							), scope),
						)),
					),
					returnStatement(identifier(countName)),
				]);
			}), "(Self) -> Self"),
			// Leading-zero count: decrement a shift amount until a set bit is found;
			// constant-folded for literals, otherwise emitted as an empty-bodied loop
			// whose condition performs the work.
			"leadingZeroBitCount": wrapped((scope, arg) => reuse(arg(0, "value"), scope, "value", (value, literalValue) => {
				if (typeof literalValue === "number") {
					// Count leading zero bits of literal
					let shift = bitWidth;
					// tslint:disable-next-line:no-empty
					while (literalValue >> --shift === 0 && shift >= 0) {
					}
					return literal(bitWidth - 1 - shift);
				}
				// Count leading zero bits at runtime
				const shiftName = uniqueName(scope, "shift");
				const shiftDeclaration = addVariable(scope, shiftName, "Self", literal(bitWidth));
				return statements([
					shiftDeclaration,
					whileStatement(
						read(
							logical("&&",
								binary("===",
									binary(">>",
										value,
										expr(updateExpression("--", identifier(shiftName), true)),
										scope,
									),
									literal(0),
									scope,
								),
								binary(">=",
									lookup(shiftName, scope),
									literal(0),
									scope,
								),
								scope,
							),
							scope,
						),
						blockStatement([]),
					),
					returnStatement(read(binary("-", literal(bitWidth - 1), lookup(shiftName, scope), scope), scope)),
				]);
			}), "(Self) -> Self"),
			// Full-width multiply returning (high, low). Products of types up to 16
			// bits fit exactly in a double, so shift/mask suffices; wider types use
			// division by 2^32 for the high word and Math.imul for the low 32 bits.
			"multipliedFullWidth(by:)": wrapped((scope, arg) => {
				const magnitudeBitWidth = min < 0 ? bitWidth - 1 : bitWidth;
				if (bitWidth <= 16) {
					return reuse(binary("*", arg(0, "lhs"), arg(1, "rhs"), scope), scope, "multiplied", (multiplied) => {
						return tuple([
							binary(">>", multiplied, literal(magnitudeBitWidth), scope),
							binary("&", multiplied, literal((1 << magnitudeBitWidth) - 1), scope),
						]);
					});
				}
				return reuse(arg(0, "lhs"), scope, "lhs", (lhs, lhsLiteral) => {
					return reuse(arg(1, "rhs"), scope, "rhs", (rhs, rhsLiteral) => {
						return tuple([
							binary("|", binary("/", binary("*", lhs, rhs, scope), literal(Math.pow(2, 32)), scope), literal(0), scope),
							typeof lhsLiteral === "number" && typeof rhsLiteral === "number" ?
								literal(Math.imul(lhsLiteral, rhsLiteral)) :
								call(member(expr(identifier("Math")), "imul", scope), [
									lhs,
									rhs,
								], ["String", "Int"], scope),
						]);
					});
				});
			}, "(Self, Self) -> Self"),
			// Not supported: emits a call to the runtime's notImplemented stub.
			"dividingFullWidth(_:)": wrapped((scope) => {
				return call(functionValue("Swift.(swift-to-js).notImplemented()", undefined, { kind: "function", arguments: voidType, return: voidType, throws: true, rethrows: false, attributes: [] }), [], [], scope);
			}, "((Self, Self)) -> (Self, Self)"),
		},
		requirements: [],
	};
	// Conformance to the concrete integer protocol (SignedInteger/UnsignedInteger,
	// selected by integerTypeName): bounds plus checked conversion from another
	// integer type.
	const integerConformance: ProtocolConformance = {
		functions: {
			"min": wrapped(() => {
				return literal(min);
			}, "() -> Int"),
			"max": wrapped(() => {
				return literal(max);
			}, "() -> Int"),
			// Conversion from another integer type T, range-checked against T's range.
			"init(_:)": (outerScope, outerArg) => {
				const sourceTypeArg = outerArg(1, "T");
				return callable((scope, arg) => {
					const sourceType = conformance(sourceTypeArg, integerTypeName, scope);
					return integerRangeCheck(
						scope,
						arg(0, "value"),
						range,
						rangeForNumericType(sourceType, scope),
					);
				}, "(Self) -> Self");
			},
			"init(exactly:)": initExactly,
		},
		requirements: [],
	};
	// Only SignedInteger has these methods: wrapping add/subtract whose results
	// are truncated into range via wrap (min < 0 identifies signed types).
	if (min < 0) {
		integerConformance.functions["&+"] = wrapped(binaryBuiltin("+", 0, wrap), "(Self, Self) -> Self");
		integerConformance.functions["&-"] = wrapped(binaryBuiltin("-", 0, wrap), "(Self, Self) -> Self");
	}
	// The reified JS representation of this integer type: plain numbers with the
	// operator/function map below and the protocol conformances assembled above.
	const reifiedType: ReifiedType = {
		functions: lookupForMap({
			"init(_builtinIntegerLiteral:)": wrapped((scope, arg) => arg(0, "value"), "(Self) -> Self"),
			"+": wrapped((scope, arg) => integerRangeCheck(scope, binary("+", arg(0, "lhs"), arg(1, "rhs"), scope), widerHigh, range), "(Self, Self) -> Self"),
			// "-" serves as both unary negation and binary subtraction; select by arity.
			"-": wrapped((scope, arg, type, argTypes) => {
				if (argTypes.length === 1) {
					return integerRangeCheck(scope, unary("-", arg(0, "value"), scope), widerLow, range);
				}
				return integerRangeCheck(scope, binary("-", arg(0, "lhs"), arg(1, "rhs"), scope), widerLow, range);
			}, "(Self) -> Self"),
			"*": wrapped((scope, arg) => integerRangeCheck(scope, binary("*", arg(0, "lhs"), arg(1, "rhs"), scope), widerBoth, range), "(Self, Self) -> Self"),
			// Integer division: "| 0" truncates the JS quotient toward zero.
			"/": wrapped((scope, arg) => binary("|", binary("/", arg(0, "lhs"), arg(1, "rhs"), scope), literal(0), scope), "(Self, Self) -> Self"),
			"%": wrapped(binaryBuiltin("%", 0), "(Self, Self) -> Self"),
			"<": wrapped(binaryBuiltin("<", 0), "(Self, Self) -> Bool"),
			">": wrapped(binaryBuiltin(">", 0), "(Self, Self) -> Bool"),
			"<=": wrapped(binaryBuiltin("<=", 0), "(Self, Self) -> Bool"),
			">=": wrapped(binaryBuiltin(">=", 0), "(Self, Self) -> Bool"),
			"&": wrapped(binaryBuiltin("&", 0), "(Self, Self) -> Self"),
			"|": wrapped(binaryBuiltin("|", 0), "(Self, Self) -> Self"),
			"^": wrapped(binaryBuiltin("^", 0), "(Self, Self) -> Self"),
			"==": wrapped(binaryBuiltin("===", 0), "(Self, Self) -> Bool"),
			"!=": wrapped(binaryBuiltin("!==", 0), "(Self, Self) -> Bool"),
			"+=": wrapped(updateBuiltin("+", 0), "(inout Self, Self) -> Void"),
			"-=": wrapped(updateBuiltin("-", 0), "(inout Self, Self) -> Void"),
			"*=": wrapped(updateBuiltin("*", 0), "(inout Self, Self) -> Void"),
			// Ranges are represented as [start, end] tuples.
			"...": wrapped((scope, arg) => {
				return tuple([arg(0, "start"), arg(1, "end")]);
			}, "(Self, Self) -> Self.Stride"),
			// Integers hash to themselves.
			"hashValue": wrapped((scope, arg) => {
				return arg(0, "self");
			}, "(Self) -> Int"),
			"min": wrapped(() => {
				return literal(min);
			}, "(Type) -> Self"),
			"max": wrapped(() => {
				return literal(max);
			}, "(Type) -> Self"),
		} as FunctionMap),
		conformances: withPossibleRepresentations(applyDefaultConformances({
			Hashable: hashableConformance,
			Equatable: equatableConformance,
			Comparable: comparableConformance,
			BinaryInteger: binaryIntegerConformance,
			AdditiveArithmetic: additiveArithmeticConformance,
			Numeric: numericConformance,
			[integerTypeName]: integerConformance,
			SignedNumeric: signedNumericConformance,
			FixedWidthInteger: fixedWidthIntegerConformance,
			Strideable: strideableConformance,
			CustomStringConvertible: customStringConvertibleConformance,
			LosslessStringConvertible: {
				functions: {
					// Failable parse from a base-10 string. String literals are folded
					// at compile time; otherwise parseInt is emitted and the NaN !== NaN
					// self-comparison maps a failed parse to null.
					"init(_:)": wrapped((scope, arg) => {
						const input = read(arg(0, "description"), scope);
						const value = expressionLiteralValue(input);
						if (typeof value === "string") {
							const convertedValue = parseInt(value, 10);
							return literal(isNaN(convertedValue) ? null : convertedValue);
						}
						const result = uniqueName(scope, "integer");
						return statements([
							addVariable(scope, result, "Int", call(expr(identifier("parseInt")), [
								expr(input),
								literal(10),
							], ["String", "Int"], scope), DeclarationFlags.Const),
							returnStatement(
								read(conditional(
									binary("!==",
										lookup(result, scope),
										lookup(result, scope),
										scope,
									),
									literal(null),
									lookup(result, scope),
									scope,
								), scope),
							),
						]);
					}, "(String) -> Self?"),
				},
				requirements: [],
			},
		}, globalScope), PossibleRepresentation.Number),
		defaultValue() {
			// Integers default to zero.
			return literal(0);
		},
		innerTypes: {
		},
	};
return reifiedType;
} | the_stack |
import './TreeDrawer/Column';
import './TreeDrawer/TreeLine';
import './TreeDrawer/Table';
import {
defineComponent,
FunctionalComponent,
html,
observable,
TemplateResult,
updated,
} from '@vuerd/lit-observable';
import { classMap } from 'lit-html/directives/class-map';
import { LineShape } from '@/components/drawer/TreeDrawer/TreeLine';
import {
ColumnAdd,
ColumnModify,
ColumnRemove,
TableAdd,
TableModify,
TableRemove,
} from '@/core/diff';
import { calculateLatestDiff } from '@/core/diff/helper';
import { getData } from '@/core/helper';
import { useContext } from '@/core/hooks/context.hook';
import { Changes } from '@/core/tableTree';
import { generateRoot, TreeNode } from '@/core/tableTree/tableTree';
import { css } from '@/core/tagged';
import { Column } from '@@types/engine/store/table.state';
declare global {
  // Register the custom element tag in the DOM typings so
  // document.createElement('vuerd-tree-drawer') is typed.
  interface HTMLElementTagNameMap {
    'vuerd-tree-drawer': TreeDrawerElement;
  }
}
// Observed properties of the <vuerd-tree-drawer> element.
export interface TreeDrawerProps {
  // Drawer width in pixels.
  width: number;
  // Whether the drawer is currently shown.
  visible: boolean;
}

// The rendered custom element: props plus the standard HTMLElement surface.
export interface TreeDrawerElement extends TreeDrawerProps, HTMLElement {}
// Internal observable state of the drawer component.
interface TreeDrawerState {
  // Rendered rows (tables and columns) of the tree.
  tree: TemplateResult[];
  // Root node of the table tree; null until the first refresh.
  root: TreeNode | null;
  // Latch that prevents re-entrant refreshes while the drawer is visible
  // (see the updated() callback).
  forbidUpdate: boolean;
}
/**
 * Drawer that renders the table tree with per-table/per-column diff highlighting.
 * Fixes over the previous revision: `var` declarations replaced with
 * `const`/`let`, and the duplicate check's `some()` callback now returns an
 * explicit boolean instead of `true`/`undefined`.
 */
const TreeDrawer: FunctionalComponent<TreeDrawerProps, TreeDrawerElement> = (
  props,
  ctx
) => {
  const contextRef = useContext(ctx);
  const state = observable<TreeDrawerState>({
    tree: [],
    root: null,
    forbidUpdate: false,
  });

  /**
   * Draws entire tree of tables
   */
  const refresh = () => {
    state.root = generateRoot(contextRef.value, state.root || undefined);
    refreshDiff();
  };

  /**
   * Rebuilds the rendered rows from the current tree state
   */
  const updateTree = () => {
    state.tree = [];
    if (state.root?.children.length) {
      state.tree.push(...showChildren(state.root));
    } else {
      state.tree[0] = html`No table found`;
    }
  };

  /**
   * Recomputes the latest table/column diffs and reapplies change markers
   * (add/modify/remove) to the tree, then re-renders
   */
  const refreshDiff = () => {
    const diffs = calculateLatestDiff(contextRef.value);

    // @ts-ignore
    const tableDiffs: (TableModify | TableAdd | TableRemove)[] = diffs.filter(
      diff => diff.type === 'table'
    );

    // @ts-ignore
    const columnDiffs: (ColumnModify | ColumnAdd | ColumnRemove)[] =
      diffs.filter(diff => diff.type === 'column');

    state.root?.children.forEach(child => {
      child.changes = 'none';
      child.nestedChanges = 'none';
      child.diffs = [];

      tableDiffs.forEach(diff => {
        if (diff.changes === 'modify' && child.id === diff.newTable.id) {
          child.changes = 'modify';
          child.diffs.push(diff);
        } else if (diff.changes === 'add' && child.id === diff.newTable.id) {
          child.changes = 'add';
          child.diffs.push(diff);
        } else if (diff.changes === 'remove' && child.id === diff.oldTable.id) {
          child.changes = 'remove';
          child.diffs.push(diff);
        }
      });

      columnDiffs.forEach(diff => {
        if (child.id === diff.table.id) {
          child.nestedChanges = diff.changes;
          child.diffs.push(diff);
        }
      });
    });

    // Removed tables no longer exist in the tree, so synthesize a node for each.
    tableDiffs.forEach(diff => {
      if (diff.changes === 'remove') {
        const node: TreeNode = new TreeNode(
          contextRef.value,
          diff.oldTable.id,
          diff.oldTable,
          state.root,
          state.root,
          []
        );
        node.changes = 'remove';
        node.diffs = [diff];

        const duplicate = state.root?.children.some(
          existing => existing.id === diff.oldTable.id
        );
        if (!duplicate) state.root?.children.push(node);
      }
    });

    updateTree();
  };

  /**
   * Returns array of html children of one node
   * @param node Node of which children will be returned
   * @param lines Lines before this node
   * @returns Array of html containing rows with tables/columns
   */
  const showChildren = (
    node: TreeNode,
    lines: LineShape[] = []
  ): TemplateResult[] => {
    if (node.children.length) {
      const lastChild = node.children[node.children.length - 1];

      /** Renders a single table row, tinted by its diff state. */
      function tableRow(changes: Changes, tableNode: TreeNode) {
        return html`<div
          class=${classMap({
            'vuerd-tree-row': true,
            'diff-modify': changes === 'modify',
            'diff-add': changes === 'add',
            'diff-remove': changes === 'remove',
          })}
        >
          ${makeTreeLines(lines)}
          <vuerd-tree-table-name
            .node=${tableNode}
            .update=${updateTree}
          ></vuerd-tree-table-name>
        </div>`;
      }

      const rows = node.children.map(child => {
        if (child === lastChild) lines[lines.length - 1] = 'L';

        const primaryNode = getData(child.root?.children || [], child.id);
        if (primaryNode && !child.disabled) {
          child.changes = primaryNode.changes;
          child.nestedChanges = primaryNode.nestedChanges;
        }

        const childRows: TemplateResult[] = [];
        childRows.push(tableRow(child.changes, child));

        if (child.open) {
          if (lastChild.id === child.id) {
            lines[lines.length - 1] = 'NULL';
          } else {
            lines[lines.length - 1] = 'I';
          }
          childRows.push(...showColumns(child, [...lines, 'I']));
          childRows.push(...showChildren(child, [...lines, 'X']));
        }

        return childRows;
      });

      // const removedTables: TemplateResult[] = node.diffs;
      return rows.reduce((acc, val) => acc.concat(val), []); // flatten array [][] --> []
    } else return [];
  };

  /**
   * Returns array of html columns belonging to one table inside node
   * @param node Node of which columns will be returned
   * @param lines Lines to draw
   * @returns Array of html containing rows of columns
   */
  const showColumns = (
    node: TreeNode,
    lines: LineShape[]
  ): TemplateResult[] => {
    let columns: TemplateResult[] = [];

    /** Renders a single column row, tinted by its diff state. */
    function columnRow(changes: Changes, column: Column) {
      return html`
        <div
          class=${classMap({
            'vuerd-tree-row': true,
            'diff-modify': changes === 'modify',
            'diff-add': changes === 'add',
            'diff-remove': changes === 'remove',
          })}
        >
          ${makeTreeLines(lines)}
          <vuerd-tree-column-name
            .tableId=${node.id}
            .changes=${changes}
            .column=${column}
            .update=${updateTree}
          ></vuerd-tree-column-name>
        </div>
      `;
    }

    const primaryNode = getData(node.root?.children || [], node.id);
    if (primaryNode?.table) {
      columns = primaryNode.table?.columns.map(col => {
        for (let diff of primaryNode.diffs) {
          if (
            diff.type === 'table' &&
            (diff.changes === 'add' || diff.changes === 'remove')
          ) {
            // Whole table added/removed: every column inherits that state.
            return columnRow(diff.changes, col);
          } else if (diff.type === 'column') {
            if (diff.changes === 'add' && diff.newColumn.id === col.id) {
              return columnRow('add', col);
            } else if (
              diff.changes === 'modify' &&
              diff.newColumn.id === col.id
            ) {
              return columnRow('modify', col);
            } else if (
              diff.changes === 'remove' &&
              diff.oldColumn.id === col.id
            ) {
              return columnRow('remove', col);
            }
          }
        }
        return columnRow('none', col);
      });

      // Removed columns are not in table.columns anymore; render them from diffs.
      //@ts-ignore
      const removedColumns: TemplateResult[] = primaryNode.diffs
        .map(diff => {
          if (
            diff.changes === 'remove' &&
            diff.type === 'column' &&
            diff.oldColumn
          )
            return columnRow('remove', diff.oldColumn);
        })
        .filter(row => row);

      columns.push(...removedColumns);
    }

    return columns;
  };

  /**
   * Creates lines
   * @param lines Array of lines to draw
   * @returns Array of lines
   */
  const makeTreeLines = (lines: LineShape[]) => {
    return lines.map(
      line => html`<vuerd-tree-line .shape=${line}></vuerd-tree-line>`
    );
  };

  /**
   * Shows all tables
   */
  const showAll = () => {
    refresh();
    state.root?.children.forEach(child => {
      if (!child.table.visible) {
        child.toggleVisible();
      }
    });
    updateTree();
  };

  /**
   * Hides all tables
   */
  const hideAll = () => {
    refresh();
    state.root?.children.forEach(child => {
      if (child.table.visible) {
        child.toggleVisible();
      }
    });
    updateTree();
  };

  const onClose = () => ctx.dispatchEvent(new CustomEvent('close'));

  updated(() => {
    // S-R latch so we dont create infinite loop of updates
    if (props.visible === true && state.forbidUpdate === false) {
      state.forbidUpdate = true;
      if (!state.root?.children.length) {
        refresh();
      }
      refreshDiff();
    } else if (props.visible === false && state.forbidUpdate === true) {
      state.forbidUpdate = false;
    }
  });

  return () => {
    return html`
      <vuerd-drawer
        name="Table Tree"
        .width=${props.width}
        .visible=${props.visible}
        @close=${onClose}
      >
        <div class="vuerd-tree-refresh" @click=${refresh}>
          <span>Refresh</span>
          <vuerd-icon name="sync-alt" size="12"></vuerd-icon>
        </div>
        <div class="vuerd-tree-hideall" @click=${showAll}>
          <span>Show all</span>
          <vuerd-icon name="eye" size="14"></vuerd-icon>
        </div>
        <div class="vuerd-tree-hideall" @click=${hideAll}>
          <span>Hide all</span>
          <vuerd-icon name="eye-slash" size="14"></vuerd-icon>
        </div>
        <div class="vuerd-tree-diff" @click=${refreshDiff}>
          <span>Get diff</span>
          <vuerd-icon name="sync-alt" size="12"></vuerd-icon>
        </div>
        ${state.tree}
      </vuerd-drawer>
    `;
  };
};
// Component stylesheet, injected via defineComponent below (light DOM).
const style = css`
  .vuerd-tree-row {
    display: flex;
    flex-direction: row;
  }
  .vuerd-tree-refresh,
  .vuerd-tree-hideall,
  .vuerd-tree-diff {
    box-sizing: border-box;
    padding: 5px;
    display: inline-block;
    cursor: pointer;
    fill: var(--vuerd-color-font);
    font-size: 15px;
  }
  .vuerd-tree-refresh:hover,
  .vuerd-tree-hideall:hover,
  .vuerd-tree-diff:hover {
    color: var(--vuerd-color-font-active);
    background-color: var(--vuerd-color-contextmenu-active);
    fill: var(--vuerd-color-font-active);
  }
  .vuerd-tree-row.diff-add {
    background-color: var(--vuerd-color-diff-add);
  }
  .vuerd-tree-row.diff-modify {
    background-color: var(--vuerd-color-diff-modify);
  }
  .vuerd-tree-row.diff-remove {
    background-color: var(--vuerd-color-diff-remove);
  }
`;
// Register the <vuerd-tree-drawer> custom element (rendered into the light DOM,
// shadow: false, so the stylesheet above applies alongside global styles).
defineComponent('vuerd-tree-drawer', {
  observedProps: ['width', 'visible'],
  shadow: false,
  style,
  render: TreeDrawer,
});
module WinJSTests {
"use strict";
    // VDSLogging.options saved by setUp so tearDown can restore them.
    var previousTracingOptions;

    // Store edit result, use "recordEditSuccess" callback, clear it before.
    var editSucceeded = false;

    // Resets the recorded edit outcome; call before starting a new edit.
    function clearLastEdit() {
        editSucceeded = false;
    }

    // Success callback for edit promises; marks the last edit as succeeded.
    function recordEditSuccess() {
        editSucceeded = true;
    }

    // Asserts that the most recent edit's success callback actually ran.
    function verifyLastEditSuccessful() {
        LiveUnit.Assert.isTrue(editSucceeded, "Edit did not succeed");
    }
    // Exercises the full editing surface of a VDS data source (insert, remove,
    // change, move, batched edits) against a 10-item list, verifying the
    // notification stream and resulting list state after each step.
    // `synchronous` selects whether the test data adapter completes edit methods
    // synchronously or asynchronously.
    function testEditing(signalTestCaseCompleted, synchronous) {
        var dataSource = Helper.ItemsManager.simpleAsynchronousDataSource(0),
            handler = Helper.ItemsManager.simpleListNotificationHandler(),
            listBinding = dataSource.createListBinding(handler);

        if (synchronous) {
            dataSource.testDataAdapter.directives.callMethodsSynchronously = true;
        } else {
            Helper.ItemsManager.ensureAllAsynchronousRequestsFulfilled(dataSource);
        }

        var state0 = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
        Helper.ItemsManager.setState(dataSource, state0);

        // Fetch the first item
        var itemPromise = listBinding.first();
        handler.appendItemPromise(itemPromise);
        itemPromise.then(function (item) {
            handler.updateItem(item);
            handler.verifyItem(item, 0);

            Helper.ItemsManager.setImmediate(function () {
                handler.verifyExpectedNotifications([
                    "beginNotifications",
                    "countChanged",
                    "endNotifications"
                ]);

                var promises = [];

                // Fetch the remaining 9 items
                for (var i = 1; i < 10; i++) {
                    itemPromise = listBinding.next();
                    handler.appendItemPromise(itemPromise);
                    // IIFE captures the loop index for the async verification below.
                    (function (i) {
                        promises.push(itemPromise.then(function (item2) {
                            handler.updateItem(item2);
                            handler.verifyItem(item2, i);
                        }));
                    })(i);
                }

                WinJS.Promise.join(promises).then(function () {
                    Helper.ItemsManager.verifyRequestCount(dataSource, 0);
                    handler.verifyExpectedNotifications([]);
                    handler.verifyState(state0, dataSource);

                    // Now try an insertion at the start of the list
                    var state1 = [10, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
                    clearLastEdit();
                    var insertPromise = dataSource.insertAtStart(null, Helper.ItemsManager.simpleItem(10)).then(function () {
                        recordEditSuccess();
                    });
                    handler.verifyExpectedNotifications([
                        "beginNotifications",
                        "inserted",
                        "indexChanged",
                        "indexChanged",
                        "indexChanged",
                        "indexChanged",
                        "indexChanged",
                        "indexChanged",
                        "indexChanged",
                        "indexChanged",
                        "indexChanged",
                        "indexChanged",
                        "countChanged",
                        "endNotifications"
                    ]);
                    handler.verifyState(state1, null, true);
                    insertPromise.then(function (item2) {
                        // After an asynchronous insertions, a changed notification will be sent when the edit
                        // completes.
                        handler.verifyExpectedNotifications(synchronous ?
                            [] :
                            [
                                "beginNotifications",
                                "changed",
                                "endNotifications"
                            ]
                        );
                        if (item2) {
                            // NOTE(review): this asserts on the outer `item` (index 0),
                            // not on `item2` — item2.key was probably intended; confirm.
                            LiveUnit.Assert.isTrue(typeof item.key === "string");
                        }

                        // Don't have to tolerate null keys now
                        handler.verifyState(state1);

                        // Force a refresh, and continue the test once that has completed
                        dataSource.invalidateAll().then(function () {
                            verifyLastEditSuccessful();

                            // No notifications should have been sent
                            handler.verifyExpectedNotifications([]);
                            handler.verifyState(state1, dataSource);

                            // Try other kinds of edits.  Don't bother to verify success from now on.
                            var state2 = [10, 0, 1, 3, 4, 5, 6, 7, 8, 9, 11],
                                newData = "A new string";

                            dataSource.beginEdits();
                            dataSource.remove("2");
                            dataSource.change("4", newData);
                            dataSource.insertAtEnd(null, Helper.ItemsManager.simpleItem(11));
                            handler.verifyExpectedNotifications([
                                "beginNotifications",
                                "removed",
                                "changed",
                                "inserted",
                            ]);
                            dataSource.endEdits();
                            handler.verifyExpectedNotifications([
                                "indexChanged",
                                "indexChanged",
                                "indexChanged",
                                "indexChanged",
                                "indexChanged",
                                "indexChanged",
                                "indexChanged",
                                "indexChanged",
                                "endNotifications"
                            ]);

                            // Change item 4 back, so verifyState finds the expected value
                            var changePromise = dataSource.change("4", Helper.ItemsManager.simpleItem(4));
                            handler.verifyExpectedNotifications([
                                "beginNotifications",
                                "changed",
                                "endNotifications"
                            ]);
                            handler.verifyState(state2, null, true);
                            changePromise.then(function (item3) {
                                // After an asynchronous insertions, a changed notification will be sent when the edit
                                // completes.
                                handler.verifyExpectedNotifications(synchronous ?
                                    [] :
                                    [
                                        "beginNotifications",
                                        "changed",
                                        "endNotifications"
                                    ]
                                );

                                // Don't have to tolerate null keys now
                                handler.verifyState(state2);

                                var state3 = [13, 12, 9, 10, 1, 3, 4, 5, 6, 0, 7, 11, 8];

                                // Try all the remaining possible singleton edits
                                dataSource.beginEdits();
                                dataSource.moveAfter("0", "6");
                                dataSource.moveToStart("9");
                                dataSource.insertAfter(null, Helper.ItemsManager.simpleItem("12"), "1").then(function (item4) {
                                    dataSource.moveBefore("12", "9");
                                    dataSource.insertBefore(null, Helper.ItemsManager.simpleItem("13"), "12");
                                    var movePromise = dataSource.moveToEnd("8");
                                    dataSource.endEdits();
                                    var expectedNotifications = [
                                        "beginNotifications",
                                        "moved",
                                        "moved",
                                        "inserted",
                                        "moved",
                                        "inserted",
                                        "moved",
                                        "indexChanged",
                                        "indexChanged",
                                        "indexChanged",
                                        "indexChanged",
                                        "indexChanged",
                                        "indexChanged",
                                        "indexChanged",
                                        "indexChanged",
                                        "indexChanged",
                                        "indexChanged",
                                        "indexChanged",
                                        "indexChanged",
                                        "indexChanged",
                                        "countChanged",
                                        "endNotifications"
                                    ];
                                    if (!synchronous) {
                                        // After an asynchronous insertion, a changed notification will be sent when
                                        // the edit completes.
                                        expectedNotifications.splice(4, 0, "changed");
                                    }
                                    handler.verifyExpectedNotifications(expectedNotifications);
                                    handler.verifyState(state3, null, true);
                                    movePromise.then(function (item5) {
                                        // After an asynchronous insertion, a changed notification will be sent when
                                        // the edit completes.
                                        handler.verifyExpectedNotifications(synchronous ?
                                            [] :
                                            [
                                                "beginNotifications",
                                                "changed",
                                                "endNotifications"
                                            ]
                                        );
                                        handler.verifyState(state3);

                                        // Force a refresh, and continue the test once that has completed
                                        dataSource.invalidateAll().then(function () {
                                            // Again, no notifications should have been sent
                                            handler.verifyExpectedNotifications([]);
                                            handler.verifyState(state3, dataSource);

                                            signalTestCaseCompleted();
                                        });
                                    });
                                });
                            });
                        });
                    });
                });
            });
        });
    }
export class EditingTests {
setUp() {
previousTracingOptions = VDSLogging.options;
VDSLogging.options = {
log: function (message) { LiveUnit.Assert.fail(message); },
include: /createListBinding|_retainItem|_releaseItem|release/,
handleTracking: true,
logVDS: true,
stackTraceLimit: 0 // set this to 100 to get good stack traces if you run into a failure.
};
VDSLogging.on();
}
        tearDown() {
            // Stop VDS logging and restore whatever options were active before setUp.
            VDSLogging.off();
            VDSLogging.options = previousTracingOptions;
        }
        // Runs the shared editing scenario with asynchronous data-adapter edits.
        testEditingAsynchronous(signalTestCaseCompleted) {
            testEditing(signalTestCaseCompleted, false);
        }
        // Runs the shared editing scenario with synchronous data-adapter edits.
        testEditingSynchronous(signalTestCaseCompleted) {
            testEditing(signalTestCaseCompleted, true);
        }
// this test will verify the edit error code(notPermitted) by simulating readOnly data source
testEditErrorCodes_NotPermitted(signalTestCaseCompleted) {
var dataSource = Helper.ItemsManager.simpleAsynchronousDataSource(100),
handler = Helper.ItemsManager.simpleListNotificationHandler(),
listBinding = dataSource.createListBinding(handler);
dataSource.testDataAdapter.setProperty("readOnly", true);
Helper.ItemsManager.ensureAllAsynchronousRequestsFulfilled(dataSource);
// Fetch the first item
var itemPromise = listBinding.first();
itemPromise.then(itemPromiseHandler);
function itemPromiseHandler(item) {
Helper.ItemsManager.verifyItemData(item, 0);
Helper.ItemsManager.setImmediate(function () {
handler.verifyExpectedNotifications([
"beginNotifications",
"countChanged",
"endNotifications"
]);
});
clearLastEdit();
var newData = "NewData0"
dataSource.change("0", newData).then(editSuccess, editError);
function editSuccess() {
recordEditSuccess();
LiveUnit.Assert.fail("Expecting an exception when trying to edit a read only data source..");
}
function editError(e) {
LiveUnit.Assert.areEqual("notPermitted", e.name, "Expecting error message while trying to edit a read only data source");
// Change the data source to allow edits now.
dataSource.testDataAdapter.setProperty("readOnly", false);
dataSource.change("0", newData).then(
function () {
LiveUnit.LoggingCore.logComment("edit is successful after data source is made editable at run time");
signalTestCaseCompleted();
},
function (error) {
LiveUnit.Assert.fail("Edit unsuccessful:" + error.name);
signalTestCaseCompleted();
}
);
}
};
}//end of test function
        // Testing the edit error code: noLongerMeaningful by changing the deleted item
        testEditErrorCodes_NoLongerMeaningful(signalTestCaseCompleted) {
            var dataSource = Helper.ItemsManager.simpleAsynchronousDataSource(100),
                handler = Helper.ItemsManager.simpleListNotificationHandler(),
                listBinding = dataSource.createListBinding(handler);

            Helper.ItemsManager.ensureAllAsynchronousRequestsFulfilled(dataSource);

            // Fetch the first item
            var itemPromise = listBinding.first();
            itemPromise.then(function (item) {
                Helper.ItemsManager.verifyItemData(item, 0);
                handler.verifyExpectedNotifications([
                    "beginNotifications",
                    "countChanged",
                    "endNotifications"
                ]);

                clearLastEdit();
                var newData = "NewData0"
                // Remove item "0" first; a subsequent change to it must fail.
                dataSource.remove("0").then(editSuccess, removeError);

                function removeError(error) {
                    LiveUnit.Assert.fail("Remove operation failed:" + error.name);
                }

                function editSuccess() {
                    recordEditSuccess();
                    // Make the adapter report "noLongerMeaningful" for the next edit.
                    dataSource.testDataAdapter.setProperty("notMeaningfulEdit", true);
                    dataSource.change("0", newData).then(
                        function () {
                            LiveUnit.Assert.fail("Exception is expected for noLongerMeaningful edits");
                        },
                        function (e) {
                            LiveUnit.Assert.areEqual("noLongerMeaningful", e.name, "Expected exception is thrown from VDS");
                            signalTestCaseCompleted();
                        }
                    );
                } //end editSuccess
            });
        }//end of test function
        // this test will verify the edit error code(noResponse) by simulating DS communication Failure
        testEditErrorCodes_NoResponse(signalTestCaseCompleted) {
            var dataSource = Helper.ItemsManager.simpleAsynchronousDataSource(100),
                handler = Helper.ItemsManager.simpleListNotificationHandler(),
                listBinding = dataSource.createListBinding(handler);

            Helper.ItemsManager.ensureAllAsynchronousRequestsFulfilled(dataSource);

            // Fetch the first item
            var itemPromise = listBinding.first();
            itemPromise.then(function (item) {
                Helper.ItemsManager.verifyItemData(item, 0);
                handler.verifyExpectedNotifications([
                    "beginNotifications",
                    "countChanged",
                    "endNotifications"
                ]);

                // This will caue the data adapter to return the error when trying to edit the data.
                dataSource.testDataAdapter.setProperty("communicationFailure", true);
                clearLastEdit();
                var newData = "NewData0"
                dataSource.change("0", newData).then(editSuccess, editError);

                // The edit must not succeed while communication is failing.
                function editSuccess() {
                    recordEditSuccess();
                    LiveUnit.Assert.fail("Expecting an exception when trying to edit a data source while communication failure occurs");
                }

                function editError(e) {
                    LiveUnit.Assert.areEqual("noResponse", e.name, "Expecting error message while trying to edit a data source when communication to data fails");

                    // Reestablish data source connection; the same change must succeed.
                    dataSource.testDataAdapter.setProperty("communicationFailure", false);
                    dataSource.change("0", newData).then(function () {
                        LiveUnit.LoggingCore.logComment("edit is successful after data source is made editable at run time");
                        signalTestCaseCompleted();
                    },
                        function (error) {
                            LiveUnit.Assert.fail("Edit unsuccessful:" + error.name);
                        });
                }
            });
        }//end of test function noresponse
// (Disabled) Verifies the count error code "noResponse": getCount() must reject
// and no countChanged notification may fire when the data source is unreachable.
xtestCountError_NoResponse(signalTestCaseCompleted) {
    var dataSource = Helper.ItemsManager.simpleAsynchronousDataSource(100);
    var handler = Helper.ItemsManager.simpleListNotificationHandler();
    var listBinding = dataSource.createListBinding(handler);
    // Track how many times the countChanged notification fires.
    var countChangedCalls = 0;
    Helper.ItemsManager.ensureAllAsynchronousRequestsFulfilled(dataSource);
    dataSource.testDataAdapter.setProperty("count_NoResponse", true);
    handler.countChanged = function (newCount, oldCount) {
        countChangedCalls++;
        LiveUnit.Assert.fail("CountChanged Handler should not be thrown when count is not returned from the data source as DS cannot be communicated");
    };
    dataSource.getCount().then(
        function (count) {
            LiveUnit.Assert.fail("countSuccess handler should not be called when error is returned from the data adapter");
        },
        function (e) {
            LiveUnit.Assert.areEqual("noResponse", e.name, "Wrong Error code.");
            LiveUnit.Assert.areEqual(0, countChangedCalls, "countChanged notification should not be thrown");
            // Give any stray notifications a chance to fire before completing.
            setTimeout(signalTestCaseCompleted, 2000);
        }
    );
}//end of test function count-noresponse
// Verifies the count error code "unknown": getCount() must reject and no
// countChanged notification may fire when the adapter reports an unknown count.
testCountError_Unknown(signalTestCaseCompleted) {
    var dataSource = Helper.ItemsManager.simpleAsynchronousDataSource(100);
    var handler = Helper.ItemsManager.simpleListNotificationHandler();
    var listBinding = dataSource.createListBinding(handler);
    // Track how many times the countChanged notification fires.
    var countChangedCalls = 0;
    Helper.ItemsManager.ensureAllAsynchronousRequestsFulfilled(dataSource);
    dataSource.testDataAdapter.setProperty("countUnknown", true);
    handler.countChanged = function (newCount, oldCount) {
        countChangedCalls++;
        LiveUnit.Assert.fail("CountChanged Handler should not be thrown when unknown count is returned");
    };
    dataSource.getCount().then(
        function (count) {
            LiveUnit.Assert.fail("countSuccess handler should not be called when error is returned from the data adapter");
        },
        function (e) {
            LiveUnit.Assert.areEqual("unknown", e.name, "Wrong Error code.");
            LiveUnit.Assert.areEqual(0, countChangedCalls, "countChanged notification should not be thrown");
            // Give any stray notifications a chance to fire before completing.
            setTimeout(signalTestCaseCompleted, 2000);
        }
    );
}//end of test function count-unknown
};
}
// Register this class with the LiveUnit test framework under the given fully-qualified name.
LiveUnit.registerTestClass("WinJSTests.EditingTests");
import * as msRest from "@azure/ms-rest-js";
import * as Models from "../models";
import * as Mappers from "../models/tasksMappers";
import * as Parameters from "../models/parameters";
import { DataMigrationServiceClientContext } from "../dataMigrationServiceClientContext";
/**
 * Class representing a Tasks.
 *
 * Operation group for the task resources of the Azure Database Migration Service.
 * Each public operation exposes three overloads (promise, callback, options+callback)
 * that all funnel into a single implementation which delegates to the client's
 * generic operation dispatcher with the matching operation spec.
 *
 * NOTE(review): the structure looks AutoRest-generated — prefer regenerating over
 * hand-editing; confirm against the package's codegen setup.
 */
export class Tasks {
  // Service client used to send all operation requests.
  private readonly client: DataMigrationServiceClientContext;

  /**
   * Create a Tasks.
   * @param {DataMigrationServiceClientContext} client Reference to the service client.
   */
  constructor(client: DataMigrationServiceClientContext) {
    this.client = client;
  }

  /**
   * The services resource is the top-level resource that represents the Database Migration Service.
   * This method returns a list of tasks owned by a service resource. Some tasks may have a status of
   * Unknown, which indicates that an error occurred while querying the status of that task.
   * @summary Get tasks in a service
   * @param groupName Name of the resource group
   * @param serviceName Name of the service
   * @param projectName Name of the project
   * @param [options] The optional parameters
   * @returns Promise<Models.TasksListResponse>
   */
  list(groupName: string, serviceName: string, projectName: string, options?: Models.TasksListOptionalParams): Promise<Models.TasksListResponse>;
  /**
   * @param groupName Name of the resource group
   * @param serviceName Name of the service
   * @param projectName Name of the project
   * @param callback The callback
   */
  list(groupName: string, serviceName: string, projectName: string, callback: msRest.ServiceCallback<Models.TaskList>): void;
  /**
   * @param groupName Name of the resource group
   * @param serviceName Name of the service
   * @param projectName Name of the project
   * @param options The optional parameters
   * @param callback The callback
   */
  list(groupName: string, serviceName: string, projectName: string, options: Models.TasksListOptionalParams, callback: msRest.ServiceCallback<Models.TaskList>): void;
  // Implementation overload: dispatches the arguments to listOperationSpec.
  list(groupName: string, serviceName: string, projectName: string, options?: Models.TasksListOptionalParams | msRest.ServiceCallback<Models.TaskList>, callback?: msRest.ServiceCallback<Models.TaskList>): Promise<Models.TasksListResponse> {
    return this.client.sendOperationRequest(
      {
        groupName,
        serviceName,
        projectName,
        options
      },
      listOperationSpec,
      callback) as Promise<Models.TasksListResponse>;
  }

  /**
   * The tasks resource is a nested, proxy-only resource representing work performed by a DMS
   * instance. The PUT method creates a new task or updates an existing one, although since tasks
   * have no mutable custom properties, there is little reason to update an existing one.
   * @summary Create or update task
   * @param parameters Information about the task
   * @param groupName Name of the resource group
   * @param serviceName Name of the service
   * @param projectName Name of the project
   * @param taskName Name of the Task
   * @param [options] The optional parameters
   * @returns Promise<Models.TasksCreateOrUpdateResponse>
   */
  createOrUpdate(parameters: Models.ProjectTask, groupName: string, serviceName: string, projectName: string, taskName: string, options?: msRest.RequestOptionsBase): Promise<Models.TasksCreateOrUpdateResponse>;
  /**
   * @param parameters Information about the task
   * @param groupName Name of the resource group
   * @param serviceName Name of the service
   * @param projectName Name of the project
   * @param taskName Name of the Task
   * @param callback The callback
   */
  createOrUpdate(parameters: Models.ProjectTask, groupName: string, serviceName: string, projectName: string, taskName: string, callback: msRest.ServiceCallback<Models.ProjectTask>): void;
  /**
   * @param parameters Information about the task
   * @param groupName Name of the resource group
   * @param serviceName Name of the service
   * @param projectName Name of the project
   * @param taskName Name of the Task
   * @param options The optional parameters
   * @param callback The callback
   */
  createOrUpdate(parameters: Models.ProjectTask, groupName: string, serviceName: string, projectName: string, taskName: string, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.ProjectTask>): void;
  // Implementation overload: dispatches the arguments to createOrUpdateOperationSpec.
  createOrUpdate(parameters: Models.ProjectTask, groupName: string, serviceName: string, projectName: string, taskName: string, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.ProjectTask>, callback?: msRest.ServiceCallback<Models.ProjectTask>): Promise<Models.TasksCreateOrUpdateResponse> {
    return this.client.sendOperationRequest(
      {
        parameters,
        groupName,
        serviceName,
        projectName,
        taskName,
        options
      },
      createOrUpdateOperationSpec,
      callback) as Promise<Models.TasksCreateOrUpdateResponse>;
  }

  /**
   * The tasks resource is a nested, proxy-only resource representing work performed by a DMS
   * instance. The GET method retrieves information about a task.
   * @summary Get task information
   * @param groupName Name of the resource group
   * @param serviceName Name of the service
   * @param projectName Name of the project
   * @param taskName Name of the Task
   * @param [options] The optional parameters
   * @returns Promise<Models.TasksGetResponse>
   */
  get(groupName: string, serviceName: string, projectName: string, taskName: string, options?: Models.TasksGetOptionalParams): Promise<Models.TasksGetResponse>;
  /**
   * @param groupName Name of the resource group
   * @param serviceName Name of the service
   * @param projectName Name of the project
   * @param taskName Name of the Task
   * @param callback The callback
   */
  get(groupName: string, serviceName: string, projectName: string, taskName: string, callback: msRest.ServiceCallback<Models.ProjectTask>): void;
  /**
   * @param groupName Name of the resource group
   * @param serviceName Name of the service
   * @param projectName Name of the project
   * @param taskName Name of the Task
   * @param options The optional parameters
   * @param callback The callback
   */
  get(groupName: string, serviceName: string, projectName: string, taskName: string, options: Models.TasksGetOptionalParams, callback: msRest.ServiceCallback<Models.ProjectTask>): void;
  // Implementation overload: dispatches the arguments to getOperationSpec.
  get(groupName: string, serviceName: string, projectName: string, taskName: string, options?: Models.TasksGetOptionalParams | msRest.ServiceCallback<Models.ProjectTask>, callback?: msRest.ServiceCallback<Models.ProjectTask>): Promise<Models.TasksGetResponse> {
    return this.client.sendOperationRequest(
      {
        groupName,
        serviceName,
        projectName,
        taskName,
        options
      },
      getOperationSpec,
      callback) as Promise<Models.TasksGetResponse>;
  }

  /**
   * The tasks resource is a nested, proxy-only resource representing work performed by a DMS
   * instance. The DELETE method deletes a task, canceling it first if it's running.
   * @summary Delete task
   * @param groupName Name of the resource group
   * @param serviceName Name of the service
   * @param projectName Name of the project
   * @param taskName Name of the Task
   * @param [options] The optional parameters
   * @returns Promise<msRest.RestResponse>
   */
  deleteMethod(groupName: string, serviceName: string, projectName: string, taskName: string, options?: Models.TasksDeleteMethodOptionalParams): Promise<msRest.RestResponse>;
  /**
   * @param groupName Name of the resource group
   * @param serviceName Name of the service
   * @param projectName Name of the project
   * @param taskName Name of the Task
   * @param callback The callback
   */
  deleteMethod(groupName: string, serviceName: string, projectName: string, taskName: string, callback: msRest.ServiceCallback<void>): void;
  /**
   * @param groupName Name of the resource group
   * @param serviceName Name of the service
   * @param projectName Name of the project
   * @param taskName Name of the Task
   * @param options The optional parameters
   * @param callback The callback
   */
  deleteMethod(groupName: string, serviceName: string, projectName: string, taskName: string, options: Models.TasksDeleteMethodOptionalParams, callback: msRest.ServiceCallback<void>): void;
  // Implementation overload: dispatches the arguments to deleteMethodOperationSpec.
  deleteMethod(groupName: string, serviceName: string, projectName: string, taskName: string, options?: Models.TasksDeleteMethodOptionalParams | msRest.ServiceCallback<void>, callback?: msRest.ServiceCallback<void>): Promise<msRest.RestResponse> {
    return this.client.sendOperationRequest(
      {
        groupName,
        serviceName,
        projectName,
        taskName,
        options
      },
      deleteMethodOperationSpec,
      callback);
  }

  /**
   * The tasks resource is a nested, proxy-only resource representing work performed by a DMS
   * instance. The PATCH method updates an existing task, but since tasks have no mutable custom
   * properties, there is little reason to do so.
   * @summary Create or update task
   * @param parameters Information about the task
   * @param groupName Name of the resource group
   * @param serviceName Name of the service
   * @param projectName Name of the project
   * @param taskName Name of the Task
   * @param [options] The optional parameters
   * @returns Promise<Models.TasksUpdateResponse>
   */
  update(parameters: Models.ProjectTask, groupName: string, serviceName: string, projectName: string, taskName: string, options?: msRest.RequestOptionsBase): Promise<Models.TasksUpdateResponse>;
  /**
   * @param parameters Information about the task
   * @param groupName Name of the resource group
   * @param serviceName Name of the service
   * @param projectName Name of the project
   * @param taskName Name of the Task
   * @param callback The callback
   */
  update(parameters: Models.ProjectTask, groupName: string, serviceName: string, projectName: string, taskName: string, callback: msRest.ServiceCallback<Models.ProjectTask>): void;
  /**
   * @param parameters Information about the task
   * @param groupName Name of the resource group
   * @param serviceName Name of the service
   * @param projectName Name of the project
   * @param taskName Name of the Task
   * @param options The optional parameters
   * @param callback The callback
   */
  update(parameters: Models.ProjectTask, groupName: string, serviceName: string, projectName: string, taskName: string, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.ProjectTask>): void;
  // Implementation overload: dispatches the arguments to updateOperationSpec.
  update(parameters: Models.ProjectTask, groupName: string, serviceName: string, projectName: string, taskName: string, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.ProjectTask>, callback?: msRest.ServiceCallback<Models.ProjectTask>): Promise<Models.TasksUpdateResponse> {
    return this.client.sendOperationRequest(
      {
        parameters,
        groupName,
        serviceName,
        projectName,
        taskName,
        options
      },
      updateOperationSpec,
      callback) as Promise<Models.TasksUpdateResponse>;
  }

  /**
   * The tasks resource is a nested, proxy-only resource representing work performed by a DMS
   * instance. This method cancels a task if it's currently queued or running.
   * @summary Cancel a task
   * @param groupName Name of the resource group
   * @param serviceName Name of the service
   * @param projectName Name of the project
   * @param taskName Name of the Task
   * @param [options] The optional parameters
   * @returns Promise<Models.TasksCancelResponse>
   */
  cancel(groupName: string, serviceName: string, projectName: string, taskName: string, options?: msRest.RequestOptionsBase): Promise<Models.TasksCancelResponse>;
  /**
   * @param groupName Name of the resource group
   * @param serviceName Name of the service
   * @param projectName Name of the project
   * @param taskName Name of the Task
   * @param callback The callback
   */
  cancel(groupName: string, serviceName: string, projectName: string, taskName: string, callback: msRest.ServiceCallback<Models.ProjectTask>): void;
  /**
   * @param groupName Name of the resource group
   * @param serviceName Name of the service
   * @param projectName Name of the project
   * @param taskName Name of the Task
   * @param options The optional parameters
   * @param callback The callback
   */
  cancel(groupName: string, serviceName: string, projectName: string, taskName: string, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.ProjectTask>): void;
  // Implementation overload: dispatches the arguments to cancelOperationSpec.
  cancel(groupName: string, serviceName: string, projectName: string, taskName: string, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.ProjectTask>, callback?: msRest.ServiceCallback<Models.ProjectTask>): Promise<Models.TasksCancelResponse> {
    return this.client.sendOperationRequest(
      {
        groupName,
        serviceName,
        projectName,
        taskName,
        options
      },
      cancelOperationSpec,
      callback) as Promise<Models.TasksCancelResponse>;
  }

  /**
   * The tasks resource is a nested, proxy-only resource representing work performed by a DMS
   * instance. This method executes a command on a running task.
   * @summary Execute a command on a task
   * @param groupName Name of the resource group
   * @param serviceName Name of the service
   * @param projectName Name of the project
   * @param taskName Name of the Task
   * @param parameters Command to execute
   * @param [options] The optional parameters
   * @returns Promise<Models.TasksCommandResponse>
   */
  command(groupName: string, serviceName: string, projectName: string, taskName: string, parameters: Models.CommandPropertiesUnion, options?: msRest.RequestOptionsBase): Promise<Models.TasksCommandResponse>;
  /**
   * @param groupName Name of the resource group
   * @param serviceName Name of the service
   * @param projectName Name of the project
   * @param taskName Name of the Task
   * @param parameters Command to execute
   * @param callback The callback
   */
  command(groupName: string, serviceName: string, projectName: string, taskName: string, parameters: Models.CommandPropertiesUnion, callback: msRest.ServiceCallback<Models.CommandPropertiesUnion>): void;
  /**
   * @param groupName Name of the resource group
   * @param serviceName Name of the service
   * @param projectName Name of the project
   * @param taskName Name of the Task
   * @param parameters Command to execute
   * @param options The optional parameters
   * @param callback The callback
   */
  command(groupName: string, serviceName: string, projectName: string, taskName: string, parameters: Models.CommandPropertiesUnion, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.CommandPropertiesUnion>): void;
  // Implementation overload: dispatches the arguments to commandOperationSpec.
  command(groupName: string, serviceName: string, projectName: string, taskName: string, parameters: Models.CommandPropertiesUnion, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.CommandPropertiesUnion>, callback?: msRest.ServiceCallback<Models.CommandPropertiesUnion>): Promise<Models.TasksCommandResponse> {
    return this.client.sendOperationRequest(
      {
        groupName,
        serviceName,
        projectName,
        taskName,
        parameters,
        options
      },
      commandOperationSpec,
      callback) as Promise<Models.TasksCommandResponse>;
  }

  /**
   * The services resource is the top-level resource that represents the Database Migration Service.
   * This method returns a list of tasks owned by a service resource. Some tasks may have a status of
   * Unknown, which indicates that an error occurred while querying the status of that task.
   * @summary Get tasks in a service
   * @param nextPageLink The NextLink from the previous successful call to List operation.
   * @param [options] The optional parameters
   * @returns Promise<Models.TasksListNextResponse>
   */
  listNext(nextPageLink: string, options?: msRest.RequestOptionsBase): Promise<Models.TasksListNextResponse>;
  /**
   * @param nextPageLink The NextLink from the previous successful call to List operation.
   * @param callback The callback
   */
  listNext(nextPageLink: string, callback: msRest.ServiceCallback<Models.TaskList>): void;
  /**
   * @param nextPageLink The NextLink from the previous successful call to List operation.
   * @param options The optional parameters
   * @param callback The callback
   */
  listNext(nextPageLink: string, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.TaskList>): void;
  // Implementation overload: follows a pagination link via listNextOperationSpec.
  listNext(nextPageLink: string, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.TaskList>, callback?: msRest.ServiceCallback<Models.TaskList>): Promise<Models.TasksListNextResponse> {
    return this.client.sendOperationRequest(
      {
        nextPageLink,
        options
      },
      listNextOperationSpec,
      callback) as Promise<Models.TasksListNextResponse>;
  }
}
// Operation Specifications
// Shared serializer configured with this package's model mappers.
const serializer = new msRest.Serializer(Mappers);

// GET .../tasks — lists the tasks of a project (supports the taskType query filter).
const listOperationSpec: msRest.OperationSpec = {
  httpMethod: "GET",
  path: "subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}/tasks",
  urlParameters: [
    Parameters.subscriptionId,
    Parameters.groupName,
    Parameters.serviceName,
    Parameters.projectName
  ],
  queryParameters: [
    Parameters.apiVersion,
    Parameters.taskType
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  responses: {
    200: {
      bodyMapper: Mappers.TaskList
    },
    default: {
      bodyMapper: Mappers.ApiError
    }
  },
  serializer
};

// PUT .../tasks/{taskName} — creates or replaces a task; body is a required ProjectTask.
const createOrUpdateOperationSpec: msRest.OperationSpec = {
  httpMethod: "PUT",
  path: "subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}/tasks/{taskName}",
  urlParameters: [
    Parameters.subscriptionId,
    Parameters.groupName,
    Parameters.serviceName,
    Parameters.projectName,
    Parameters.taskName
  ],
  queryParameters: [
    Parameters.apiVersion
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  requestBody: {
    parameterPath: "parameters",
    mapper: {
      ...Mappers.ProjectTask,
      required: true
    }
  },
  responses: {
    200: {
      bodyMapper: Mappers.ProjectTask
    },
    201: {
      bodyMapper: Mappers.ProjectTask
    },
    default: {
      bodyMapper: Mappers.ApiError
    }
  },
  serializer
};

// GET .../tasks/{taskName} — fetches a single task (supports the $expand query parameter).
const getOperationSpec: msRest.OperationSpec = {
  httpMethod: "GET",
  path: "subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}/tasks/{taskName}",
  urlParameters: [
    Parameters.subscriptionId,
    Parameters.groupName,
    Parameters.serviceName,
    Parameters.projectName,
    Parameters.taskName
  ],
  queryParameters: [
    Parameters.expand,
    Parameters.apiVersion
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  responses: {
    200: {
      bodyMapper: Mappers.ProjectTask
    },
    default: {
      bodyMapper: Mappers.ApiError
    }
  },
  serializer
};

// DELETE .../tasks/{taskName} — deletes a task; 200 and 204 both carry no body.
const deleteMethodOperationSpec: msRest.OperationSpec = {
  httpMethod: "DELETE",
  path: "subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}/tasks/{taskName}",
  urlParameters: [
    Parameters.subscriptionId,
    Parameters.groupName,
    Parameters.serviceName,
    Parameters.projectName,
    Parameters.taskName
  ],
  queryParameters: [
    Parameters.deleteRunningTasks,
    Parameters.apiVersion
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  responses: {
    200: {},
    204: {},
    default: {
      bodyMapper: Mappers.ApiError
    }
  },
  serializer
};

// PATCH .../tasks/{taskName} — updates an existing task; body is a required ProjectTask.
const updateOperationSpec: msRest.OperationSpec = {
  httpMethod: "PATCH",
  path: "subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}/tasks/{taskName}",
  urlParameters: [
    Parameters.subscriptionId,
    Parameters.groupName,
    Parameters.serviceName,
    Parameters.projectName,
    Parameters.taskName
  ],
  queryParameters: [
    Parameters.apiVersion
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  requestBody: {
    parameterPath: "parameters",
    mapper: {
      ...Mappers.ProjectTask,
      required: true
    }
  },
  responses: {
    200: {
      bodyMapper: Mappers.ProjectTask
    },
    default: {
      bodyMapper: Mappers.ApiError
    }
  },
  serializer
};

// POST .../tasks/{taskName}/cancel — cancels a queued or running task.
const cancelOperationSpec: msRest.OperationSpec = {
  httpMethod: "POST",
  path: "subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}/tasks/{taskName}/cancel",
  urlParameters: [
    Parameters.subscriptionId,
    Parameters.groupName,
    Parameters.serviceName,
    Parameters.projectName,
    Parameters.taskName
  ],
  queryParameters: [
    Parameters.apiVersion
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  responses: {
    200: {
      bodyMapper: Mappers.ProjectTask
    },
    default: {
      bodyMapper: Mappers.ApiError
    }
  },
  serializer
};

// POST .../tasks/{taskName}/command — executes a command against a running task.
const commandOperationSpec: msRest.OperationSpec = {
  httpMethod: "POST",
  path: "subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/projects/{projectName}/tasks/{taskName}/command",
  urlParameters: [
    Parameters.subscriptionId,
    Parameters.groupName,
    Parameters.serviceName,
    Parameters.projectName,
    Parameters.taskName
  ],
  queryParameters: [
    Parameters.apiVersion
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  requestBody: {
    parameterPath: "parameters",
    mapper: {
      ...Mappers.CommandProperties,
      required: true
    }
  },
  responses: {
    200: {
      bodyMapper: Mappers.CommandProperties
    },
    default: {
      bodyMapper: Mappers.ApiError
    }
  },
  serializer
};

// GET {nextLink} — pagination follow-up for the list operation; the full URL comes
// from the service, so only the base URL is fixed here.
const listNextOperationSpec: msRest.OperationSpec = {
  httpMethod: "GET",
  baseUrl: "https://management.azure.com",
  path: "{nextLink}",
  urlParameters: [
    Parameters.nextPageLink
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  responses: {
    200: {
      bodyMapper: Mappers.TaskList
    },
    default: {
      bodyMapper: Mappers.ApiError
    }
  },
  serializer
};
import {
API,
APIEvent,
Characteristic,
DynamicPlatformPlugin,
Logger,
PlatformAccessory,
Service,
} from 'homebridge';
import { PLATFORM_NAME, PLUGIN_NAME } from './settings';
import * as path from 'path';
import { PermitJoinAccessory } from './accessories/permit-join-accessory';
import { sleep } from './utils/sleep';
import { parseModelName } from './utils/parse-model-name';
import { ZigBeeAccessory } from './accessories/zig-bee-accessory';
import {
createAccessoryInstance,
isAccessorySupported,
registerAccessoryFactory,
} from './registry';
import { ZigBeeClient } from './zigbee/zig-bee-client';
import { TouchlinkAccessory } from './accessories/touchlink-accessory';
import {
DeviceAnnouncePayload,
DeviceInterviewPayload,
DeviceJoinedPayload,
DeviceLeavePayload,
MessagePayload,
} from 'zigbee-herdsman/dist/controller/events';
import { Device } from 'zigbee-herdsman/dist/controller/model';
import { HttpServer } from './web/api/http-server';
import { DeviceState } from './zigbee/types';
import * as fs from 'fs';
import { ZigBeeNTPlatformConfig } from './types';
import { ConfigurableAccessory } from './accessories/configurable-accessory';
import { difference } from 'lodash';
// Display names for the two virtual utility accessories exposed by the platform.
const PERMIT_JOIN_ACCESSORY_NAME = 'zigbee:permit-join';
const TOUCH_LINK_ACCESSORY_NAME = 'zigbee:touchlink';
// PAN ID used when config.panId is missing or out of range (see startZigBee).
const DEFAULT_PAN_ID = 0x1a62;
export class ZigbeeNTHomebridgePlatform implements DynamicPlatformPlugin {
// HAP service/characteristic constructors resolved from the Homebridge API.
public readonly Service: typeof Service = this.api.hap.Service;
public readonly Characteristic: typeof Characteristic = this.api.hap.Characteristic;
// Platform accessories keyed by HAP UUID (restored from cache or created at runtime).
private readonly accessories: Map<string, PlatformAccessory>;
// ZigBee accessory wrappers, keyed by the same HAP UUID.
private readonly homekitAccessories: Map<string, ZigBeeAccessory>;
// Virtual permit-join switch; null until handleZigBeeReady() initializes it.
private permitJoinAccessory: PermitJoinAccessory;
public readonly PlatformAccessory: typeof PlatformAccessory;
// ZigBee controller client; created in startZigBee().
private client: ZigBeeClient;
// Web UI server; created in handleZigBeeReady() unless disabled in config.
private httpServer: HttpServer;
// Virtual switch for resetting devices via the Touchlink feature.
private touchLinkAccessory: TouchlinkAccessory;
/**
 * Builds the platform: logs the plugin version, registers accessory factories
 * for any custom-configured devices and hooks the Homebridge lifecycle events
 * that start/stop the ZigBee service.
 *
 * @param log Homebridge logger
 * @param config platform section from config.json
 * @param api Homebridge API handle
 */
constructor(
  public readonly log: Logger,
  public readonly config: ZigBeeNTPlatformConfig,
  public readonly api: API
) {
  const packageJson = JSON.parse(
    fs.readFileSync(path.resolve(__dirname, '../package.json'), 'utf-8')
  );
  this.accessories = new Map<string, PlatformAccessory>();
  this.homekitAccessories = new Map<string, ZigBeeAccessory>();
  this.permitJoinAccessory = null;
  this.PlatformAccessory = this.api.platformAccessory;
  this.log.info(
    `Initializing platform: ${this.config.name} - v${packageJson.version} (API v${api.version})`
  );
  // Register a factory per custom-configured device so its services are built
  // from the user's configuration instead of a built-in accessory class.
  for (const deviceConfig of config.devices || []) {
    this.log.info(
      `Registering custom configured device ${deviceConfig.manufacturer} - ${deviceConfig.models.join(
        ', '
      )}`
    );
    registerAccessoryFactory(
      deviceConfig.manufacturer,
      deviceConfig.models,
      (
        platform: ZigbeeNTHomebridgePlatform,
        accessory: PlatformAccessory,
        client: ZigBeeClient,
        device: Device
      ) => new ConfigurableAccessory(platform, accessory, client, device, deviceConfig.services)
    );
  }
  this.api.on(APIEvent.DID_FINISH_LAUNCHING, () => this.startZigBee());
  this.api.on(APIEvent.SHUTDOWN, () => this.stopZigbee());
}
/** The active ZigBee client (unset until startZigBee() has run). */
get zigBeeClient(): ZigBeeClient {
  return this.client;
}
/**
 * Creates the ZigBee client, starts it with the configured (or default)
 * network settings, subscribes to the controller events and finally runs the
 * post-start initialization (handleZigBeeReady).
 */
public async startZigBee(): Promise<void> {
  // Create client
  this.client = new ZigBeeClient(this.log, this.config.customDeviceSettings);
  const { channel, secondaryChannel, port, adapter } = this.config;
  // Use the configured PAN ID only when it is present and within range.
  const panId =
    this.config.panId && this.config.panId < 0xffff ? this.config.panId : DEFAULT_PAN_ID;
  const database = this.config.database || path.join(this.api.user.storagePath(), './zigBee.db');
  await this.client.start({ channel, secondaryChannel, port, panId, database, adapter });
  // Forward every controller event to its platform handler.
  this.zigBeeClient.on('deviceAnnounce', (message: DeviceAnnouncePayload) =>
    this.handleDeviceAnnounce(message)
  );
  this.zigBeeClient.on('deviceInterview', (message: DeviceInterviewPayload) =>
    this.handleZigBeeDevInterview(message)
  );
  this.zigBeeClient.on('deviceJoined', (message: DeviceJoinedPayload) =>
    this.handleZigBeeDevJoined(message)
  );
  this.zigBeeClient.on('deviceLeave', (message: DeviceLeavePayload) =>
    this.handleZigBeeDevLeaving(message)
  );
  this.zigBeeClient.on('message', (message: MessagePayload) => this.handleZigBeeMessage(message));
  await this.handleZigBeeReady();
}
/**
 * Gracefully shuts down the ZigBee client and the web UI server.
 * Errors are logged rather than re-thrown so shutdown always completes;
 * optional chaining guards against services that never started.
 */
async stopZigbee(): Promise<void> {
  try {
    this.log.info('Stopping zigbee service');
    await this.zigBeeClient?.stop();
    this.log.info('Stopping http server');
    await this.httpServer?.stop();
    this.log.info('Successfully stopped ZigBee service');
  } catch (e) {
    this.log.error('Error while stopping ZigBee service', e);
  }
}
/**
 * This function is invoked when homebridge restores cached accessories from disk at startup.
 * It should be used to setup event handlers for characteristics and update respective values.
 */
configureAccessory(accessory: PlatformAccessory): void {
  this.log.info('Loading accessory from cache:', accessory.displayName);
  // Index by UUID so later device discovery can tell cached accessories
  // apart from ones that need to be created (see handleZigBeeReady).
  this.accessories.set(accessory.UUID, accessory);
}
/**
 * Logs interview progress for a joining device and, once the interview
 * completes successfully, maps the device to a Homebridge accessory.
 */
async handleZigBeeDevInterview(message: DeviceInterviewPayload): Promise<void> {
  const { status, device } = message;
  const ieeeAddr = device.ieeeAddr;
  if (status === 'failed') {
    this.log.error(
      `Interview progress ${status} for device ${this.getDeviceFriendlyName(ieeeAddr)}`
    );
  } else if (status === 'started') {
    this.log.info(
      `Interview progress ${status} for device ${this.getDeviceFriendlyName(ieeeAddr)}`
    );
  } else if (status === 'successful') {
    this.log.info(
      `Successfully interviewed device: ${device.manufacturerName} - ${device.modelID}`
    );
    await this.handleDeviceUpdate(device);
  }
}
/**
 * Handles a device-joined event: logs the new device and maps it to a
 * Homebridge accessory.
 * @returns true when a new accessory was created and initialized
 */
async handleZigBeeDevJoined(message: DeviceJoinedPayload): Promise<boolean> {
  const { ieeeAddr, manufacturerName, modelID } = message.device;
  this.log.info(
    `Device joined, Adding ${this.getDeviceFriendlyName(ieeeAddr)} (${manufacturerName} - ${modelID})`
  );
  return await this.handleDeviceUpdate(message.device);
}
/**
 * Initializes a newly seen device, or pokes the existing accessory when the
 * device is already mapped in Homebridge.
 * @returns true only when a brand new accessory was successfully created
 */
private async handleDeviceUpdate(device: Device): Promise<boolean> {
  const existing = this.getHomekitAccessoryByIeeeAddr(device.ieeeAddr);
  if (existing) {
    this.log.debug(
      `Not initializing device ${this.getDeviceFriendlyName(
        device.ieeeAddr
      )}: already mapped in Homebridge`
    );
    // Trigger a state refresh on the already-mapped accessory.
    existing.internalUpdate({});
    return false;
  }
  // Wait a little bit for a database sync
  await sleep(1500);
  const uuid = await this.initDevice(device);
  return uuid !== null;
}
// Deterministically derives the HAP UUID for a device from its IEEE address.
generateUUID(ieeeAddr: string): string {
  return this.api.hap.uuid.generate(ieeeAddr);
}
/**
 * Handles a device-leave announcement: stops permit-join mode and unpairs the
 * corresponding accessory.
 *
 * @param message leave payload carrying the device's IEEE address
 * @returns true when the accessory was successfully removed
 */
async handleZigBeeDevLeaving(message: DeviceLeavePayload): Promise<boolean> {
  const ieeeAddr = message.ieeeAddr;
  // Stop permit join. The permit-join accessory is null until handleZigBeeReady()
  // has run, while this handler is registered earlier in startZigBee() — guard
  // with ?. so a leave event during startup cannot crash the platform
  // (consistent with the ?. usage in stopZigbee).
  await this.permitJoinAccessory?.setPermitJoin(false);
  this.log.info(`Device announced leaving and will be removed, id: ${ieeeAddr}`);
  return await this.unpairDevice(ieeeAddr);
}
/**
 * Post-start initialization, run once the ZigBee controller is up:
 * configures the LED, creates the permit-join and Touchlink utility
 * accessories, maps every paired device to an accessory, purges cached
 * accessories that no longer correspond to a paired device, and finally
 * starts the web UI (unless disabled).
 */
async handleZigBeeReady(): Promise<void> {
  const info: Device = this.zigBeeClient.getCoordinator();
  this.log.info(`ZigBee platform initialized @ ${info.ieeeAddr}`);
  // Set led indicator
  await this.zigBeeClient.toggleLed(!this.config.disableLed);
  // Init permit join accessory
  await this.initPermitJoinAccessory();
  // Init switch to reset devices through Touchlink feature
  this.initTouchLinkAccessory();
  // Init devices: collect the UUID of every successfully mapped device.
  const paired = (
    await Promise.all(
      this.zigBeeClient.getAllPairedDevices().map(device => this.initDevice(device))
    )
  ).filter(uuid => uuid !== null);
  // The two utility accessories are always considered "paired".
  paired.push(this.permitJoinAccessory.accessory.UUID);
  paired.push(this.touchLinkAccessory.accessory.UUID);
  // Unregister cached accessories that have no matching paired device anymore.
  const missing = difference([...this.accessories.keys()], paired);
  missing.forEach(uuid => {
    this.api.unregisterPlatformAccessories(PLUGIN_NAME, PLATFORM_NAME, [
      this.accessories.get(uuid),
    ]);
    this.accessories.delete(uuid);
    this.homekitAccessories.delete(uuid);
  });
  if (this.config.disableHttpServer !== true) {
    try {
      this.httpServer = new HttpServer(this.config.httpPort);
      this.httpServer.start(this);
    } catch (e) {
      this.log.error('WEB UI failed to start.', e);
    }
  } else {
    this.log.info('WEB UI disabled.');
  }
}
/** Looks up the cached PlatformAccessory for a device IEEE address. */
public getAccessoryByIeeeAddr(ieeeAddr: string): PlatformAccessory {
  return this.accessories.get(this.generateUUID(ieeeAddr));
}
/** Looks up the cached PlatformAccessory by HomeKit UUID. */
public getAccessoryByUUID(uuid: string): PlatformAccessory {
  return this.accessories.get(uuid);
}
/** Looks up the ZigBeeAccessory wrapper for a device IEEE address. */
public getHomekitAccessoryByIeeeAddr(ieeeAddr: string): ZigBeeAccessory {
  return this.homekitAccessories.get(this.generateUUID(ieeeAddr));
}
/**
 * Looks up the ZigBeeAccessory wrapper by HomeKit UUID.
 * Fix: explicit return type added for consistency with the sibling getters.
 */
public getHomekitAccessoryByUUID(uuid: string): ZigBeeAccessory {
  return this.homekitAccessories.get(uuid);
}
/**
 * Creates and registers the HomeKit accessory for a paired ZigBee device.
 *
 * Fixes:
 * - return type widened to `Promise<string | null>`: the original declared
 *   `Promise<string>` yet returned `null` for unsupported/failed devices,
 *   which is a type error under strictNullChecks. Callers already check
 *   `uuid !== null`, so this is backward-compatible.
 * - the "Unsupported" log message reuses `deviceName` instead of rebuilding
 *   the identical string (output is byte-identical).
 *
 * @param device - the paired ZigBee device to map
 * @returns the accessory UUID on success, or null when the device is
 *   unsupported or its initialization failed
 */
private async initDevice(device: Device): Promise<string | null> {
  const model = parseModelName(device.modelID);
  const manufacturer = device.manufacturerName;
  const ieeeAddr = device.ieeeAddr;
  const deviceName: string = `${this.getDeviceFriendlyName(
    ieeeAddr
  )} - ${model} - ${manufacturer}`;
  this.log.info(`Initializing ZigBee device: ${deviceName}`);
  if (!isAccessorySupported(device)) {
    this.log.info(`Unsupported ZigBee device: ${deviceName}`);
    return null;
  }
  try {
    const accessory = this.createHapAccessory(ieeeAddr);
    const homeKitAccessory = createAccessoryInstance(this, accessory, this.client, device);
    if (homeKitAccessory) {
      this.log.info('Registered device:', homeKitAccessory.friendlyName, manufacturer, model);
      await homeKitAccessory.initialize(); // init services
      this.homekitAccessories.set(accessory.UUID, homeKitAccessory);
      return accessory.UUID;
    }
  } catch (e) {
    this.log.error(`Error initializing device ${deviceName}`, e);
  }
  return null;
}
/**
 * Triggers the onDeviceMount hook of the accessory mapped to the given
 * IEEE address. Failures are logged (not rethrown) with a hint to re-pair.
 */
private async mountDevice(ieeeAddr: string): Promise<void> {
  try {
    const uuid = this.generateUUID(ieeeAddr);
    const zigBeeAccessory = this.getHomekitAccessoryByUUID(uuid);
    if (zigBeeAccessory) {
      return await zigBeeAccessory.onDeviceMount();
    }
  } catch (error) {
    this.log.warn(
      `Unable to initialize device ${this.getDeviceFriendlyName(ieeeAddr)}, ` +
        'try to remove it and add it again.\n'
    );
    this.log.warn('Reason:', error);
  }
}
/**
 * Registers the virtual switch that toggles ZigBee permit-join mode.
 * Permit-join is enabled immediately when configured to do so.
 */
private async initPermitJoinAccessory() {
  try {
    const hapAccessory = this.createHapAccessory(PERMIT_JOIN_ACCESSORY_NAME);
    this.permitJoinAccessory = new PermitJoinAccessory(this, hapAccessory, this.zigBeeClient);
    this.log.info('PermitJoin accessory successfully registered');
    if (this.config.enablePermitJoin === true) {
      await this.permitJoinAccessory.setPermitJoin(true);
    }
  } catch (e) {
    this.log.error('PermitJoin accessory not registered: ', e);
  }
}
/** Registers the virtual switch that resets devices via TouchLink. */
private initTouchLinkAccessory() {
  try {
    const hapAccessory = this.createHapAccessory(TOUCH_LINK_ACCESSORY_NAME);
    this.touchLinkAccessory = new TouchlinkAccessory(this, hapAccessory, this.zigBeeClient);
    this.log.info('TouchLink accessory successfully registered');
  } catch (e) {
    this.log.error('TouchLink accessory not registered: ', e);
  }
}
/**
 * Returns the cached PlatformAccessory for the given name, or creates and
 * registers a new one with Homebridge when none exists yet.
 */
private createHapAccessory(name: string) {
  const uuid = this.generateUUID(name);
  const cached = this.getAccessoryByUUID(uuid);
  if (cached) {
    this.log.info(`Reuse accessory from cache with uuid ${uuid} and name ${name}`);
    return cached;
  }
  const accessory = new this.PlatformAccessory(name, uuid);
  this.log.warn(`Registering new accessory with uuid ${uuid} and name ${name}`);
  this.api.registerPlatformAccessories(PLUGIN_NAME, PLATFORM_NAME, [accessory]);
  this.accessories.set(uuid, accessory);
  return accessory;
}
/**
 * Drops the accessory for the given IEEE address from the internal maps.
 * Note: this does not unregister it from Homebridge; callers do that.
 */
private removeAccessory(ieeeAddr: string) {
  const uuid = this.generateUUID(ieeeAddr);
  if (this.accessories.get(uuid)) {
    this.accessories.delete(uuid);
    this.homekitAccessories.delete(uuid);
  }
}
public async unpairDevice(ieeeAddr: string): Promise<boolean> {
const result = await this.zigBeeClient.unpairDevice(ieeeAddr);
if (result) {
this.log.info('Device has been unpaired:', ieeeAddr);
const accessory = this.getAccessoryByIeeeAddr(ieeeAddr);
if (accessory) {
this.api.unregisterPlatformAccessories(PLUGIN_NAME, PLATFORM_NAME, [accessory]);
this.removeAccessory(ieeeAddr);
return true;
}
} else {
this.log.error('Device has NOT been unpaired:', ieeeAddr);
}
return false;
}
/**
 * Handles a device announce: devices with a completed interview are
 * (re)mounted, initializing them first when not yet mapped.
 *
 * Fix: onDeviceMount() is now called through optional chaining — a
 * PlatformAccessory restored from cache can exist without a matching
 * ZigBeeAccessory wrapper (initDevice never ran in this session), and the
 * original code would crash on `undefined.onDeviceMount()` in that case.
 */
private async handleDeviceAnnounce(message: DeviceAnnouncePayload): Promise<void> {
  const ieeeAddr = message.device.ieeeAddr;
  this.log.info(
    `Device announce: ${this.getDeviceFriendlyName(ieeeAddr)} (${
      message.device.manufacturerName
    } - ${message.device.modelID})`
  );
  if (message.device.interviewCompleted) {
    let uuid = this.getAccessoryByIeeeAddr(ieeeAddr)?.UUID;
    if (!uuid) {
      // Wait a little bit for a database sync
      await sleep(1500);
      uuid = await this.initDevice(message.device);
      if (!uuid) {
        this.log.warn(`Device not recognized: `, message);
        return;
      }
    }
    // Guard: the HomeKit wrapper may be missing even when a uuid is known.
    return this.getHomekitAccessoryByUUID(uuid)?.onDeviceMount();
  } else {
    this.log.warn(
      `Not initializing device ${this.getDeviceFriendlyName(
        ieeeAddr
      )}: interview process still not completed`
    );
  }
}
/**
 * Routes an incoming ZigBee message to the mapped accessory: the message is
 * decoded and the resulting state applied via internalUpdate.
 */
private handleZigBeeMessage(message: MessagePayload) {
  const ieeeAddr = message.device.ieeeAddr;
  this.log.debug(`Zigbee message from ${this.getDeviceFriendlyName(ieeeAddr)}`, message);
  const zigBeeAccessory = this.getHomekitAccessoryByIeeeAddr(ieeeAddr);
  if (!zigBeeAccessory) {
    return;
  }
  // if the message is decoded, it will call the statePublisher function
  this.client.decodeMessage(message, (_ieeeAddr: string, state: DeviceState) => {
    this.log.debug(`Decoded state from incoming message`, state);
    zigBeeAccessory.internalUpdate(state);
  });
}
/**
 * Resolves the user-configured friendly name for a device, falling back to
 * its IEEE address when no custom setting exists.
 */
public getDeviceFriendlyName(ieeeAddr: string): string {
  const customSetting = this.config.customDeviceSettings?.find(
    setting => setting.ieeeAddr === ieeeAddr
  );
  return customSetting?.friendlyName || ieeeAddr;
}
/** Reports whether the mapped accessory currently considers the device online. */
public isDeviceOnline(ieeeAddr: string): boolean {
  const zigBeeAccessory: ZigBeeAccessory = this.getHomekitAccessoryByIeeeAddr(ieeeAddr);
  return zigBeeAccessory ? zigBeeAccessory.isOnline : false;
}
} | the_stack |
import { AbortController } from "@azure/abort-controller";
import {
BlobServiceClient,
newPipeline,
StorageSharedKeyCredential
} from "@azure/storage-blob";
import assert = require("assert");
import * as fs from "fs";
import { join } from "path";
import { PassThrough } from "stream";
import { configLogger } from "../../src/common/Logger";
import BlobTestServerFactory from "../BlobTestServerFactory";
import {
createRandomLocalFile,
EMULATOR_ACCOUNT_KEY,
EMULATOR_ACCOUNT_NAME,
getUniqueName,
readStreamToLocalFile,
rmRecursive
} from "../testutils";
// Set true to enable debug log
configLogger(false);
// tslint:disable:no-empty
// High-level block blob tests against an in-process Azurite server:
// uploadFile / uploadStream / downloadToBuffer / download retry behavior.
describe("BlockBlobHighlevel", () => {
const factory = new BlobTestServerFactory();
// Loose model to bypass if-match header used by download retry
const server = factory.createServer(true);
const baseURL = `http://${server.config.host}:${server.config.port}/devstoreaccount1`;
// Client pointed at the local emulator account; retries disabled so
// failures surface immediately, keep-alive off so sockets close per request.
const serviceClient = new BlobServiceClient(
baseURL,
newPipeline(
new StorageSharedKeyCredential(
EMULATOR_ACCOUNT_NAME,
EMULATOR_ACCOUNT_KEY
),
{
retryOptions: { maxTries: 1 },
// Make sure socket is closed once the operation is done.
keepAliveOptions: { enable: false }
}
)
);
// Per-test container/blob handles; reassigned in beforeEach.
let containerName = getUniqueName("container");
let containerClient = serviceClient.getContainerClient(containerName);
let blobName = getUniqueName("blob");
let blobClient = containerClient.getBlobClient(blobName);
let blockBlobClient = blobClient.getBlockBlobClient();
// Random fixture files created once in before(): 15 MB and 257 MB.
let tempFileSmall: string;
let tempFileSmallLength: number;
let tempFileLarge: string;
let tempFileLargeLength: number;
const tempFolderPath = "temp";
const timeoutForLargeFileUploadingTest = 20 * 60 * 1000;
beforeEach(async () => {
containerName = getUniqueName("container");
containerClient = serviceClient.getContainerClient(containerName);
await containerClient.create();
blobName = getUniqueName("blob");
blobClient = containerClient.getBlobClient(blobName);
blockBlobClient = blobClient.getBlockBlobClient();
});
afterEach(async function () {
await containerClient.delete();
});
before(async () => {
await server.start();
if (!fs.existsSync(tempFolderPath)) {
fs.mkdirSync(tempFolderPath);
}
// 257 MB: larger than the single-shot upload limit, forces block staging.
tempFileLarge = await createRandomLocalFile(
tempFolderPath,
257,
1024 * 1024
);
tempFileLargeLength = 257 * 1024 * 1024;
// 15 MB: small enough for the single-shot upload path.
tempFileSmall = await createRandomLocalFile(
tempFolderPath,
15,
1024 * 1024
);
tempFileSmallLength = 15 * 1024 * 1024;
});
after(async () => {
// TODO: Find out reason of slow close
await server.close();
fs.unlinkSync(tempFileLarge);
fs.unlinkSync(tempFileSmall);
await rmRecursive(tempFolderPath);
// TODO: Find out reason of slow clean up
await server.clean();
});
// Round-trips a 257 MB blob through the chunked uploadFile path and
// verifies the downloaded bytes match the source file.
it("uploadFile should success when blob >= BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES @loki @sql", async () => {
  const result = await blockBlobClient.uploadFile(tempFileLarge, {
    blockSize: 4 * 1024 * 1024,
    concurrency: 20
  });
  assert.equal(
    result._response.request.headers.get("x-ms-client-request-id"),
    result.clientRequestId
  );
  const downloadResponse = await blockBlobClient.download(0);
  const downloadedFile = join(tempFolderPath, getUniqueName("downloadfile."));
  await readStreamToLocalFile(
    downloadResponse.readableStreamBody!,
    downloadedFile
  );
  // Fix: fs.readFileSync is synchronous — the stray `await`s were no-ops.
  const downloadedData = fs.readFileSync(downloadedFile);
  const uploadedData = fs.readFileSync(tempFileLarge);
  fs.unlinkSync(downloadedFile);
  assert.ok(downloadedData.equals(uploadedData));
}).timeout(timeoutForLargeFileUploadingTest);
// Round-trips a 15 MB blob (single-shot upload path) and verifies content.
it("uploadFile should success when blob < BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES @loki @sql", async () => {
  await blockBlobClient.uploadFile(tempFileSmall, {
    blockSize: 4 * 1024 * 1024,
    concurrency: 20
  });
  const downloadResponse = await blockBlobClient.download(0);
  const downloadedFile = join(tempFolderPath, getUniqueName("downloadfile."));
  await readStreamToLocalFile(
    downloadResponse.readableStreamBody!,
    downloadedFile
  );
  // Fix: fs.readFileSync is synchronous — the stray `await`s were no-ops.
  const downloadedData = fs.readFileSync(downloadedFile);
  const uploadedData = fs.readFileSync(tempFileSmall);
  fs.unlinkSync(downloadedFile);
  assert.ok(downloadedData.equals(uploadedData));
});
// tslint:disable-next-line:max-line-length
// maxSingleShotSize: 0 forces the chunked path even for a small blob.
it("uploadFile should success when blob < BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES and configured maxSingleShotSize @loki @sql", async () => {
  await blockBlobClient.uploadFile(tempFileSmall, {
    maxSingleShotSize: 0
  });
  const downloadResponse = await blockBlobClient.download(0);
  const downloadedFile = join(tempFolderPath, getUniqueName("downloadfile."));
  await readStreamToLocalFile(
    downloadResponse.readableStreamBody!,
    downloadedFile
  );
  // Fix: fs.readFileSync is synchronous — the stray `await`s were no-ops.
  const downloadedData = fs.readFileSync(downloadedFile);
  const uploadedData = fs.readFileSync(tempFileSmall);
  fs.unlinkSync(downloadedFile);
  assert.ok(downloadedData.equals(uploadedData));
});
// tslint:disable-next-line: max-line-length
it("uploadFile should update progress when blob >= BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES @loki @sql", async () => {
  let eventTriggered = false;
  const aborter = new AbortController();
  try {
    await blockBlobClient.uploadFile(tempFileLarge, {
      // Fix: wire the controller's signal so aborter.abort() actually
      // cancels the upload after the first progress event — previously the
      // signal was never passed and abort() was a no-op.
      abortSignal: aborter.signal,
      blockSize: 4 * 1024 * 1024,
      concurrency: 20,
      onProgress: (ev: any) => {
        assert.ok(ev.loadedBytes);
        eventTriggered = true;
        aborter.abort();
      }
    });
  } catch (err) {}
  assert.ok(eventTriggered);
}).timeout(timeoutForLargeFileUploadingTest);
// tslint:disable-next-line: max-line-length
it("uploadFile should update progress when blob < BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES @loki @sql", async () => {
  let eventTriggered = false;
  const aborter = new AbortController();
  try {
    await blockBlobClient.uploadFile(tempFileSmall, {
      // Fix: see above — signal must be passed for abort() to take effect.
      abortSignal: aborter.signal,
      blockSize: 4 * 1024 * 1024,
      concurrency: 20,
      onProgress: (ev: any) => {
        assert.ok(ev.loadedBytes);
        eventTriggered = true;
        aborter.abort();
      }
    });
  } catch (err) {}
  assert.ok(eventTriggered);
});
// Streams the large fixture file up in 4 MB blocks, 20 in flight, then
// round-trips it through download and compares the bytes.
it("uploadStream should success @loki @sql", async () => {
const rs = fs.createReadStream(tempFileLarge);
const result = await blockBlobClient.uploadStream(rs, 4 * 1024 * 1024, 20);
assert.equal(
result._response.request.headers.get("x-ms-client-request-id"),
result.clientRequestId
);
const downloadResponse = await blockBlobClient.download(0);
const downloadFilePath = join(
tempFolderPath,
getUniqueName("downloadFile")
);
await readStreamToLocalFile(
downloadResponse.readableStreamBody!,
downloadFilePath
);
const downloadedBuffer = fs.readFileSync(downloadFilePath);
const uploadedBuffer = fs.readFileSync(tempFileLarge);
assert.ok(uploadedBuffer.equals(downloadedBuffer));
fs.unlinkSync(downloadFilePath);
});
// Edge case: a stream much smaller than one block ("buffer" = 6 bytes).
it("uploadStream should success for tiny buffers @loki @sql", async () => {
const buf = Buffer.from([0x62, 0x75, 0x66, 0x66, 0x65, 0x72]);
const bufferStream = new PassThrough();
bufferStream.end(buf);
await blockBlobClient.uploadStream(bufferStream, 4 * 1024 * 1024, 20);
const downloadResponse = await blockBlobClient.download(0);
const downloadFilePath = join(
tempFolderPath,
getUniqueName("downloadFile")
);
await readStreamToLocalFile(
downloadResponse.readableStreamBody!,
downloadFilePath
);
const downloadedBuffer = fs.readFileSync(downloadFilePath);
assert.ok(buf.equals(downloadedBuffer));
fs.unlinkSync(downloadFilePath);
});
// A 1 ms timeout signal must abort the upload; the error message is
// expected to mention "abort".
it("uploadStream should abort @loki @sql", async () => {
const rs = fs.createReadStream(tempFileLarge);
try {
await blockBlobClient.uploadStream(rs, 4 * 1024 * 1024, 20, {
abortSignal: AbortController.timeout(1)
});
assert.fail();
} catch (err) {
assert.ok((err.message as string).toLowerCase().includes("abort"));
}
}).timeout(timeoutForLargeFileUploadingTest);
// onProgress must fire at least once with a truthy loadedBytes count.
it("uploadStream should update progress event @loki @sql", async () => {
const rs = fs.createReadStream(tempFileLarge);
let eventTriggered = false;
await blockBlobClient.uploadStream(rs, 4 * 1024 * 1024, 20, {
onProgress: (ev: any) => {
assert.ok(ev.loadedBytes);
eventTriggered = true;
}
});
assert.ok(eventTriggered);
}).timeout(timeoutForLargeFileUploadingTest);
// Uploads the large fixture then downloads it into a pre-allocated Buffer
// in parallel 4 MB blocks and compares against the local file.
it("downloadToBuffer should success @loki @sql", async () => {
const rs = fs.createReadStream(tempFileLarge);
const result = await blockBlobClient.uploadStream(rs, 4 * 1024 * 1024, 20);
assert.equal(
result._response.request.headers.get("x-ms-client-request-id"),
result.clientRequestId
);
const buf = Buffer.alloc(tempFileLargeLength);
await blockBlobClient.downloadToBuffer(buf, 0, undefined, {
blockSize: 4 * 1024 * 1024,
maxRetryRequestsPerBlock: 5,
concurrency: 20
});
const localFileContent = fs.readFileSync(tempFileLarge);
assert.ok(localFileContent.equals(buf));
}).timeout(timeoutForLargeFileUploadingTest);
// Verifies onProgress fires during downloadToBuffer; the download is then
// cancelled via abort() and the resulting error swallowed.
it("downloadToBuffer should update progress event @loki @sql", async () => {
const rs = fs.createReadStream(tempFileSmall);
await blockBlobClient.uploadStream(rs, 4 * 1024 * 1024, 10);
let eventTriggered = false;
const buf = Buffer.alloc(tempFileSmallLength);
// NOTE(review): aborter's signal is never passed to downloadToBuffer, so
// the abort() below has no effect on the operation — confirm whether
// `abortSignal: aborter.signal` was intended here.
const aborter = new AbortController();
try {
await blockBlobClient.downloadToBuffer(buf, 0, undefined, {
blockSize: 1 * 1024,
maxRetryRequestsPerBlock: 5,
concurrency: 1,
onProgress: () => {
eventTriggered = true;
aborter.abort();
}
});
} catch (err) {}
assert.ok(eventTriggered);
}).timeout(timeoutForLargeFileUploadingTest);
// The next three tests exercise the retriable download stream: the
// response body exposes an internal `options` object, and setting its
// `doInjectErrorOnce` flag from onProgress makes the stream fail mid-read
// so the client's retry logic (maxRetryRequests) kicks in.
it("blobclient.download should success when internal stream unexpected ends at the stream end @loki @sql", async () => {
await blockBlobClient.uploadFile(tempFileSmall, {
blockSize: 4 * 1024 * 1024,
concurrency: 20
});
let retirableReadableStreamOptions: any;
const downloadResponse = await blockBlobClient.download(0, undefined, {
conditions: {
// modifiedAccessConditions: {
//   ifMatch: uploadResponse.eTag
// }
},
maxRetryRequests: 1,
onProgress: (ev) => {
// Inject the error only once the full payload has been delivered.
if (ev.loadedBytes >= tempFileSmallLength) {
retirableReadableStreamOptions.doInjectErrorOnce = true;
}
}
});
assert.equal(
downloadResponse._response.request.headers.get("x-ms-client-request-id"),
downloadResponse.clientRequestId
);
retirableReadableStreamOptions = (downloadResponse.readableStreamBody! as any)
.options;
const downloadedFile = join(tempFolderPath, getUniqueName("downloadfile."));
await readStreamToLocalFile(
downloadResponse.readableStreamBody!,
downloadedFile
);
// Note: `await` on readFileSync is redundant — the call is synchronous.
const downloadedData = await fs.readFileSync(downloadedFile);
const uploadedData = await fs.readFileSync(tempFileSmall);
fs.unlinkSync(downloadedFile);
assert.ok(downloadedData.equals(uploadedData));
});
// tslint:disable-next-line: max-line-length
it("blobclient.download should download full data successfully when internal stream unexpected ends @loki @sql", async () => {
await blockBlobClient.uploadFile(tempFileSmall, {
blockSize: 4 * 1024 * 1024,
concurrency: 20
});
let retirableReadableStreamOptions: any;
let injectedErrors = 0;
const downloadResponse = await blockBlobClient.download(0, undefined, {
conditions: {
// modifiedAccessConditions: {
//   ifMatch: uploadResponse.eTag
// }
},
maxRetryRequests: 3,
onProgress: () => {
// Inject up to three mid-stream errors; three retries must recover.
if (injectedErrors++ < 3) {
retirableReadableStreamOptions.doInjectErrorOnce = true;
}
}
});
retirableReadableStreamOptions = (downloadResponse.readableStreamBody! as any)
.options;
const downloadedFile = join(tempFolderPath, getUniqueName("downloadfile."));
await readStreamToLocalFile(
downloadResponse.readableStreamBody!,
downloadedFile
);
// Note: `await` on readFileSync is redundant — the call is synchronous.
const downloadedData = await fs.readFileSync(downloadedFile);
const uploadedData = await fs.readFileSync(tempFileSmall);
fs.unlinkSync(downloadedFile);
assert.ok(downloadedData.equals(uploadedData));
});
// Same injection pattern, but with a ranged (partial) download.
it("blobclient.download should download partial data when internal stream unexpected ends @loki @sql", async () => {
await blockBlobClient.uploadFile(tempFileSmall, {
blockSize: 4 * 1024 * 1024,
concurrency: 20
});
const partialSize = 500 * 1024;
let retirableReadableStreamOptions: any;
let injectedErrors = 0;
const downloadResponse = await blockBlobClient.download(0, partialSize, {
conditions: {
// modifiedAccessConditions: {
//   ifMatch: uploadResponse.eTag
// }
},
maxRetryRequests: 3,
onProgress: () => {
if (injectedErrors++ < 3) {
retirableReadableStreamOptions.doInjectErrorOnce = true;
}
}
});
retirableReadableStreamOptions = (downloadResponse.readableStreamBody! as any)
.options;
const downloadedFile = join(tempFolderPath, getUniqueName("downloadfile."));
await readStreamToLocalFile(
downloadResponse.readableStreamBody!,
downloadedFile
);
// Note: `await` on readFileSync is redundant — the call is synchronous.
const downloadedData = await fs.readFileSync(downloadedFile);
const uploadedData = await fs.readFileSync(tempFileSmall);
fs.unlinkSync(downloadedFile);
assert.ok(
downloadedData
.slice(0, partialSize)
.equals(uploadedData.slice(0, partialSize))
);
});
// With maxRetryRequests 0, a single injected stream error must make the
// read fail rather than be retried.
it("blobclient.download should download data failed when exceeding max stream retry requests @loki @sql", async () => {
  await blockBlobClient.uploadFile(tempFileSmall, {
    blockSize: 4 * 1024 * 1024,
    concurrency: 20
  });
  const downloadedFile = join(tempFolderPath, getUniqueName("downloadfile."));
  let retirableReadableStreamOptions: any;
  let injectedErrors = 0;
  let expectedError = false;
  try {
    const downloadResponse = await blockBlobClient.download(0, undefined, {
      conditions: {
        // modifiedAccessConditions: {
        //   ifMatch: uploadResponse.eTag
        // }
      },
      maxRetryRequests: 0,
      onProgress: () => {
        // Inject exactly one mid-stream error; no retries are allowed.
        if (injectedErrors++ < 1) {
          retirableReadableStreamOptions.doInjectErrorOnce = true;
        }
      }
    });
    retirableReadableStreamOptions = (downloadResponse.readableStreamBody! as any)
      .options;
    await readStreamToLocalFile(
      downloadResponse.readableStreamBody!,
      downloadedFile
    );
  } catch (error) {
    expectedError = true;
  }
  assert.ok(expectedError);
  // Fix: rmRecursive returns a Promise — await it so the cleanup finishes
  // (and any failure surfaces) before the test completes.
  await rmRecursive(downloadedFile);
});
}); | the_stack |
import Logger from "@supercollider/logger";
import { packBundle, packMessage, unpackMessage } from "@supercollider/osc";
import { spawn } from "child_process";
import * as dgram from "dgram";
import { EventEmitter } from "events";
import _ from "lodash";
import path from "path";
import { IDisposable, Observable, Subject } from "rx";
import SendOSC from "./internals/SendOSC";
import Store from "./internals/Store";
import { defaults, resolveOptions, ServerArgs, ServerOptions } from "./options";
import { MsgType, OscType } from "./osc-types";
import { CallAndResponse, notify } from "./osc/msg";
import { parseMessage } from "./osc/utils";
import ServerState from "./ServerState";
// Registry of active Rx subscriptions/observables keyed by channel name
// (stdout, stderr, oscMessage, oscError); cleared on disconnect.
interface ServerObservers {
// string | OSCMsg
[name: string]: Observable<any> | IDisposable;
}
/**
* Server - starts a SuperCollider synthesis server (scsynth)
* as a child process. Enables OSC communication, subscribe to process events,
* send call and response OSC messages.
*
* SuperCollider comes with an executable called scsynth
* which can be communicated with via OSC.
*
* To send raw OSC messages:
* ```js
* server.send.msg('/s_new', ['defName', 440])
* ```
*
* Raw OSC responses can be subscribed to:
* ```js
* server.receive.subscribe(function(msg) {
* console.log(msg);
* });
* ```
*/
export default class Server extends EventEmitter {
  options: ServerOptions;
  address: string;
  /**
   * The process id that nodejs spawn() returns
   */
  process: any;
  isRunning: boolean;
  /**
   * Supports `server.send.msg()` and `server.send.bundle()`
   *
   * You can also subscribe to it and get the OSC messages
   * and bundles that are being sent echoed to you for
   * debugging purposes.
   */
  send: SendOSC;
  /**
   * A subscribeable stream of OSC events received.
   */
  receive: Subject<MsgType>;
  /**
   * A subscribeable stream of STDOUT printed by the scsynth process.
   */
  stdout: Subject<string>;
  /**
   * A subscribeable stream of events related to the scsynth process.
   * Used internally.
   */
  processEvents: Subject<string | Error>;
  /**
   * Holds the mutable server state
   * including allocators and the node state watcher.
   * If a parent stateStore is supplied then it will store within that.
   */
  state: ServerState;
  /**
   * The logger used to print messages to the console.
   */
  log: Logger;
  private osc?: dgram.Socket;
  private _serverObservers: ServerObservers;
  /**
   * @param stateStore - optional parent Store for allocators and node watchers
   */
  constructor(options: ServerArgs = {}, stateStore?: Store) {
    super();
    this.options = resolveOptions(options);
    this.address = this.options.host + ":" + this.options.serverPort;
    this.process = null;
    this.isRunning = false;
    this.send = new SendOSC();
    this.receive = new Subject();
    this.stdout = new Subject();
    this.processEvents = new Subject();
    this.log = this._initLogger();
    this._initEmitter();
    this._initSender();
    this._serverObservers = {};
    this.state = new ServerState(this, stateStore);
  }
  private _initLogger(): Logger {
    // scsynth.server options this Server.options
    const log = new Logger(this.options.debug, this.options.echo, this.options.log);
    this.send.subscribe(event => {
      // will be a type:msg or type:bundle
      // if args has a type: Buffer in it then compress that
      let out = JSON.stringify(
        event.payload || event,
        (k: string, v: any): any => {
          if (k === "data" && _.isArray(v)) {
            return _.reduce(v, (memo: string, n: number): string => memo + n.toString(16), "");
          }
          return v;
        },
        2,
      );
      if (!this.osc) {
        out = "[NOT CONNECTED] " + out;
      }
      log.sendosc(out);
    });
    this.receive.subscribe(
      o => {
        log.rcvosc(o);
        // log all /fail responses as error
        if (o[0] === "/fail") {
          log.err(o);
        }
      },
      (err: Error) => log.err(err),
    );
    this.stdout.subscribe(
      o => {
        // scsynth doesn't send ERROR messages to stderr
        // if ERROR or FAILURE in output then redirect as though it did
        // so it shows up in logs
        if (o.match(/ERROR|FAILURE/)) {
          log.stderr(o);
        } else {
          log.stdout(o);
        }
      },
      (err: Error) => log.stderr(err),
    );
    this.processEvents.subscribe(
      o => log.dbug(o),
      (err: Error) => log.err(err),
    );
    return log;
  }
  /**
   * Event Emitter emits:
   * 'out' - stdout text from the server
   * 'error' - stderr text from the server or OSC error messages
   * 'exit' - when server exits
   * 'close' - when server closes the UDP connection
   * 'OSC' - OSC responses from the server
   *
   * Emit signals are deprecated and will be removed in 1.0
   * TODO: remove
   *
   * @deprecated
   *
   * Instead use ```server.{channel}.subscribe((event) => { })```
   *
   */
  private _initEmitter(): void {
    this.receive.subscribe(msg => {
      this.emit("OSC", msg);
    });
    this.processEvents.subscribe(
      () => {},
      err => this.emit("exit", err),
    );
    this.stdout.subscribe(
      out => this.emit("out", out),
      out => this.emit("stderr", out),
    );
  }
  private _initSender(): void {
    this.send.on("msg", msg => {
      if (this.osc) {
        const buf = packMessage(msg);
        this.osc.send(buf, 0, buf.length, parseInt(this.options.serverPort), this.options.host);
      }
    });
    this.send.on("bundle", bundle => {
      if (this.osc) {
        const buf = packBundle(bundle);
        this.osc.send(buf, 0, buf.length, parseInt(this.options.serverPort), this.options.host);
      }
    });
  }
  /**
   * Format the command line args for scsynth.
   *
   * The args built using the options supplied to `Server(options)` or `sc.server.boot(options)`
   *
   * ```js
   * sc.server.boot({device: 'Soundflower (2ch)'});
   * sc.server.boot({serverPort: '11211'});
   * ```
   *
   * Supported arguments:
   *
   *     numAudioBusChannels
   *     numControlBusChannels
   *     numInputBusChannels
   *     numOutputBusChannels
   *     numBuffers
   *     maxNodes
   *     maxSynthDefs
   *     blockSize
   *     hardwareBufferSize
   *     memSize
   *     numRGens - max random generators
   *     numWireBufs
   *     sampleRate
   *     loadDefs - (0 or 1)
   *     inputStreamsEnabled - "01100" means only the 2nd and 3rd input streams
   *                          on the device will be enabled
   *     outputStreamsEnabled,
   *     device - name of hardware device
   *            or array of names for [inputDevice, outputDevice]
   *     verbosity: 0 1 2
   *     restrictedPath
   *     ugenPluginsPath
   *     password - for TCP logins open to the internet
   *     maxLogins - max users that may login
   *
   * Arbitrary arguments can be passed in as options.commandLineArgs
   * which is an array of strings that will be space-concatenated
   * and correctly shell-escaped.
   *
   * Host is currently ignored: it is always local on the same machine.
   *
   * See ServerOptions documentation: http://danielnouri.org/docs/SuperColliderHelp/ServerArchitecture/ServerOptions.html
   *
   * @return List of non-default args
   */
  args(): string[] {
    const flagMap = {
      numAudioBusChannels: "-a",
      numControlBusChannels: "-c",
      numInputBusChannels: "-i",
      numOutputBusChannels: "-o",
      numBuffers: "-b",
      maxNodes: "-n",
      maxSynthDefs: "-d",
      blockSize: "-z",
      hardwareBufferSize: "-Z",
      memSize: "-m",
      numRGens: "-r",
      numWireBufs: "-w",
      sampleRate: "-S",
      loadDefs: "-D", // boolean
      inputStreamsEnabled: "-I",
      outputStreamsEnabled: "-O",
      device: "-H",
      verbosity: "-V",
      zeroConf: "-R",
      restrictedPath: "-P",
      ugenPluginsPath: "-U",
      password: "-p",
      maxLogins: "-l",
    };
    const { serverPort, protocol, commandLineOptions } = this.options;
    const opts = ["-u", serverPort];
    if (protocol === "tcp") {
      throw new Error("Only udp sockets are supported at this time.");
    }
    // Only emit flags whose value differs from the documented default.
    _.forEach(this.options, (option, argName) => {
      const flag = flagMap[argName];
      if (flag) {
        if (option !== defaults[argName]) {
          opts.push(flag);
          if (_.isArray(option)) {
            opts.push(...option);
          } else if (_.isString(option)) {
            opts.push(option);
          } else {
            this.log.err(`Bad type in server options: ${argName} ${option} ${typeof option}`);
          }
        }
      }
    });
    if (_.isArray(commandLineOptions)) {
      opts.push(...commandLineOptions);
    }
    return opts.map(String);
  }
  /**
   * Boot the server
   *
   * Start scsynth and establish a pipe connection to receive stdout and stderr.
   *
   * Does not connect, so UDP is not yet ready for OSC communication.
   *
   * listen for system events and emit: exit out error
   *
   * @returns {Promise}
   */
  boot(): Promise<Server> {
    return new Promise((resolve, reject) => {
      this.isRunning = false;
      try {
        this._spawnProcess();
      } catch (e) {
        reject(e);
        // Fix: without this return the code below would dereference
        // this.process after a failed spawn and crash with a second error.
        return;
      }
      this._serverObservers.stdout = Observable.fromEvent(this.process.stdout, "data", data => String(data));
      this._serverObservers.stdout.subscribe(e => this.stdout.onNext(e));
      this._serverObservers.stderr = Observable.fromEvent(this.process.stderr, "data").subscribe(out => {
        // just pipe it into the stdout object's error stream
        this.stdout.onError(out);
      });
      // Keep a local buffer of the stdout text because on Windows it can be split into odd chunks.
      let stdoutBuffer = "";
      // Reject if the ready message has not been seen within 3 seconds.
      const bootTimeout = setTimeout(() => {
        if (!this.isRunning) {
          reject(new Error("Server failed to start in 3000ms"));
        }
      }, 3000);
      // watch for ready message
      this._serverObservers.stdout
        .takeWhile((text: string): boolean => {
          stdoutBuffer += text;
          return !stdoutBuffer.match(/SuperCollider 3 server ready/);
        })
        .subscribe(
          () => {},
          this.log.err,
          () => {
            // onComplete
            stdoutBuffer = "";
            this.isRunning = true;
            // Fix: clear the failure timer so it doesn't keep the event
            // loop alive after a successful boot.
            clearTimeout(bootTimeout);
            resolve(this);
          },
        );
    });
  }
  _spawnProcess(): void {
    const execPath = this.options.scsynth,
      args = this.args();
    if (!execPath) {
      throw new Error(`Missing options.scsynth executable path`);
    }
    const logMsg = "Start process: " + execPath + " " + args.join(" ");
    this.processEvents.onNext(logMsg);
    const options = {
      cwd: this.options.cwd || path.dirname(execPath),
      detached: false,
      // Environment variables to set for server process
      // eg. SC_JACK_DEFAULT_INPUTS: "system:capture_1,system:capture_2"
      env: this.options.env ? (this.options.env as NodeJS.ProcessEnv) : undefined,
    };
    this.log.dbug({ execPath, args, options });
    this.process = spawn(execPath, args, options);
    if (!this.process.pid) {
      const error = `Failed to boot ${execPath}`;
      this.processEvents.onError(error);
      throw new Error(error);
    }
    this.processEvents.onNext("pid: " + this.process.pid);
    // when this parent process dies, kill child process
    const killChild = (): void => {
      if (this.process) {
        this.process.kill("SIGTERM");
        this.process = null;
      }
    };
    process.on("exit", killChild);
    this.process.on("error", (err: Error) => {
      this.processEvents.onError(err);
      this.isRunning = false;
      // this.disconnect()
    });
    this.process.on("close", (code: number | null, signal: string | null) => {
      this.processEvents.onError("Server closed. Exit code: " + code + " signal: " + signal);
      this.isRunning = false;
      // this.disconnect()
    });
    this.process.on("exit", (code: number | null, signal: string | null) => {
      this.processEvents.onError("Server exited. Exit code: " + code + " signal: " + signal);
      this.isRunning = false;
      // this.disconnect()
    });
  }
  /**
   * quit
   *
   * kill scsynth process
   * TODO: should send /quit first for shutting files
   */
  quit(): void {
    if (this.process) {
      this.disconnect();
      this.process.kill("SIGTERM");
      this.process = null;
    }
  }
  /**
   * Establish connection to scsynth via OSC socket
   *
   * @returns {Promise} - resolves when udp responds
   */
  connect(): Promise<Server> {
    return new Promise((resolve, reject) => {
      const udpListening = "udp is listening";
      this.osc = dgram.createSocket("udp4");
      this.osc.on("listening", () => {
        this.processEvents.onNext(udpListening);
      });
      this.osc.on("close", e => {
        this.processEvents.onNext("udp closed: " + e);
        this.disconnect();
      });
      // pipe events to this.receive
      this._serverObservers.oscMessage = Observable.fromEvent(this.osc, "message", msgbuf => unpackMessage(msgbuf));
      this._serverObservers.oscMessage.subscribe(e => this.receive.onNext(parseMessage(e)));
      this._serverObservers.oscError = Observable.fromEvent(this.osc, "error");
      this._serverObservers.oscError.subscribe(e => {
        this.receive.onError(e);
        reject(e);
      });
      // this will trigger a response from server
      // which will cause a udp listening event.
      // After server responds then we are truly connected.
      // Fix: pass reject as the rejection handler — previously a timeout of
      // the notify call left connect() pending forever and produced an
      // unhandled promise rejection.
      this.callAndResponse(notify()).then(() => {
        resolve(this);
      }, reject);
    });
  }
  private disconnect(): void {
    if (this.osc) {
      this.osc.close();
      delete this.osc;
    }
    // TODO: its the subscriptions that need to be disposed, these are the Observables
    // this._serverObservers.forEach((obs) => obs.dispose());
    // for (var key in this._serverObservers) {
    //   console.log(key, this._serverObservers[key], this._serverObservers[key].dispose);
    //   this._serverObservers[key].dispose();
    // }
    this._serverObservers = {};
  }
  /**
   * Send OSC message to server
   *
   * @deprecated - use: `server.send.msg([address, arg1, arg2])``
   * @param {String} address - OSC command string eg. `/s_new` which is referred to in OSC as the address
   * @param {Array} args
   */
  sendMsg(address: string, args: OscType[]): void {
    this.send.msg([address, ...args]);
  }
  /**
   * Wait for a single OSC response from server matching the supplied args.
   *
   * This is for getting responses async from the server.
   * The first part of the message matches the expected args,
   * and the rest of the message contains the response.
   *
   * The Promise fullfills with any remaining payload including in the message.
   *
   * @param {Array} matchArgs - osc message to match as a single array: `[/done, /notify]`
   * @param {int} timeout - in milliseconds before the Promise is rejected
   * @returns {Promise}
   */
  oscOnce(matchArgs: MsgType, timeout = 4000): Promise<MsgType> {
    return new Promise((resolve: Function, reject: Function) => {
      const subscription = this.receive.subscribe(msg => {
        const command = msg.slice(0, matchArgs.length);
        if (_.isEqual(command, matchArgs)) {
          const payload = msg.slice(matchArgs.length);
          resolve(payload);
          dispose();
        }
      });
      // if timeout then reject and dispose
      const tid = setTimeout(() => {
        dispose();
        reject(new Error(`Timed out waiting for OSC response: ${JSON.stringify(matchArgs)}`));
      }, timeout);
      function dispose(): void {
        subscription.dispose();
        clearTimeout(tid);
      }
    });
  }
  /**
   * Send an OSC command that expects a reply from the server,
   * returning a `Promise` that resolves with the response.
   *
   * This is for getting responses async from the server.
   * The first part of the message matches the expected args,
   * and the rest of the message contains the response.
   *
   * ```js
   * {
   *   call: ['/some_osc_msg', 1, 2],
   *   response: ['/expected_osc_response', 1, 2, 3]
   * }
   * ```
   * @param {int} timeout - in milliseconds before rejecting the `Promise`
   * @returns {Promise} - resolves with all values the server responsed with after the matched response.
   */
  callAndResponse(callAndResponse: CallAndResponse, timeout = 4000): Promise<MsgType> {
    const promise = this.oscOnce(callAndResponse.response, timeout);
    // if it's valid to send a msg with an array on the end,
    // then change the definition of Msg
    this.send.msg(callAndResponse.call);
    return promise;
  }
}
/**
* Boot a server with options and connect
*
* @param {object} options - command line options for server
* @param {Store} store - optional external Store to hold Server state
* @returns {Promise} - resolves with the Server
*/
export async function boot(options: ServerArgs = {}, store: any = null): Promise<Server> {
const s: Server = new Server(options, store);
await s.boot();
await s.connect();
return s;
} | the_stack |
import test, { Macro } from 'ava';
import {
addressContentsToLockingBytecode,
AddressType,
Base58AddressError,
Base58AddressFormatVersion,
base58AddressToLockingBytecode,
CashAddressDecodingError,
CashAddressNetworkPrefix,
cashAddressToLockingBytecode,
hexToBin,
instantiateSha256,
LockingBytecodeEncodingError,
lockingBytecodeToAddressContents,
lockingBytecodeToBase58Address,
lockingBytecodeToCashAddress,
} from '../lib';
// Start sha256 WASM instantiation once; individual tests await this promise.
const sha256Promise = instantiateSha256();
test('lockingBytecode <-> AddressContents: P2PK', (t) => {
  // <push 65-byte uncompressed public key> OP_CHECKSIG (genesis coinbase output)
  const genesisCoinbase = hexToBin(
    '4104678afdb0fe5548271967f1a67130b7105cd6a828e03909a67962e0ea1f61deb649f6bc3f4cef38c4f35504e51ec112de5c384df7ba0b8d578a4c702b6bf11d5fac'
  );
  const genesisPublicKey = hexToBin(
    '04678afdb0fe5548271967f1a67130b7105cd6a828e03909a67962e0ea1f61deb649f6bc3f4cef38c4f35504e51ec112de5c384df7ba0b8d578a4c702b6bf11d5f'
  );
  // decoding yields the bare public key with type p2pk...
  t.deepEqual(lockingBytecodeToAddressContents(genesisCoinbase), {
    payload: genesisPublicKey,
    type: AddressType.p2pk,
  });
  // ...and re-encoding the contents reproduces the original bytecode
  t.deepEqual(
    addressContentsToLockingBytecode({
      payload: genesisPublicKey,
      type: AddressType.p2pk,
    }),
    genesisCoinbase
  );
  // same round-trip with a compressed (33-byte) public key
  const genesisCoinbaseCompressed = hexToBin(
    '2103678afdb0fe5548271967f1a67130b7105cd6a828e03909a67962e0ea1f61deb6ac'
  );
  const compressedPublicKey = hexToBin(
    '03678afdb0fe5548271967f1a67130b7105cd6a828e03909a67962e0ea1f61deb6'
  );
  t.deepEqual(lockingBytecodeToAddressContents(genesisCoinbaseCompressed), {
    payload: compressedPublicKey,
    type: AddressType.p2pk,
  });
  t.deepEqual(
    addressContentsToLockingBytecode({
      payload: compressedPublicKey,
      type: AddressType.p2pk,
    }),
    genesisCoinbaseCompressed
  );
});
test('lockingBytecode <-> AddressContents: P2PKH', (t) => {
  // OP_DUP OP_HASH160 <20-byte key hash> OP_EQUALVERIFY OP_CHECKSIG
  const bytecode = hexToBin('76a91465a16059864a2fdbc7c99a4723a8395bc6f188eb88ac');
  const contents = {
    payload: hexToBin('65a16059864a2fdbc7c99a4723a8395bc6f188eb'),
    type: AddressType.p2pkh,
  };
  // decoding and encoding must round-trip
  t.deepEqual(lockingBytecodeToAddressContents(bytecode), contents);
  t.deepEqual(addressContentsToLockingBytecode(contents), bytecode);
});
test('lockingBytecode <-> AddressContents: P2SH', (t) => {
  // OP_HASH160 <20-byte script hash> OP_EQUAL
  const bytecode = hexToBin('a91474f209f6ea907e2ea48f74fae05782ae8a66525787');
  const contents = {
    payload: hexToBin('74f209f6ea907e2ea48f74fae05782ae8a665257'),
    type: AddressType.p2sh,
  };
  // decoding and encoding must round-trip
  t.deepEqual(lockingBytecodeToAddressContents(bytecode), contents);
  t.deepEqual(addressContentsToLockingBytecode(contents), bytecode);
});
test('lockingBytecode <-> AddressContents: unknown', (t) => {
  // a nonstandard script: OP_2 OP_ADD OP_3 OP_EQUAL
  const simpleMath = hexToBin('52935387');
  // unrecognized bytecode round-trips unchanged with type `unknown`
  t.deepEqual(lockingBytecodeToAddressContents(simpleMath), {
    payload: simpleMath,
    type: AddressType.unknown,
  });
  t.deepEqual(
    addressContentsToLockingBytecode({
      payload: simpleMath,
      type: AddressType.unknown,
    }),
    simpleMath
  );
  // The `almostP2pk`/`almostP2pkh`/`almostP2sh` assertions that used to be
  // duplicated here are covered by the dedicated 'improperly sized scripts'
  // test below.
});
test('lockingBytecodeToAddressContents: improperly sized scripts return AddressType.unknown', (t) => {
  // each of these mimics a standard template but pushes the wrong length
  const almostP2pk = hexToBin('0100ac');
  const almostP2pkh = hexToBin('76a9010088ac');
  const almostP2sh = hexToBin('a9010087');
  // none may be classified as p2pk/p2pkh/p2sh; the raw bytes are returned
  t.deepEqual(lockingBytecodeToAddressContents(almostP2pk), {
    payload: almostP2pk,
    type: AddressType.unknown,
  });
  t.deepEqual(lockingBytecodeToAddressContents(almostP2pkh), {
    payload: almostP2pkh,
    type: AddressType.unknown,
  });
  t.deepEqual(lockingBytecodeToAddressContents(almostP2sh), {
    payload: almostP2sh,
    type: AddressType.unknown,
  });
});
// Macro: assert that `cashAddress` decodes to `bytecode` (hex) and that the
// same bytecode re-encodes to `cashAddress` under the 'bitcoincash' prefix.
const cashVectors: Macro<[string, string]> = (t, cashAddress, bytecode) => {
  t.deepEqual(cashAddressToLockingBytecode(cashAddress), {
    bytecode: hexToBin(bytecode),
    prefix: 'bitcoincash',
  });
  t.deepEqual(
    lockingBytecodeToCashAddress(hexToBin(bytecode), 'bitcoincash'),
    cashAddress
  );
};
// derive a readable test title from the vector's address
// eslint-disable-next-line functional/immutable-data
cashVectors.title = (_, cashAddress) =>
  `cashAddressToLockingBytecode <-> lockingBytecodeToCashAddress: ${cashAddress}`;
// Round-trip vectors: three P2PKH ('q…') and three P2SH ('p…') addresses.
// NOTE(review): these look like the standard CashAddress spec test vectors —
// verify against the specification before modifying.
test(
  cashVectors,
  'bitcoincash:qpm2qsznhks23z7629mms6s4cwef74vcwvy22gdx6a',
  '76a91476a04053bda0a88bda5177b86a15c3b29f55987388ac'
);
test(
  cashVectors,
  'bitcoincash:qr95sy3j9xwd2ap32xkykttr4cvcu7as4y0qverfuy',
  '76a914cb481232299cd5743151ac4b2d63ae198e7bb0a988ac'
);
test(
  cashVectors,
  'bitcoincash:qqq3728yw0y47sqn6l2na30mcw6zm78dzqre909m2r',
  '76a914011f28e473c95f4013d7d53ec5fbc3b42df8ed1088ac'
);
test(
  cashVectors,
  'bitcoincash:ppm2qsznhks23z7629mms6s4cwef74vcwvn0h829pq',
  'a91476a04053bda0a88bda5177b86a15c3b29f55987387'
);
test(
  cashVectors,
  'bitcoincash:pr95sy3j9xwd2ap32xkykttr4cvcu7as4yc93ky28e',
  'a914cb481232299cd5743151ac4b2d63ae198e7bb0a987'
);
test(
  cashVectors,
  'bitcoincash:pqq3728yw0y47sqn6l2na30mcw6zm78dzq5ucqzc37',
  'a914011f28e473c95f4013d7d53ec5fbc3b42df8ed1087'
);
test('lockingBytecodeToCashAddress: P2PK', (t) => {
  // NOTE(review): P2PK bytecode is apparently not representable as a
  // CashAddress, so the function is expected to return the decoded
  // AddressContents instead of an address string — confirm against the
  // libauth API contract.
  const genesisCoinbase = hexToBin(
    '4104678afdb0fe5548271967f1a67130b7105cd6a828e03909a67962e0ea1f61deb649f6bc3f4cef38c4f35504e51ec112de5c384df7ba0b8d578a4c702b6bf11d5fac'
  );
  const genesisPublicKey = hexToBin(
    '04678afdb0fe5548271967f1a67130b7105cd6a828e03909a67962e0ea1f61deb649f6bc3f4cef38c4f35504e51ec112de5c384df7ba0b8d578a4c702b6bf11d5f'
  );
  t.deepEqual(
    lockingBytecodeToCashAddress(
      genesisCoinbase,
      CashAddressNetworkPrefix.mainnet
    ),
    {
      payload: genesisPublicKey,
      type: AddressType.p2pk,
    }
  );
  // same behavior for a compressed public key
  const genesisCoinbaseCompressed = hexToBin(
    '2103678afdb0fe5548271967f1a67130b7105cd6a828e03909a67962e0ea1f61deb6ac'
  );
  const compressedPublicKey = hexToBin(
    '03678afdb0fe5548271967f1a67130b7105cd6a828e03909a67962e0ea1f61deb6'
  );
  t.deepEqual(
    lockingBytecodeToCashAddress(
      genesisCoinbaseCompressed,
      CashAddressNetworkPrefix.mainnet
    ),
    {
      payload: compressedPublicKey,
      type: AddressType.p2pk,
    }
  );
});
test('cashAddressToLockingBytecode <-> lockingBytecodeToCashAddress: P2SH', (t) => {
  // OP_HASH160 <20-byte script hash> OP_EQUAL
  const bytecode = hexToBin('a91474f209f6ea907e2ea48f74fae05782ae8a66525787');
  const address = 'bitcoincash:pp60yz0ka2g8ut4y3a604czhs2hg5ejj2ugn82jfsr';
  const decoded = { bytecode, prefix: 'bitcoincash' };
  // encoding and decoding must round-trip
  t.deepEqual(lockingBytecodeToCashAddress(bytecode, 'bitcoincash'), address);
  t.deepEqual(cashAddressToLockingBytecode(address), decoded);
});
test('lockingBytecodeToCashAddress: error', (t) => {
  // non-standard bytecode cannot be encoded: the AddressContents is returned
  const simpleMath = hexToBin('52935387');
  t.deepEqual(lockingBytecodeToCashAddress(simpleMath, 'bitcoincash'), {
    payload: simpleMath,
    type: AddressType.unknown,
  });
  // P2PK likewise has no CashAddress form (see the P2PK test above)
  const genesisCoinbase = hexToBin(
    '4104678afdb0fe5548271967f1a67130b7105cd6a828e03909a67962e0ea1f61deb649f6bc3f4cef38c4f35504e51ec112de5c384df7ba0b8d578a4c702b6bf11d5fac'
  );
  t.deepEqual(lockingBytecodeToCashAddress(genesisCoinbase, 'bitcoincash'), {
    payload: hexToBin(
      '04678afdb0fe5548271967f1a67130b7105cd6a828e03909a67962e0ea1f61deb649f6bc3f4cef38c4f35504e51ec112de5c384df7ba0b8d578a4c702b6bf11d5f'
    ),
    type: AddressType.p2pk,
  });
});
test('cashAddressToLockingBytecode: error', (t) => {
  // malformed address: checksum validation fails
  t.deepEqual(
    cashAddressToLockingBytecode('bad:address'),
    CashAddressDecodingError.invalidChecksum
  );
  // a decodable address whose version maps to no known address type
  t.deepEqual(
    cashAddressToLockingBytecode(
      'bitcoincash:dp60yz0ka2g8ut4y3a604czhs2hg5ejj2u6xkulaqj'
    ),
    LockingBytecodeEncodingError.unknownCashAddressType
  );
});
test('lockingBytecodeToBase58Address: P2PK', async (t) => {
  const sha256 = await sha256Promise;
  // NOTE(review): as with CashAddress, P2PK appears to have no base58 form,
  // so AddressContents is returned instead of an address string — confirm.
  const genesisCoinbase = hexToBin(
    '4104678afdb0fe5548271967f1a67130b7105cd6a828e03909a67962e0ea1f61deb649f6bc3f4cef38c4f35504e51ec112de5c384df7ba0b8d578a4c702b6bf11d5fac'
  );
  const genesisPublicKey = hexToBin(
    '04678afdb0fe5548271967f1a67130b7105cd6a828e03909a67962e0ea1f61deb649f6bc3f4cef38c4f35504e51ec112de5c384df7ba0b8d578a4c702b6bf11d5f'
  );
  t.deepEqual(
    lockingBytecodeToBase58Address(sha256, genesisCoinbase, 'mainnet'),
    {
      payload: genesisPublicKey,
      type: AddressType.p2pk,
    }
  );
  const genesisCoinbaseCompressed = hexToBin(
    '2103678afdb0fe5548271967f1a67130b7105cd6a828e03909a67962e0ea1f61deb6ac'
  );
  const compressedPublicKey = hexToBin(
    '03678afdb0fe5548271967f1a67130b7105cd6a828e03909a67962e0ea1f61deb6'
  );
  t.deepEqual(
    lockingBytecodeToBase58Address(
      sha256,
      genesisCoinbaseCompressed,
      'testnet'
    ),
    {
      payload: compressedPublicKey,
      type: AddressType.p2pk,
    }
  );
});
test('base58AddressToLockingBytecode <-> lockingBytecodeToBase58Address: P2PKH', async (t) => {
  const sha256 = await sha256Promise;
  const p2pkh = hexToBin('76a91476a04053bda0a88bda5177b86a15c3b29f55987388ac');
  // the same key hash under three version bytes: mainnet, testnet, copay
  // cspell: disable-next-line
  const address = '1BpEi6DfDAUFd7GtittLSdBeYJvcoaVggu';
  // cspell: disable-next-line
  const addressTestnet = 'mrLC19Je2BuWQDkWSTriGYPyQJXKkkBmCx';
  // cspell: disable-next-line
  const addressCopay = 'CTH8H8Zj6DSnXFBKQeDG28ogAS92iS16Bp';
  t.deepEqual(
    lockingBytecodeToBase58Address(sha256, p2pkh, 'mainnet'),
    address
  );
  t.deepEqual(
    lockingBytecodeToBase58Address(sha256, p2pkh, 'testnet'),
    addressTestnet
  );
  t.deepEqual(
    lockingBytecodeToBase58Address(sha256, p2pkh, 'copay-bch'),
    addressCopay
  );
  // decoding reports the format version alongside the bytecode
  t.deepEqual(base58AddressToLockingBytecode(sha256, address), {
    bytecode: p2pkh,
    version: Base58AddressFormatVersion.p2pkh,
  });
  t.deepEqual(base58AddressToLockingBytecode(sha256, addressTestnet), {
    bytecode: p2pkh,
    version: Base58AddressFormatVersion.p2pkhTestnet,
  });
  t.deepEqual(base58AddressToLockingBytecode(sha256, addressCopay), {
    bytecode: p2pkh,
    version: Base58AddressFormatVersion.p2pkhCopayBCH,
  });
});
test('base58AddressToLockingBytecode <-> lockingBytecodeToBase58Address: P2SH', async (t) => {
  const sha256 = await sha256Promise;
  const p2sh = hexToBin('a91476a04053bda0a88bda5177b86a15c3b29f55987387');
  // the same script hash under three version bytes: mainnet, testnet, copay
  // cspell: disable-next-line
  const address = '3CWFddi6m4ndiGyKqzYvsFYagqDLPVMTzC';
  // cspell: disable-next-line
  const addressTestnet = '2N44ThNe8NXHyv4bsX8AoVCXquBRW94Ls7W';
  // cspell: disable-next-line
  const addressCopay = 'HHLN6S9BcP1JLSrMhgD5qe57iVEMFMLCBT';
  t.deepEqual(lockingBytecodeToBase58Address(sha256, p2sh, 'mainnet'), address);
  t.deepEqual(
    lockingBytecodeToBase58Address(sha256, p2sh, 'testnet'),
    addressTestnet
  );
  t.deepEqual(
    lockingBytecodeToBase58Address(sha256, p2sh, 'copay-bch'),
    addressCopay
  );
  t.deepEqual(base58AddressToLockingBytecode(sha256, address), {
    bytecode: p2sh,
    version: Base58AddressFormatVersion.p2sh,
  });
  t.deepEqual(base58AddressToLockingBytecode(sha256, addressTestnet), {
    bytecode: p2sh,
    version: Base58AddressFormatVersion.p2shTestnet,
  });
  t.deepEqual(base58AddressToLockingBytecode(sha256, addressCopay), {
    bytecode: p2sh,
    version: Base58AddressFormatVersion.p2shCopayBCH,
  });
});
test('base58AddressToLockingBytecode: error', async (t) => {
const sha256 = await sha256Promise;
t.deepEqual(
base58AddressToLockingBytecode(sha256, 'bad:address'),
Base58AddressError.unknownCharacter
);
}); | the_stack |
import * as msRest from "@azure/ms-rest-js";
import * as msRestAzure from "@azure/ms-rest-azure-js";
import * as Models from "../models";
import * as Mappers from "../models/fileServersMappers";
import * as Parameters from "../models/parameters";
import { StorSimpleManagementClientContext } from "../storSimpleManagementClientContext";
/** Class representing a FileServers. */
// NOTE(review): this follows the AutoRest-generated SDK pattern (overload
// trios delegating to operation specs) — presumably generated code; prefer
// regenerating from the service spec over hand-editing. Confirm provenance.
export class FileServers {
  // shared client context that performs the actual HTTP requests
  private readonly client: StorSimpleManagementClientContext;
  /**
   * Create a FileServers.
   * @param {StorSimpleManagementClientContext} client Reference to the service client.
   */
  constructor(client: StorSimpleManagementClientContext) {
    this.client = client;
  }
  /**
   * Retrieves all the file servers in a device.
   * @param deviceName The device name.
   * @param resourceGroupName The resource group name
   * @param managerName The manager name
   * @param [options] The optional parameters
   * @returns Promise<Models.FileServersListByDeviceResponse>
   */
  listByDevice(deviceName: string, resourceGroupName: string, managerName: string, options?: msRest.RequestOptionsBase): Promise<Models.FileServersListByDeviceResponse>;
  /**
   * @param deviceName The device name.
   * @param resourceGroupName The resource group name
   * @param managerName The manager name
   * @param callback The callback
   */
  listByDevice(deviceName: string, resourceGroupName: string, managerName: string, callback: msRest.ServiceCallback<Models.FileServerList>): void;
  /**
   * @param deviceName The device name.
   * @param resourceGroupName The resource group name
   * @param managerName The manager name
   * @param options The optional parameters
   * @param callback The callback
   */
  listByDevice(deviceName: string, resourceGroupName: string, managerName: string, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.FileServerList>): void;
  listByDevice(deviceName: string, resourceGroupName: string, managerName: string, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.FileServerList>, callback?: msRest.ServiceCallback<Models.FileServerList>): Promise<Models.FileServersListByDeviceResponse> {
    return this.client.sendOperationRequest(
      {
        deviceName,
        resourceGroupName,
        managerName,
        options
      },
      listByDeviceOperationSpec,
      callback) as Promise<Models.FileServersListByDeviceResponse>;
  }
  /**
   * Returns the properties of the specified file server name.
   * @param deviceName The device name.
   * @param fileServerName The file server name.
   * @param resourceGroupName The resource group name
   * @param managerName The manager name
   * @param [options] The optional parameters
   * @returns Promise<Models.FileServersGetResponse>
   */
  get(deviceName: string, fileServerName: string, resourceGroupName: string, managerName: string, options?: msRest.RequestOptionsBase): Promise<Models.FileServersGetResponse>;
  /**
   * @param deviceName The device name.
   * @param fileServerName The file server name.
   * @param resourceGroupName The resource group name
   * @param managerName The manager name
   * @param callback The callback
   */
  get(deviceName: string, fileServerName: string, resourceGroupName: string, managerName: string, callback: msRest.ServiceCallback<Models.FileServer>): void;
  /**
   * @param deviceName The device name.
   * @param fileServerName The file server name.
   * @param resourceGroupName The resource group name
   * @param managerName The manager name
   * @param options The optional parameters
   * @param callback The callback
   */
  get(deviceName: string, fileServerName: string, resourceGroupName: string, managerName: string, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.FileServer>): void;
  get(deviceName: string, fileServerName: string, resourceGroupName: string, managerName: string, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.FileServer>, callback?: msRest.ServiceCallback<Models.FileServer>): Promise<Models.FileServersGetResponse> {
    return this.client.sendOperationRequest(
      {
        deviceName,
        fileServerName,
        resourceGroupName,
        managerName,
        options
      },
      getOperationSpec,
      callback) as Promise<Models.FileServersGetResponse>;
  }
  /**
   * Creates or updates the file server.
   * @param deviceName The device name.
   * @param fileServerName The file server name.
   * @param fileServer The file server.
   * @param resourceGroupName The resource group name
   * @param managerName The manager name
   * @param [options] The optional parameters
   * @returns Promise<Models.FileServersCreateOrUpdateResponse>
   */
  createOrUpdate(deviceName: string, fileServerName: string, fileServer: Models.FileServer, resourceGroupName: string, managerName: string, options?: msRest.RequestOptionsBase): Promise<Models.FileServersCreateOrUpdateResponse> {
    // starts the long-running operation and blocks until it completes
    return this.beginCreateOrUpdate(deviceName,fileServerName,fileServer,resourceGroupName,managerName,options)
      .then(lroPoller => lroPoller.pollUntilFinished()) as Promise<Models.FileServersCreateOrUpdateResponse>;
  }
  /**
   * Deletes the file server.
   * @param deviceName The device name.
   * @param fileServerName The file server name.
   * @param resourceGroupName The resource group name
   * @param managerName The manager name
   * @param [options] The optional parameters
   * @returns Promise<msRest.RestResponse>
   */
  deleteMethod(deviceName: string, fileServerName: string, resourceGroupName: string, managerName: string, options?: msRest.RequestOptionsBase): Promise<msRest.RestResponse> {
    // starts the long-running delete and blocks until it completes
    return this.beginDeleteMethod(deviceName,fileServerName,resourceGroupName,managerName,options)
      .then(lroPoller => lroPoller.pollUntilFinished());
  }
  /**
   * Backup the file server now.
   * @param deviceName The device name.
   * @param fileServerName The file server name.
   * @param resourceGroupName The resource group name
   * @param managerName The manager name
   * @param [options] The optional parameters
   * @returns Promise<msRest.RestResponse>
   */
  backupNow(deviceName: string, fileServerName: string, resourceGroupName: string, managerName: string, options?: msRest.RequestOptionsBase): Promise<msRest.RestResponse> {
    // starts the long-running backup and blocks until it completes
    return this.beginBackupNow(deviceName,fileServerName,resourceGroupName,managerName,options)
      .then(lroPoller => lroPoller.pollUntilFinished());
  }
  /**
   * Gets the file server metrics.
   * @param deviceName The name of the device.
   * @param fileServerName The name of the file server name.
   * @param resourceGroupName The resource group name
   * @param managerName The manager name
   * @param [options] The optional parameters
   * @returns Promise<Models.FileServersListMetricsResponse>
   */
  listMetrics(deviceName: string, fileServerName: string, resourceGroupName: string, managerName: string, options?: Models.FileServersListMetricsOptionalParams): Promise<Models.FileServersListMetricsResponse>;
  /**
   * @param deviceName The name of the device.
   * @param fileServerName The name of the file server name.
   * @param resourceGroupName The resource group name
   * @param managerName The manager name
   * @param callback The callback
   */
  listMetrics(deviceName: string, fileServerName: string, resourceGroupName: string, managerName: string, callback: msRest.ServiceCallback<Models.MetricList>): void;
  /**
   * @param deviceName The name of the device.
   * @param fileServerName The name of the file server name.
   * @param resourceGroupName The resource group name
   * @param managerName The manager name
   * @param options The optional parameters
   * @param callback The callback
   */
  listMetrics(deviceName: string, fileServerName: string, resourceGroupName: string, managerName: string, options: Models.FileServersListMetricsOptionalParams, callback: msRest.ServiceCallback<Models.MetricList>): void;
  listMetrics(deviceName: string, fileServerName: string, resourceGroupName: string, managerName: string, options?: Models.FileServersListMetricsOptionalParams | msRest.ServiceCallback<Models.MetricList>, callback?: msRest.ServiceCallback<Models.MetricList>): Promise<Models.FileServersListMetricsResponse> {
    return this.client.sendOperationRequest(
      {
        deviceName,
        fileServerName,
        resourceGroupName,
        managerName,
        options
      },
      listMetricsOperationSpec,
      callback) as Promise<Models.FileServersListMetricsResponse>;
  }
  /**
   * Retrieves metric definitions of all metrics aggregated at the file server.
   * @param deviceName The name of the device.
   * @param fileServerName The name of the file server.
   * @param resourceGroupName The resource group name
   * @param managerName The manager name
   * @param [options] The optional parameters
   * @returns Promise<Models.FileServersListMetricDefinitionResponse>
   */
  listMetricDefinition(deviceName: string, fileServerName: string, resourceGroupName: string, managerName: string, options?: msRest.RequestOptionsBase): Promise<Models.FileServersListMetricDefinitionResponse>;
  /**
   * @param deviceName The name of the device.
   * @param fileServerName The name of the file server.
   * @param resourceGroupName The resource group name
   * @param managerName The manager name
   * @param callback The callback
   */
  listMetricDefinition(deviceName: string, fileServerName: string, resourceGroupName: string, managerName: string, callback: msRest.ServiceCallback<Models.MetricDefinitionList>): void;
  /**
   * @param deviceName The name of the device.
   * @param fileServerName The name of the file server.
   * @param resourceGroupName The resource group name
   * @param managerName The manager name
   * @param options The optional parameters
   * @param callback The callback
   */
  listMetricDefinition(deviceName: string, fileServerName: string, resourceGroupName: string, managerName: string, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.MetricDefinitionList>): void;
  listMetricDefinition(deviceName: string, fileServerName: string, resourceGroupName: string, managerName: string, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.MetricDefinitionList>, callback?: msRest.ServiceCallback<Models.MetricDefinitionList>): Promise<Models.FileServersListMetricDefinitionResponse> {
    return this.client.sendOperationRequest(
      {
        deviceName,
        fileServerName,
        resourceGroupName,
        managerName,
        options
      },
      listMetricDefinitionOperationSpec,
      callback) as Promise<Models.FileServersListMetricDefinitionResponse>;
  }
  /**
   * Retrieves all the file servers in a manager.
   * @param resourceGroupName The resource group name
   * @param managerName The manager name
   * @param [options] The optional parameters
   * @returns Promise<Models.FileServersListByManagerResponse>
   */
  listByManager(resourceGroupName: string, managerName: string, options?: msRest.RequestOptionsBase): Promise<Models.FileServersListByManagerResponse>;
  /**
   * @param resourceGroupName The resource group name
   * @param managerName The manager name
   * @param callback The callback
   */
  listByManager(resourceGroupName: string, managerName: string, callback: msRest.ServiceCallback<Models.FileServerList>): void;
  /**
   * @param resourceGroupName The resource group name
   * @param managerName The manager name
   * @param options The optional parameters
   * @param callback The callback
   */
  listByManager(resourceGroupName: string, managerName: string, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.FileServerList>): void;
  listByManager(resourceGroupName: string, managerName: string, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.FileServerList>, callback?: msRest.ServiceCallback<Models.FileServerList>): Promise<Models.FileServersListByManagerResponse> {
    return this.client.sendOperationRequest(
      {
        resourceGroupName,
        managerName,
        options
      },
      listByManagerOperationSpec,
      callback) as Promise<Models.FileServersListByManagerResponse>;
  }
  /**
   * Creates or updates the file server.
   * @param deviceName The device name.
   * @param fileServerName The file server name.
   * @param fileServer The file server.
   * @param resourceGroupName The resource group name
   * @param managerName The manager name
   * @param [options] The optional parameters
   * @returns Promise<msRestAzure.LROPoller>
   */
  beginCreateOrUpdate(deviceName: string, fileServerName: string, fileServer: Models.FileServer, resourceGroupName: string, managerName: string, options?: msRest.RequestOptionsBase): Promise<msRestAzure.LROPoller> {
    return this.client.sendLRORequest(
      {
        deviceName,
        fileServerName,
        fileServer,
        resourceGroupName,
        managerName,
        options
      },
      beginCreateOrUpdateOperationSpec,
      options);
  }
  /**
   * Deletes the file server.
   * @param deviceName The device name.
   * @param fileServerName The file server name.
   * @param resourceGroupName The resource group name
   * @param managerName The manager name
   * @param [options] The optional parameters
   * @returns Promise<msRestAzure.LROPoller>
   */
  beginDeleteMethod(deviceName: string, fileServerName: string, resourceGroupName: string, managerName: string, options?: msRest.RequestOptionsBase): Promise<msRestAzure.LROPoller> {
    return this.client.sendLRORequest(
      {
        deviceName,
        fileServerName,
        resourceGroupName,
        managerName,
        options
      },
      beginDeleteMethodOperationSpec,
      options);
  }
  /**
   * Backup the file server now.
   * @param deviceName The device name.
   * @param fileServerName The file server name.
   * @param resourceGroupName The resource group name
   * @param managerName The manager name
   * @param [options] The optional parameters
   * @returns Promise<msRestAzure.LROPoller>
   */
  beginBackupNow(deviceName: string, fileServerName: string, resourceGroupName: string, managerName: string, options?: msRest.RequestOptionsBase): Promise<msRestAzure.LROPoller> {
    return this.client.sendLRORequest(
      {
        deviceName,
        fileServerName,
        resourceGroupName,
        managerName,
        options
      },
      beginBackupNowOperationSpec,
      options);
  }
}
// Operation Specifications
const serializer = new msRest.Serializer(Mappers);
// GET …/devices/{deviceName}/fileservers — list file servers on a device
const listByDeviceOperationSpec: msRest.OperationSpec = {
  httpMethod: "GET",
  path: "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorSimple/managers/{managerName}/devices/{deviceName}/fileservers",
  urlParameters: [
    Parameters.deviceName,
    Parameters.subscriptionId,
    Parameters.resourceGroupName,
    Parameters.managerName
  ],
  queryParameters: [
    Parameters.apiVersion
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  responses: {
    200: {
      bodyMapper: Mappers.FileServerList
    },
    default: {
      bodyMapper: Mappers.ErrorModel
    }
  },
  serializer
};
// GET …/fileservers/{fileServerName} — fetch a single file server
const getOperationSpec: msRest.OperationSpec = {
  httpMethod: "GET",
  path: "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorSimple/managers/{managerName}/devices/{deviceName}/fileservers/{fileServerName}",
  urlParameters: [
    Parameters.deviceName,
    Parameters.fileServerName,
    Parameters.subscriptionId,
    Parameters.resourceGroupName,
    Parameters.managerName
  ],
  queryParameters: [
    Parameters.apiVersion
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  responses: {
    200: {
      bodyMapper: Mappers.FileServer
    },
    default: {
      bodyMapper: Mappers.ErrorModel
    }
  },
  serializer
};
// GET …/fileservers/{fileServerName}/metrics — supports an OData $filter
const listMetricsOperationSpec: msRest.OperationSpec = {
  httpMethod: "GET",
  path: "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorSimple/managers/{managerName}/devices/{deviceName}/fileservers/{fileServerName}/metrics",
  urlParameters: [
    Parameters.deviceName,
    Parameters.fileServerName,
    Parameters.subscriptionId,
    Parameters.resourceGroupName,
    Parameters.managerName
  ],
  queryParameters: [
    Parameters.apiVersion,
    Parameters.filter
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  responses: {
    200: {
      bodyMapper: Mappers.MetricList
    },
    default: {
      bodyMapper: Mappers.ErrorModel
    }
  },
  serializer
};
// GET …/fileservers/{fileServerName}/metricsDefinitions
const listMetricDefinitionOperationSpec: msRest.OperationSpec = {
  httpMethod: "GET",
  path: "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorSimple/managers/{managerName}/devices/{deviceName}/fileservers/{fileServerName}/metricsDefinitions",
  urlParameters: [
    Parameters.deviceName,
    Parameters.fileServerName,
    Parameters.subscriptionId,
    Parameters.resourceGroupName,
    Parameters.managerName
  ],
  queryParameters: [
    Parameters.apiVersion
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  responses: {
    200: {
      bodyMapper: Mappers.MetricDefinitionList
    },
    default: {
      bodyMapper: Mappers.ErrorModel
    }
  },
  serializer
};
// GET …/managers/{managerName}/fileservers — list across the whole manager
const listByManagerOperationSpec: msRest.OperationSpec = {
  httpMethod: "GET",
  path: "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorSimple/managers/{managerName}/fileservers",
  urlParameters: [
    Parameters.subscriptionId,
    Parameters.resourceGroupName,
    Parameters.managerName
  ],
  queryParameters: [
    Parameters.apiVersion
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  responses: {
    200: {
      bodyMapper: Mappers.FileServerList
    },
    default: {
      bodyMapper: Mappers.ErrorModel
    }
  },
  serializer
};
// PUT …/fileservers/{fileServerName} — long-running create/update (202 = accepted)
const beginCreateOrUpdateOperationSpec: msRest.OperationSpec = {
  httpMethod: "PUT",
  path: "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorSimple/managers/{managerName}/devices/{deviceName}/fileservers/{fileServerName}",
  urlParameters: [
    Parameters.deviceName,
    Parameters.fileServerName,
    Parameters.subscriptionId,
    Parameters.resourceGroupName,
    Parameters.managerName
  ],
  queryParameters: [
    Parameters.apiVersion
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  requestBody: {
    parameterPath: "fileServer",
    mapper: {
      ...Mappers.FileServer,
      required: true
    }
  },
  responses: {
    200: {
      bodyMapper: Mappers.FileServer
    },
    202: {},
    default: {
      bodyMapper: Mappers.ErrorModel
    }
  },
  serializer
};
// DELETE …/fileservers/{fileServerName} — long-running delete
const beginDeleteMethodOperationSpec: msRest.OperationSpec = {
  httpMethod: "DELETE",
  path: "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorSimple/managers/{managerName}/devices/{deviceName}/fileservers/{fileServerName}",
  urlParameters: [
    Parameters.deviceName,
    Parameters.fileServerName,
    Parameters.subscriptionId,
    Parameters.resourceGroupName,
    Parameters.managerName
  ],
  queryParameters: [
    Parameters.apiVersion
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  responses: {
    202: {},
    204: {},
    default: {
      bodyMapper: Mappers.ErrorModel
    }
  },
  serializer
};
// POST …/fileservers/{fileServerName}/backup — long-running backup trigger
const beginBackupNowOperationSpec: msRest.OperationSpec = {
  httpMethod: "POST",
  path: "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorSimple/managers/{managerName}/devices/{deviceName}/fileservers/{fileServerName}/backup",
  urlParameters: [
    Parameters.deviceName,
    Parameters.fileServerName,
    Parameters.subscriptionId,
    Parameters.resourceGroupName,
    Parameters.managerName
  ],
  queryParameters: [
    Parameters.apiVersion
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  responses: {
    202: {},
    204: {},
    default: {
      bodyMapper: Mappers.ErrorModel
    }
  },
  serializer
};
import {
CdmCorpusDefinition,
CdmDocumentCollection,
CdmDocumentDefinition,
CdmEntityDefinition,
CdmFolderDefinition,
CdmManifestDefinition,
ImportInfo,
resolveOptions
} from '../../internal';
import { testHelper } from '../testHelper';
/**
 * Marks every document in the collection as needing re-indexing and
 * clears its cached import priorities so they are recomputed.
 */
function markDocumentsToIndex(documents: CdmDocumentCollection): void {
    for (const document of documents.allItems) {
        document.needsIndexing = true;
        document.importPriorities = undefined;
    }
}
/**
* Test methods for the CdmDocumentDefinition class.
*/
// tslint:disable-next-line: max-func-body-length
describe('Cdm/CdmDocumentDefinition', () => {
const testsSubpath: string = 'Cdm/Document';
/**
* Test when A -> M/B -> C -> B.
* In this case, although A imports B with a moniker, B should be in the priorityImports because it is imported by C.
*/
    /**
     * Test when A -> M/B -> C -> B.
     * In this case, although A imports B with a moniker, B should be in the priorityImports because it is imported by C.
     */
    it('testCircularImportWithMoniker', async () => {
        const corpus: CdmCorpusDefinition = testHelper.getLocalCorpus(testsSubpath, 'testCircularImportWithMoniker');
        const folder: CdmFolderDefinition = corpus.storage.fetchRootFolder('local');
        // build the import graph in memory: A -(moniker)-> B -> C -> B
        const docA: CdmDocumentDefinition = new CdmDocumentDefinition(corpus.ctx, 'A.cdm.json');
        folder.documents.push(docA);
        docA.imports.push('B.cdm.json', 'moniker');
        const docB: CdmDocumentDefinition = new CdmDocumentDefinition(corpus.ctx, 'B.cdm.json');
        folder.documents.push(docB);
        docB.imports.push('C.cdm.json');
        const docC: CdmDocumentDefinition = new CdmDocumentDefinition(corpus.ctx, 'C.cdm.json');
        folder.documents.push(docC);
        docC.imports.push('B.cdm.json');
        // forces docB to be indexed first.
        await docB.indexIfNeeded(new resolveOptions(), true);
        await docA.indexIfNeeded(new resolveOptions(), true);
        // should contain A, B and C.
        expect(docA.importPriorities.importPriority.size)
            .toEqual(3);
        expect(docA.importPriorities.hasCircularImport)
            .toBe(false);
        // docB and docC should have the hasCircularImport set to true.
        expect(docB.importPriorities.hasCircularImport)
            .toBe(true);
        expect(docC.importPriorities.hasCircularImport)
            .toBe(true);
    });
/**
* Test when A -> B -> C/M -> D -> C.
* In this case, although B imports C with a moniker, C should be in the A's priorityImports because it is imported by D.
*/
it('testDeeperCircularImportWithMoniker', async () => {
const corpus: CdmCorpusDefinition = testHelper.getLocalCorpus(testsSubpath, 'testDeeperCircularImportWithMoniker');
const folder: CdmFolderDefinition = corpus.storage.fetchRootFolder('local');
const docA: CdmDocumentDefinition = new CdmDocumentDefinition(corpus.ctx, 'A.cdm.json');
folder.documents.push(docA);
docA.imports.push('B.cdm.json');
const docB: CdmDocumentDefinition = new CdmDocumentDefinition(corpus.ctx, 'B.cdm.json');
folder.documents.push(docB);
docB.imports.push('C.cdm.json', 'moniker');
const docC: CdmDocumentDefinition = new CdmDocumentDefinition(corpus.ctx, 'C.cdm.json');
folder.documents.push(docC);
docC.imports.push('D.cdm.json');
const docD: CdmDocumentDefinition = new CdmDocumentDefinition(corpus.ctx, 'D.cdm.json');
folder.documents.push(docD);
docD.imports.push('C.cdm.json');
// indexIfNeeded will internally call prioritizeImports on every documen, truet.
await docA.indexIfNeeded(new resolveOptions(), true);
expect(docA.importPriorities.importPriority.size)
.toBe(4);
// reset the importsPriorities.
markDocumentsToIndex(folder.documents);
// force docC to be indexed first, so the priorityList will be read from the cache this time.
await docC.indexIfNeeded(new resolveOptions(), true);
await docA.indexIfNeeded(new resolveOptions(), true);
expect(docA.importPriorities.importPriority.size)
.toBe(4);
assertImportInfo(docA.importPriorities.importPriority.get(docA), 0, false);
assertImportInfo(docA.importPriorities.importPriority.get(docB), 1, false);
assertImportInfo(docA.importPriorities.importPriority.get(docD), 2, false);
assertImportInfo(docA.importPriorities.importPriority.get(docC), 3, false);
// indexes the rest of the documents.
await docB.indexIfNeeded(new resolveOptions(), true);
await docD.indexIfNeeded(new resolveOptions(), true);
expect(docA.importPriorities.hasCircularImport)
.toBe(false);
expect(docB.importPriorities.hasCircularImport)
.toBe(false);
expect(docC.importPriorities.hasCircularImport)
.toBe(true);
expect(docD.importPriorities.hasCircularImport)
.toBe(true);
});
/**
* Test when A -> B -> C/M -> D.
* Index docB first then docA. Make sure that C does not appear in docA priority list.
*/
it('testReadingCachedImportPriority', async () => {
const corpus: CdmCorpusDefinition = testHelper.getLocalCorpus(testsSubpath, 'testReadingCachedImportPriority');
const folder: CdmFolderDefinition = corpus.storage.fetchRootFolder('local');
const docA: CdmDocumentDefinition = new CdmDocumentDefinition(corpus.ctx, 'A.cdm.json');
folder.documents.push(docA);
docA.imports.push('B.cdm.json');
const docB: CdmDocumentDefinition = new CdmDocumentDefinition(corpus.ctx, 'B.cdm.json');
folder.documents.push(docB);
docB.imports.push('C.cdm.json', 'moniker');
const docC: CdmDocumentDefinition = new CdmDocumentDefinition(corpus.ctx, 'C.cdm.json');
folder.documents.push(docC);
docC.imports.push('D.cdm.json');
const docD: CdmDocumentDefinition = new CdmDocumentDefinition(corpus.ctx, 'D.cdm.json');
folder.documents.push(docD);
// index docB first and check its import priorities.
await docB.indexIfNeeded(new resolveOptions(), true);
expect(docB.importPriorities.importPriority.size)
.toBe(3);
assertImportInfo(docB.importPriorities.importPriority.get(docB), 0, false);
assertImportInfo(docB.importPriorities.importPriority.get(docD), 1, false);
assertImportInfo(docB.importPriorities.importPriority.get(docC), 2, true);
// now index docA, which should read docB's priority list from the cache.
await docA.indexIfNeeded(new resolveOptions(), true);
expect(docA.importPriorities.importPriority.size)
.toBe(3);
assertImportInfo(docA.importPriorities.importPriority.get(docA), 0, false);
assertImportInfo(docA.importPriorities.importPriority.get(docB), 1, false);
assertImportInfo(docA.importPriorities.importPriority.get(docD), 2, false);
});
/**
* Test if monikered imports are added to the end of the priority list.
*/
it('testMonikeredImportIsAddedToEnd', async () => {
const corpus: CdmCorpusDefinition = testHelper.getLocalCorpus(testsSubpath, 'testMonikeredImportIsAddedToEnd');
const folder: CdmFolderDefinition = corpus.storage.fetchRootFolder('local');
const docA: CdmDocumentDefinition = new CdmDocumentDefinition(corpus.ctx, 'A.cdm.json');
folder.documents.push(docA);
docA.imports.push('B.cdm.json', 'moniker');
const docB: CdmDocumentDefinition = new CdmDocumentDefinition(corpus.ctx, 'B.cdm.json');
folder.documents.push(docB);
docB.imports.push('C.cdm.json');
const docC: CdmDocumentDefinition = new CdmDocumentDefinition(corpus.ctx, 'C.cdm.json');
folder.documents.push(docC);
// forces docB to be indexed first, so the priorityList will be read from the cache this time.
await docB.indexIfNeeded(new resolveOptions(docB), true);
await docA.indexIfNeeded(new resolveOptions(docA), true);
// should contain all three documents.
expect(docA.importPriorities.importPriority.size)
.toBe(3);
assertImportInfo(docA.importPriorities.importPriority.get(docA), 0, false);
assertImportInfo(docA.importPriorities.importPriority.get(docC), 1, false);
// docB is monikered so it should appear at the end of the list.
assertImportInfo(docA.importPriorities.importPriority.get(docB), 2, true);
// make sure that the has circular import is set to false.
expect(docA.importPriorities.hasCircularImport)
.toBe(false);
expect(docB.importPriorities.hasCircularImport)
.toBe(false);
expect(docC.importPriorities.hasCircularImport)
.toBe(false);
});
/**
* Setting the forceReload flag to true correctly reloads the document
*/
it('testDocumentForceReload', async () => {
const testName: string = 'testDocumentForceReload';
const corpus: CdmCorpusDefinition = testHelper.getLocalCorpus(testsSubpath, testName);
// load the document and entity the first time
await corpus.fetchObjectAsync('doc.cdm.json/entity');
// reload the same doc and make sure it is reloaded correctly
const reloadedEntity: CdmEntityDefinition = await corpus.fetchObjectAsync('doc.cdm.json/entity', undefined, undefined, true);
// if the reloaded doc is not indexed correctly, the entity will not be able to be found
expect(reloadedEntity).not
.toBeUndefined();
});
/**
* Tests if the DocumentVersion is set on the resolved document
*/
it('testDocumentVersionSetOnResolution', async () => {
const testName: string = "testDocumentVersionSetOnResolution";
const corpus: CdmCorpusDefinition = testHelper.getLocalCorpus(testsSubpath, testName);
const manifest: CdmManifestDefinition = await corpus.fetchObjectAsync("local:/default.manifest.cdm.json");
const document: CdmDocumentDefinition = await corpus.fetchObjectAsync("local:/Person.cdm.json");
expect(manifest.documentVersion)
.toEqual('2.1.3');
expect(document.documentVersion)
.toEqual('1.5');
const resManifest: CdmManifestDefinition = await manifest.createResolvedManifestAsync(`res-${manifest.name}`, null);
const resEntity: CdmEntityDefinition = await corpus.fetchObjectAsync(resManifest.entities.allItems[0].entityPath, resManifest);
var resDocument = resEntity.inDocument;
expect(resManifest.documentVersion)
.toEqual('2.1.3');
expect(resDocument.documentVersion)
.toEqual('1.5');
});
/**
* Helper function to assert the ImportInfo class.
*/
function assertImportInfo(importInfo: ImportInfo, expectedPriority: number, expectedIsMoniker: boolean): void {
expect(importInfo.priority)
.toEqual(expectedPriority);
expect(importInfo.isMoniker)
.toEqual(expectedIsMoniker);
}
}); | the_stack |
import { GlobalProps } from 'ojs/ojvcomponent';
import { ComponentChildren } from 'preact';
import RequiredValidator = require('../ojvalidator-required');
import RegExpValidator = require('../ojvalidator-regexp');
import NumberRangeValidator = require('../ojvalidator-numberrange');
import LengthValidator = require('../ojvalidator-length');
import { IntlNumberConverter, NumberConverter } from '../ojconverter-number';
import AsyncValidator = require('../ojvalidator-async');
import Validator = require('../ojvalidator');
import Converter = require('../ojconverter');
import { Validation } from '../ojvalidationfactory-base';
import { editableValue, editableValueEventMap, editableValueSettableProperties } from '../ojeditablevalue';
import { JetElement, JetSettableProperties, JetElementCustomEvent, JetSetPropertyType } from '..';
/**
 * Typed element interface for the oj-input-number component. Extends the
 * editableValue base contract (value type number | null) with number-specific
 * properties (min/max/step), converter/validator hooks, and step/validate methods.
 */
export interface ojInputNumber extends editableValue<number | null, ojInputNumberSettableProperties, number | null, string> {
    asyncValidators: Array<AsyncValidator<number>>;
    autocomplete: 'on' | 'off' | string;
    autofocus: boolean;
    converter: Promise<Converter<number>> | Converter<number>;
    displayOptions?: {
        converterHint?: 'display' | 'none';
        helpInstruction?: Array<'notewindow' | 'none'> | 'notewindow' | 'none';
        messages?: 'display' | 'none';
        validatorHint?: 'display' | 'none';
    };
    labelledBy: string | null;
    max: number | null;
    min: number | null;
    placeholder: string | null;
    readonly rawValue: string;
    readonly: boolean | null;
    required: boolean;
    step: number | null;
    readonly transientValue: number | null;
    validators: Array<Validator<number> | AsyncValidator<number>>;
    value: number | null;
    virtualKeyboard: 'auto' | 'number' | 'text';
    // Translatable resource strings for built-in messages and tooltips.
    translations: {
        numberRange?: {
            hint?: {
                exact?: string;
                inRange?: string;
                max?: string;
                min?: string;
            };
            messageDetail?: {
                exact?: string;
                rangeOverflow?: string;
                rangeUnderflow?: string;
            };
            messageSummary?: {
                rangeOverflow?: string;
                rangeUnderflow?: string;
            };
        };
        required?: {
            hint?: string;
            messageDetail?: string;
            messageSummary?: string;
        };
        tooltipDecrement?: string;
        tooltipIncrement?: string;
    };
    // Strongly typed overload for events declared in ojInputNumberEventMap;
    // the second overload keeps the generic DOM addEventListener signature usable.
    addEventListener<T extends keyof ojInputNumberEventMap>(type: T, listener: (this: HTMLElement, ev: ojInputNumberEventMap[T]) => any, options?: (boolean | AddEventListenerOptions)): void;
    addEventListener(type: string, listener: EventListenerOrEventListenerObject, options?: (boolean | AddEventListenerOptions)): void;
    getProperty<T extends keyof ojInputNumberSettableProperties>(property: T): ojInputNumber[T];
    getProperty(property: string): any;
    setProperty<T extends keyof ojInputNumberSettableProperties>(property: T, value: ojInputNumberSettableProperties[T]): void;
    setProperty<T extends string>(property: T, value: JetSetPropertyType<T, ojInputNumberSettableProperties>): void;
    setProperties(properties: ojInputNumberSettablePropertiesLenient): void;
    refresh(): void;
    stepDown(steps?: number): void;
    stepUp(steps?: number): void;
    validate(): Promise<string>;
}
/**
 * Companion namespace declaring the event payload types for oj-input-number:
 * the two animation CustomEvents plus one "<property>Changed" alias per
 * component property (including those inherited from editableValue).
 */
export namespace ojInputNumber {
    interface ojAnimateEnd extends CustomEvent<{
        action: string;
        element: Element;
        [propName: string]: any;
    }> {
    }
    interface ojAnimateStart extends CustomEvent<{
        action: string;
        element: Element;
        endCallback: (() => void);
        [propName: string]: any;
    }> {
    }
    // tslint:disable-next-line interface-over-type-literal
    type asyncValidatorsChanged = JetElementCustomEvent<ojInputNumber["asyncValidators"]>;
    // tslint:disable-next-line interface-over-type-literal
    type autocompleteChanged = JetElementCustomEvent<ojInputNumber["autocomplete"]>;
    // tslint:disable-next-line interface-over-type-literal
    type autofocusChanged = JetElementCustomEvent<ojInputNumber["autofocus"]>;
    // tslint:disable-next-line interface-over-type-literal
    type converterChanged = JetElementCustomEvent<ojInputNumber["converter"]>;
    // tslint:disable-next-line interface-over-type-literal
    type displayOptionsChanged = JetElementCustomEvent<ojInputNumber["displayOptions"]>;
    // tslint:disable-next-line interface-over-type-literal
    type labelledByChanged = JetElementCustomEvent<ojInputNumber["labelledBy"]>;
    // tslint:disable-next-line interface-over-type-literal
    type maxChanged = JetElementCustomEvent<ojInputNumber["max"]>;
    // tslint:disable-next-line interface-over-type-literal
    type minChanged = JetElementCustomEvent<ojInputNumber["min"]>;
    // tslint:disable-next-line interface-over-type-literal
    type placeholderChanged = JetElementCustomEvent<ojInputNumber["placeholder"]>;
    // tslint:disable-next-line interface-over-type-literal
    type rawValueChanged = JetElementCustomEvent<ojInputNumber["rawValue"]>;
    // tslint:disable-next-line interface-over-type-literal
    type readonlyChanged = JetElementCustomEvent<ojInputNumber["readonly"]>;
    // tslint:disable-next-line interface-over-type-literal
    type requiredChanged = JetElementCustomEvent<ojInputNumber["required"]>;
    // tslint:disable-next-line interface-over-type-literal
    type stepChanged = JetElementCustomEvent<ojInputNumber["step"]>;
    // tslint:disable-next-line interface-over-type-literal
    type transientValueChanged = JetElementCustomEvent<ojInputNumber["transientValue"]>;
    // tslint:disable-next-line interface-over-type-literal
    type validatorsChanged = JetElementCustomEvent<ojInputNumber["validators"]>;
    // tslint:disable-next-line interface-over-type-literal
    type valueChanged = JetElementCustomEvent<ojInputNumber["value"]>;
    // tslint:disable-next-line interface-over-type-literal
    type virtualKeyboardChanged = JetElementCustomEvent<ojInputNumber["virtualKeyboard"]>;
    //------------------------------------------------------------
    // Start: generated events for inherited properties
    //------------------------------------------------------------
    // tslint:disable-next-line interface-over-type-literal
    type describedByChanged = editableValue.describedByChanged<number | null, ojInputNumberSettableProperties, number | null, string>;
    // tslint:disable-next-line interface-over-type-literal
    type disabledChanged = editableValue.disabledChanged<number | null, ojInputNumberSettableProperties, number | null, string>;
    // tslint:disable-next-line interface-over-type-literal
    type helpChanged = editableValue.helpChanged<number | null, ojInputNumberSettableProperties, number | null, string>;
    // tslint:disable-next-line interface-over-type-literal
    type helpHintsChanged = editableValue.helpHintsChanged<number | null, ojInputNumberSettableProperties, number | null, string>;
    // tslint:disable-next-line interface-over-type-literal
    type labelEdgeChanged = editableValue.labelEdgeChanged<number | null, ojInputNumberSettableProperties, number | null, string>;
    // tslint:disable-next-line interface-over-type-literal
    type labelHintChanged = editableValue.labelHintChanged<number | null, ojInputNumberSettableProperties, number | null, string>;
    // tslint:disable-next-line interface-over-type-literal
    type messagesCustomChanged = editableValue.messagesCustomChanged<number | null, ojInputNumberSettableProperties, number | null, string>;
    // tslint:disable-next-line interface-over-type-literal
    type userAssistanceDensityChanged = editableValue.userAssistanceDensityChanged<number | null, ojInputNumberSettableProperties, number | null, string>;
    // tslint:disable-next-line interface-over-type-literal
    type validChanged = editableValue.validChanged<number | null, ojInputNumberSettableProperties, number | null, string>;
    //------------------------------------------------------------
    // End: generated events for inherited properties
    //------------------------------------------------------------
}
/**
 * Maps event type names to payload types for the typed addEventListener
 * overload on ojInputNumber. Extends the inherited editableValue event map
 * with the component's own property-change and animation events.
 */
export interface ojInputNumberEventMap extends editableValueEventMap<number | null, ojInputNumberSettableProperties, number | null, string> {
    'ojAnimateEnd': ojInputNumber.ojAnimateEnd;
    'ojAnimateStart': ojInputNumber.ojAnimateStart;
    'asyncValidatorsChanged': JetElementCustomEvent<ojInputNumber["asyncValidators"]>;
    'autocompleteChanged': JetElementCustomEvent<ojInputNumber["autocomplete"]>;
    'autofocusChanged': JetElementCustomEvent<ojInputNumber["autofocus"]>;
    'converterChanged': JetElementCustomEvent<ojInputNumber["converter"]>;
    'displayOptionsChanged': JetElementCustomEvent<ojInputNumber["displayOptions"]>;
    'labelledByChanged': JetElementCustomEvent<ojInputNumber["labelledBy"]>;
    'maxChanged': JetElementCustomEvent<ojInputNumber["max"]>;
    'minChanged': JetElementCustomEvent<ojInputNumber["min"]>;
    'placeholderChanged': JetElementCustomEvent<ojInputNumber["placeholder"]>;
    'rawValueChanged': JetElementCustomEvent<ojInputNumber["rawValue"]>;
    'readonlyChanged': JetElementCustomEvent<ojInputNumber["readonly"]>;
    'requiredChanged': JetElementCustomEvent<ojInputNumber["required"]>;
    'stepChanged': JetElementCustomEvent<ojInputNumber["step"]>;
    'transientValueChanged': JetElementCustomEvent<ojInputNumber["transientValue"]>;
    'validatorsChanged': JetElementCustomEvent<ojInputNumber["validators"]>;
    'valueChanged': JetElementCustomEvent<ojInputNumber["value"]>;
    'virtualKeyboardChanged': JetElementCustomEvent<ojInputNumber["virtualKeyboard"]>;
    'describedByChanged': JetElementCustomEvent<ojInputNumber["describedBy"]>;
    'disabledChanged': JetElementCustomEvent<ojInputNumber["disabled"]>;
    'helpChanged': JetElementCustomEvent<ojInputNumber["help"]>;
    'helpHintsChanged': JetElementCustomEvent<ojInputNumber["helpHints"]>;
    'labelEdgeChanged': JetElementCustomEvent<ojInputNumber["labelEdge"]>;
    'labelHintChanged': JetElementCustomEvent<ojInputNumber["labelHint"]>;
    'messagesCustomChanged': JetElementCustomEvent<ojInputNumber["messagesCustom"]>;
    'userAssistanceDensityChanged': JetElementCustomEvent<ojInputNumber["userAssistanceDensity"]>;
    'validChanged': JetElementCustomEvent<ojInputNumber["valid"]>;
}
/**
 * The subset of ojInputNumber members that can be set via setProperty /
 * setProperties (mirrors the writable property surface of the interface).
 */
export interface ojInputNumberSettableProperties extends editableValueSettableProperties<number | null, number | null, string> {
    asyncValidators: Array<AsyncValidator<number>>;
    autocomplete: 'on' | 'off' | string;
    autofocus: boolean;
    converter: Promise<Converter<number>> | Converter<number>;
    displayOptions?: {
        converterHint?: 'display' | 'none';
        helpInstruction?: Array<'notewindow' | 'none'> | 'notewindow' | 'none';
        messages?: 'display' | 'none';
        validatorHint?: 'display' | 'none';
    };
    labelledBy: string | null;
    max: number | null;
    min: number | null;
    placeholder: string | null;
    readonly rawValue: string;
    readonly: boolean | null;
    required: boolean;
    step: number | null;
    readonly transientValue: number | null;
    validators: Array<Validator<number> | AsyncValidator<number>>;
    value: number | null;
    virtualKeyboard: 'auto' | 'number' | 'text';
    translations: {
        numberRange?: {
            hint?: {
                exact?: string;
                inRange?: string;
                max?: string;
                min?: string;
            };
            messageDetail?: {
                exact?: string;
                rangeOverflow?: string;
                rangeUnderflow?: string;
            };
            messageSummary?: {
                rangeOverflow?: string;
                rangeUnderflow?: string;
            };
        };
        required?: {
            hint?: string;
            messageDetail?: string;
            messageSummary?: string;
        };
        tooltipDecrement?: string;
        tooltipIncrement?: string;
    };
}
/**
 * Loosely typed variant of the settable properties, accepted by
 * setProperties; unknown keys are permitted via the index signature.
 */
export interface ojInputNumberSettablePropertiesLenient extends Partial<ojInputNumberSettableProperties> {
    [key: string]: any;
}
/**
 * Alias exposing the element interface under the "InputNumberElement" name;
 * the companion namespace below re-declares the same event types under that
 * alias so both naming styles are usable.
 */
export type InputNumberElement = ojInputNumber;
export namespace InputNumberElement {
    interface ojAnimateEnd extends CustomEvent<{
        action: string;
        element: Element;
        [propName: string]: any;
    }> {
    }
    interface ojAnimateStart extends CustomEvent<{
        action: string;
        element: Element;
        endCallback: (() => void);
        [propName: string]: any;
    }> {
    }
    // tslint:disable-next-line interface-over-type-literal
    type asyncValidatorsChanged = JetElementCustomEvent<ojInputNumber["asyncValidators"]>;
    // tslint:disable-next-line interface-over-type-literal
    type autocompleteChanged = JetElementCustomEvent<ojInputNumber["autocomplete"]>;
    // tslint:disable-next-line interface-over-type-literal
    type autofocusChanged = JetElementCustomEvent<ojInputNumber["autofocus"]>;
    // tslint:disable-next-line interface-over-type-literal
    type converterChanged = JetElementCustomEvent<ojInputNumber["converter"]>;
    // tslint:disable-next-line interface-over-type-literal
    type displayOptionsChanged = JetElementCustomEvent<ojInputNumber["displayOptions"]>;
    // tslint:disable-next-line interface-over-type-literal
    type labelledByChanged = JetElementCustomEvent<ojInputNumber["labelledBy"]>;
    // tslint:disable-next-line interface-over-type-literal
    type maxChanged = JetElementCustomEvent<ojInputNumber["max"]>;
    // tslint:disable-next-line interface-over-type-literal
    type minChanged = JetElementCustomEvent<ojInputNumber["min"]>;
    // tslint:disable-next-line interface-over-type-literal
    type placeholderChanged = JetElementCustomEvent<ojInputNumber["placeholder"]>;
    // tslint:disable-next-line interface-over-type-literal
    type rawValueChanged = JetElementCustomEvent<ojInputNumber["rawValue"]>;
    // tslint:disable-next-line interface-over-type-literal
    type readonlyChanged = JetElementCustomEvent<ojInputNumber["readonly"]>;
    // tslint:disable-next-line interface-over-type-literal
    type requiredChanged = JetElementCustomEvent<ojInputNumber["required"]>;
    // tslint:disable-next-line interface-over-type-literal
    type stepChanged = JetElementCustomEvent<ojInputNumber["step"]>;
    // tslint:disable-next-line interface-over-type-literal
    type transientValueChanged = JetElementCustomEvent<ojInputNumber["transientValue"]>;
    // tslint:disable-next-line interface-over-type-literal
    type validatorsChanged = JetElementCustomEvent<ojInputNumber["validators"]>;
    // tslint:disable-next-line interface-over-type-literal
    type valueChanged = JetElementCustomEvent<ojInputNumber["value"]>;
    // tslint:disable-next-line interface-over-type-literal
    type virtualKeyboardChanged = JetElementCustomEvent<ojInputNumber["virtualKeyboard"]>;
    //------------------------------------------------------------
    // Start: generated events for inherited properties
    //------------------------------------------------------------
    // tslint:disable-next-line interface-over-type-literal
    type describedByChanged = editableValue.describedByChanged<number | null, ojInputNumberSettableProperties, number | null, string>;
    // tslint:disable-next-line interface-over-type-literal
    type disabledChanged = editableValue.disabledChanged<number | null, ojInputNumberSettableProperties, number | null, string>;
    // tslint:disable-next-line interface-over-type-literal
    type helpChanged = editableValue.helpChanged<number | null, ojInputNumberSettableProperties, number | null, string>;
    // tslint:disable-next-line interface-over-type-literal
    type helpHintsChanged = editableValue.helpHintsChanged<number | null, ojInputNumberSettableProperties, number | null, string>;
    // tslint:disable-next-line interface-over-type-literal
    type labelEdgeChanged = editableValue.labelEdgeChanged<number | null, ojInputNumberSettableProperties, number | null, string>;
    // tslint:disable-next-line interface-over-type-literal
    type labelHintChanged = editableValue.labelHintChanged<number | null, ojInputNumberSettableProperties, number | null, string>;
    // tslint:disable-next-line interface-over-type-literal
    type messagesCustomChanged = editableValue.messagesCustomChanged<number | null, ojInputNumberSettableProperties, number | null, string>;
    // tslint:disable-next-line interface-over-type-literal
    type userAssistanceDensityChanged = editableValue.userAssistanceDensityChanged<number | null, ojInputNumberSettableProperties, number | null, string>;
    // tslint:disable-next-line interface-over-type-literal
    type validChanged = editableValue.validChanged<number | null, ojInputNumberSettableProperties, number | null, string>;
    //------------------------------------------------------------
    // End: generated events for inherited properties
    //------------------------------------------------------------
}
/**
 * Props accepted when using <oj-input-number> in (p)react JSX: all settable
 * properties (read-only/partial), global attributes, ref/key, and one
 * "on<Event>" callback per entry in ojInputNumberEventMap.
 */
export interface InputNumberIntrinsicProps extends Partial<Readonly<ojInputNumberSettableProperties>>, GlobalProps, Pick<preact.JSX.HTMLAttributes, 'ref' | 'key'> {
    onojAnimateEnd?: (value: ojInputNumberEventMap['ojAnimateEnd']) => void;
    onojAnimateStart?: (value: ojInputNumberEventMap['ojAnimateStart']) => void;
    onasyncValidatorsChanged?: (value: ojInputNumberEventMap['asyncValidatorsChanged']) => void;
    onautocompleteChanged?: (value: ojInputNumberEventMap['autocompleteChanged']) => void;
    onautofocusChanged?: (value: ojInputNumberEventMap['autofocusChanged']) => void;
    onconverterChanged?: (value: ojInputNumberEventMap['converterChanged']) => void;
    ondisplayOptionsChanged?: (value: ojInputNumberEventMap['displayOptionsChanged']) => void;
    onlabelledByChanged?: (value: ojInputNumberEventMap['labelledByChanged']) => void;
    onmaxChanged?: (value: ojInputNumberEventMap['maxChanged']) => void;
    onminChanged?: (value: ojInputNumberEventMap['minChanged']) => void;
    onplaceholderChanged?: (value: ojInputNumberEventMap['placeholderChanged']) => void;
    onrawValueChanged?: (value: ojInputNumberEventMap['rawValueChanged']) => void;
    onreadonlyChanged?: (value: ojInputNumberEventMap['readonlyChanged']) => void;
    onrequiredChanged?: (value: ojInputNumberEventMap['requiredChanged']) => void;
    onstepChanged?: (value: ojInputNumberEventMap['stepChanged']) => void;
    ontransientValueChanged?: (value: ojInputNumberEventMap['transientValueChanged']) => void;
    onvalidatorsChanged?: (value: ojInputNumberEventMap['validatorsChanged']) => void;
    onvalueChanged?: (value: ojInputNumberEventMap['valueChanged']) => void;
    onvirtualKeyboardChanged?: (value: ojInputNumberEventMap['virtualKeyboardChanged']) => void;
    ondescribedByChanged?: (value: ojInputNumberEventMap['describedByChanged']) => void;
    ondisabledChanged?: (value: ojInputNumberEventMap['disabledChanged']) => void;
    onhelpChanged?: (value: ojInputNumberEventMap['helpChanged']) => void;
    onhelpHintsChanged?: (value: ojInputNumberEventMap['helpHintsChanged']) => void;
    onlabelEdgeChanged?: (value: ojInputNumberEventMap['labelEdgeChanged']) => void;
    onlabelHintChanged?: (value: ojInputNumberEventMap['labelHintChanged']) => void;
    onmessagesCustomChanged?: (value: ojInputNumberEventMap['messagesCustomChanged']) => void;
    onuserAssistanceDensityChanged?: (value: ojInputNumberEventMap['userAssistanceDensityChanged']) => void;
    onvalidChanged?: (value: ojInputNumberEventMap['validChanged']) => void;
    children?: ComponentChildren;
}
// Registers the custom element tag with preact's JSX type checking.
declare global {
    namespace preact.JSX {
        interface IntrinsicElements {
            "oj-input-number": InputNumberIntrinsicProps;
        }
    }
}
import { strict as assert } from "assert";
import { IGCTestProvider, runGCTests } from "@fluid-internal/test-dds-utils";
import {
MockFluidDataStoreRuntime,
MockContainerRuntimeFactory,
MockContainerRuntimeFactoryForReconnection,
MockContainerRuntimeForReconnection,
MockStorage,
MockSharedObjectServices,
} from "@fluidframework/test-runtime-utils";
import { SharedCell } from "../cell";
import { CellFactory } from "../cellFactory";
import { ISharedCell } from "../interfaces";
/**
 * Creates a SharedCell with the given id and connects it to the provided
 * container runtime factory via a fresh mock data-store runtime.
 */
function createConnectedCell(id: string, runtimeFactory: MockContainerRuntimeFactory) {
    const dataStoreRuntime = new MockFluidDataStoreRuntime();
    const containerRuntime = runtimeFactory.createContainerRuntime(dataStoreRuntime);
    const cell = new SharedCell(id, dataStoreRuntime, CellFactory.Attributes);
    cell.connect({
        deltaConnection: containerRuntime.createDeltaConnection(),
        objectStorage: new MockStorage(),
    });
    return cell;
}
/**
 * Creates a SharedCell that stays in local (unattached) state.
 *
 * @param id - Id for the new cell. The previous implementation ignored this
 * parameter and always used the hard-coded id "cell"; it is now honored
 * (behavior is unchanged for the existing caller, which passes "cell").
 */
function createLocalCell(id: string): SharedCell {
    return new SharedCell(id, new MockFluidDataStoreRuntime(), CellFactory.Attributes);
}
/**
 * Creates a SharedCell wired to a reconnection-capable container runtime, so
 * tests can toggle the runtime's connected state. Returns both the cell and
 * its container runtime.
 */
function createCellForReconnection(id: string, runtimeFactory: MockContainerRuntimeFactoryForReconnection) {
    const dataStoreRuntime = new MockFluidDataStoreRuntime();
    const containerRuntime = runtimeFactory.createContainerRuntime(dataStoreRuntime);
    const cell = new SharedCell(id, dataStoreRuntime, CellFactory.Attributes);
    cell.connect({
        deltaConnection: containerRuntime.createDeltaConnection(),
        objectStorage: new MockStorage(),
    });
    return { cell, containerRuntime };
}
describe("Cell", () => {
describe("Local state", () => {
let cell: SharedCell;
beforeEach(() => {
cell = createLocalCell("cell");
});
describe("APIs", () => {
it("Can create a cell", () => {
assert.ok(cell, "Could not create a cell");
});
it("Can set and get cell data", () => {
cell.set("testValue");
assert.equal(cell.get(), "testValue", "Could not retrieve cell value");
});
it("can delete cell data", () => {
cell.set("testValue");
assert.equal(cell.get(), "testValue", "Could not retrieve cell value");
cell.delete();
assert.equal(cell.get(), undefined, "Could not delete cell value");
});
it("can load a SharedCell from snapshot", async () => {
cell.set("testValue");
assert.equal(cell.get(), "testValue", "Could not retrieve cell value");
const services = MockSharedObjectServices.createFromSummary(cell.summarize().summary);
const cell2 = new SharedCell("cell2", new MockFluidDataStoreRuntime(), CellFactory.Attributes);
await cell2.load(services);
assert.equal(cell2.get(), "testValue", "Could not load SharedCell from snapshot");
});
});
describe("Op processing in local state", () => {
it("should correctly process a set operation sent in local state", async () => {
const dataStoreRuntime1 = new MockFluidDataStoreRuntime();
const cell1 = new SharedCell("cell1", dataStoreRuntime1, CellFactory.Attributes);
// Set a value in local state.
const value = "testValue";
cell1.set(value);
// Load a new SharedCell in connected state from the snapshot of the first one.
const containerRuntimeFactory = new MockContainerRuntimeFactory();
const dataStoreRuntime2 = new MockFluidDataStoreRuntime();
const containerRuntime2 = containerRuntimeFactory.createContainerRuntime(dataStoreRuntime2);
const services2 = MockSharedObjectServices.createFromSummary(cell1.summarize().summary);
services2.deltaConnection = containerRuntime2.createDeltaConnection();
const cell2 = new SharedCell("cell2", dataStoreRuntime2, CellFactory.Attributes);
await cell2.load(services2);
// Now connect the first SharedCell
dataStoreRuntime1.local = false;
const containerRuntime1 = containerRuntimeFactory.createContainerRuntime(dataStoreRuntime1);
const services1 = {
deltaConnection: containerRuntime1.createDeltaConnection(),
objectStorage: new MockStorage(),
};
cell1.connect(services1);
// Verify that both the cells have the value.
assert.equal(cell1.get(), value, "The first cell does not have the key");
assert.equal(cell2.get(), value, "The second cell does not have the key");
// Set a new value in the second SharedCell.
const newValue = "newvalue";
cell2.set(newValue);
// Process the message.
containerRuntimeFactory.processAllMessages();
// Verify that both the cells have the new value.
assert.equal(cell1.get(), newValue, "The first cell did not get the new value");
assert.equal(cell2.get(), newValue, "The second cell did not get the new value");
});
});
});
describe("Connected state", () => {
    let cell1: ISharedCell;
    let cell2: ISharedCell;
    let containerRuntimeFactory: MockContainerRuntimeFactory;

    describe("APIs", () => {
        beforeEach(() => {
            containerRuntimeFactory = new MockContainerRuntimeFactory();
            // Connect the first SharedCell.
            cell1 = createConnectedCell("cell1", containerRuntimeFactory);
            // Create a second SharedCell.
            cell2 = createConnectedCell("cell2", containerRuntimeFactory);
        });

        /**
         * Sets a value on the first cell, delivers all queued ops, and
         * verifies that both the local and the remote cell observe it.
         */
        function setAndVerify(value: string): void {
            cell1.set(value);
            containerRuntimeFactory.processAllMessages();
            assert.equal(cell1.get(), "testValue", "Could not retrieve cell value");
            assert.equal(cell2.get(), "testValue", "Could not retrieve cell value from remote client");
        }

        it("Can set and get cell data", () => {
            setAndVerify("testValue");
        });

        it("can delete cell data", () => {
            setAndVerify("testValue");
            // Deleting on one client must propagate to the other.
            cell1.delete();
            containerRuntimeFactory.processAllMessages();
            assert.equal(cell1.get(), undefined, "Could not delete cell value");
            assert.equal(cell2.get(), undefined, "Could not delete cell value from remote client");
        });
    });
});
describe("Reconnection", () => {
    // Reconnection-capable mocks: toggling `connected` on a container runtime
    // simulates dropping and re-establishing the delta connection.
    let containerRuntimeFactory: MockContainerRuntimeFactoryForReconnection;
    let containerRuntime1: MockContainerRuntimeForReconnection;
    let containerRuntime2: MockContainerRuntimeForReconnection;
    let cell1: ISharedCell;
    let cell2: ISharedCell;
    beforeEach(() => {
        containerRuntimeFactory = new MockContainerRuntimeFactoryForReconnection();
        // Connect the first SharedCell.
        const response1 = createCellForReconnection("cell1", containerRuntimeFactory);
        cell1 = response1.cell;
        containerRuntime1 = response1.containerRuntime;
        // Create a second SharedCell.
        const response2 = createCellForReconnection("cell2", containerRuntimeFactory);
        cell2 = response2.cell;
        containerRuntime2 = response2.containerRuntime;
    });
    it("can resend unacked ops on reconnection", async () => {
        const value = "testValue";
        // Set a value on the first SharedCell.
        cell1.set(value);
        // Disconnect and reconnect the first client.
        // The set op above has not been acked yet, so it must be resent.
        containerRuntime1.connected = false;
        containerRuntime1.connected = true;
        // Process the messages.
        containerRuntimeFactory.processAllMessages();
        // Verify that the set value is processed by both clients.
        assert.equal(cell1.get(), value, "The first client did not process the set");
        assert.equal(cell2.get(), value, "The second client did not process the set");
        // Delete the value from the second SharedCell.
        cell2.delete();
        // Disconnect and reconnect the second client.
        containerRuntime2.connected = false;
        containerRuntime2.connected = true;
        // Process the messages.
        containerRuntimeFactory.processAllMessages();
        // Verify that the deleted value is processed by both clients.
        assert.equal(cell1.get(), undefined, "The first client did not process the delete");
        assert.equal(cell2.get(), undefined, "The second client did not process the delete");
    });
    it("can store ops in disconnected state and resend them on reconnection", async () => {
        const value = "testValue";
        // Disconnect the first client.
        containerRuntime1.connected = false;
        // Set a value on the first SharedCell.
        // The op is generated while offline and queued locally.
        cell1.set(value);
        // Reconnect the first client.
        containerRuntime1.connected = true;
        // Process the messages.
        containerRuntimeFactory.processAllMessages();
        // Verify that the set value is processed by both clients.
        assert.equal(cell1.get(), value, "The first client did not process the set");
        assert.equal(cell2.get(), value, "The second client did not process the set");
        // Disconnect the second client.
        containerRuntime2.connected = false;
        // Delete the value from the second SharedCell.
        cell2.delete();
        // Reconnect the second client.
        containerRuntime2.connected = true;
        // Process the messages.
        containerRuntimeFactory.processAllMessages();
        // Verify that the deleted value is processed by both clients.
        assert.equal(cell1.get(), undefined, "The first client did not process the delete");
        assert.equal(cell2.get(), undefined, "The second client did not process the delete");
    });
});
describe("Garbage Collection", () => {
    // Adapter that feeds SharedCell into the shared GC test harness
    // (runGCTests). Routes a cell references via handles are the "outbound
    // routes" the GC summary is expected to report.
    class GCSharedCellProvider implements IGCTestProvider {
        // Counter used to give each locally created sub-cell a unique id.
        private subCellCount = 0;
        // Routes the harness should currently expect in the summary.
        private _expectedRoutes: string[] = [];
        private readonly cell1: ISharedCell;
        private readonly cell2: ISharedCell;
        private readonly containerRuntimeFactory: MockContainerRuntimeFactory;
        constructor() {
            this.containerRuntimeFactory = new MockContainerRuntimeFactory();
            this.cell1 = createConnectedCell("cell1", this.containerRuntimeFactory);
            this.cell2 = createConnectedCell("cell2", this.containerRuntimeFactory);
        }
        public get sharedObject() {
            // Return the remote SharedCell because we want to verify its summary data.
            return this.cell2;
        }
        public get expectedOutboundRoutes() {
            return this._expectedRoutes;
        }
        public async addOutboundRoutes() {
            // Storing a handle in the cell creates one outbound route.
            const newSubCell = createLocalCell(`subCell-${++this.subCellCount}`);
            this.cell1.set(newSubCell.handle);
            this._expectedRoutes = [ newSubCell.handle.absolutePath ];
            this.containerRuntimeFactory.processAllMessages();
        }
        public async deleteOutboundRoutes() {
            // Deleting the cell value removes all outbound routes.
            this.cell2.delete();
            this._expectedRoutes = [];
            this.containerRuntimeFactory.processAllMessages();
        }
        public async addNestedHandles() {
            // Handles nested inside a plain object must still be discovered.
            const newSubCell = createLocalCell(`subCell-${++this.subCellCount}`);
            const newSubCell2 = createLocalCell(`subCell-${++this.subCellCount}`);
            const containingObject = {
                subcellHandle: newSubCell.handle,
                nestedObj: {
                    subcell2Handle: newSubCell2.handle,
                },
            };
            this.cell1.set(containingObject);
            this._expectedRoutes = [ newSubCell.handle.absolutePath, newSubCell2.handle.absolutePath ];
            this.containerRuntimeFactory.processAllMessages();
        }
    }
    runGCTests(GCSharedCellProvider);
});
}); | the_stack |
import * as assert from 'assert';
import { BitArray } from '@zxing/library';
import { QRCodeByteMatrix } from '@zxing/library';
import { QRCodeMatrixUtil } from '@zxing/library';
import { QRCodeDecoderErrorCorrectionLevel } from '@zxing/library';
import { QRCodeVersion } from '@zxing/library';
/**
* @author satorux@google.com (Satoru Takabayashi) - creator
* @author mysen@google.com (Chris Mysen) - ported from C++
*/
describe('QRCodeMatrixUtil', () => {
it('testToString', () => {
    // 3x3 matrix: row 0 = [0,1,0], row 1 = [1,0,1], row 2 left unset (-1).
    const array = new QRCodeByteMatrix(3, 3);
    array.setNumber(0, 0, 0);
    array.setNumber(1, 0, 1);
    array.setNumber(2, 0, 0);
    array.setNumber(0, 1, 1);
    array.setNumber(1, 1, 0);
    array.setNumber(2, 1, 1);
    array.setNumber(0, 2, -1);
    array.setNumber(1, 2, -1);
    array.setNumber(2, 2, -1);
    // Unset (-1) cells render as blanks in the string form.
    const expected: string = ' 0 1 0\n' + ' 1 0 1\n' + ' \n';
    assert.strictEqual(array.toString(), expected);
});
it('testClearMatrix', () => {
    const matrix = new QRCodeByteMatrix(2, 2);
    QRCodeMatrixUtil.clearMatrix(matrix);
    // TYPESCRIPTPORT: we use a UintArray, so the "unset" marker reads back as
    // 255 here instead of the -1 used by the Java/C++ originals.
    assert.strictEqual(matrix.get(0, 0), 255);
    assert.strictEqual(matrix.get(1, 0), 255);
    assert.strictEqual(matrix.get(0, 1), 255);
    assert.strictEqual(matrix.get(1, 1), 255);
});
it('testEmbedBasicPatterns1', () => {
    // QRCodeVersion 1: 21x21 matrix; only finder/timing/dark-module patterns
    // are embedded, data cells stay unset (blank in the string form).
    const matrix = new QRCodeByteMatrix(21, 21);
    QRCodeMatrixUtil.clearMatrix(matrix);
    QRCodeMatrixUtil.embedBasicPatterns(QRCodeVersion.getVersionForNumber(1), matrix);
    const expected: string =
        ' 1 1 1 1 1 1 1 0 0 1 1 1 1 1 1 1\n' +
        ' 1 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1\n' +
        ' 1 0 1 1 1 0 1 0 0 1 0 1 1 1 0 1\n' +
        ' 1 0 1 1 1 0 1 0 0 1 0 1 1 1 0 1\n' +
        ' 1 0 1 1 1 0 1 0 0 1 0 1 1 1 0 1\n' +
        ' 1 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1\n' +
        ' 1 1 1 1 1 1 1 0 1 0 1 0 1 0 1 1 1 1 1 1 1\n' +
        ' 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0\n' +
        ' 1 \n' +
        ' 0 \n' +
        ' 1 \n' +
        ' 0 \n' +
        ' 1 \n' +
        ' 0 0 0 0 0 0 0 0 1 \n' +
        ' 1 1 1 1 1 1 1 0 \n' +
        ' 1 0 0 0 0 0 1 0 \n' +
        ' 1 0 1 1 1 0 1 0 \n' +
        ' 1 0 1 1 1 0 1 0 \n' +
        ' 1 0 1 1 1 0 1 0 \n' +
        ' 1 0 0 0 0 0 1 0 \n' +
        ' 1 1 1 1 1 1 1 0 \n';
    assert.strictEqual(matrix.toString(), expected);
});
it('testEmbedBasicPatterns2', () => {
    // QRCodeVersion 2. Position adjustment pattern should appear at right
    // bottom corner.
    const matrix = new QRCodeByteMatrix(25, 25);
    QRCodeMatrixUtil.clearMatrix(matrix);
    QRCodeMatrixUtil.embedBasicPatterns(QRCodeVersion.getVersionForNumber(2), matrix);
    const expected: string =
        ' 1 1 1 1 1 1 1 0 0 1 1 1 1 1 1 1\n' +
        ' 1 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1\n' +
        ' 1 0 1 1 1 0 1 0 0 1 0 1 1 1 0 1\n' +
        ' 1 0 1 1 1 0 1 0 0 1 0 1 1 1 0 1\n' +
        ' 1 0 1 1 1 0 1 0 0 1 0 1 1 1 0 1\n' +
        ' 1 0 0 0 0 0 1 0 0 1 0 0 0 0 0 1\n' +
        ' 1 1 1 1 1 1 1 0 1 0 1 0 1 0 1 0 1 0 1 1 1 1 1 1 1\n' +
        ' 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0\n' +
        ' 1 \n' +
        ' 0 \n' +
        ' 1 \n' +
        ' 0 \n' +
        ' 1 \n' +
        ' 0 \n' +
        ' 1 \n' +
        ' 0 \n' +
        ' 1 1 1 1 1 1 \n' +
        ' 0 0 0 0 0 0 0 0 1 1 0 0 0 1 \n' +
        ' 1 1 1 1 1 1 1 0 1 0 1 0 1 \n' +
        ' 1 0 0 0 0 0 1 0 1 0 0 0 1 \n' +
        ' 1 0 1 1 1 0 1 0 1 1 1 1 1 \n' +
        ' 1 0 1 1 1 0 1 0 \n' +
        ' 1 0 1 1 1 0 1 0 \n' +
        ' 1 0 0 0 0 0 1 0 \n' +
        ' 1 1 1 1 1 1 1 0 \n';
    assert.strictEqual(matrix.toString(), expected);
});
it('testEmbedTypeInfo', () => {
    // Type info bits = 100000011001110.
    // Only the format-information cells are written; everything else stays unset.
    const matrix = new QRCodeByteMatrix(21, 21);
    QRCodeMatrixUtil.clearMatrix(matrix);
    QRCodeMatrixUtil.embedTypeInfo(QRCodeDecoderErrorCorrectionLevel.M, 5, matrix);
    const expected: string =
        ' 0 \n' +
        ' 1 \n' +
        ' 1 \n' +
        ' 1 \n' +
        ' 0 \n' +
        ' 0 \n' +
        ' \n' +
        ' 1 \n' +
        ' 1 0 0 0 0 0 0 1 1 1 0 0 1 1 1 0\n' +
        ' \n' +
        ' \n' +
        ' \n' +
        ' \n' +
        ' \n' +
        ' 0 \n' +
        ' 0 \n' +
        ' 0 \n' +
        ' 0 \n' +
        ' 0 \n' +
        ' 0 \n' +
        ' 1 \n';
    assert.strictEqual(matrix.toString(), expected);
});
it('testEmbedVersionInfo', () => {
    // QRCodeVersion info bits = 000111 110010 010100
    // Actually, version 7 QR Code has 45x45 matrix but we use 21x21 here
    // since 45x45 matrix is too big to depict.
    const matrix = new QRCodeByteMatrix(21, 21);
    QRCodeMatrixUtil.clearMatrix(matrix);
    QRCodeMatrixUtil.maybeEmbedVersionInfo(QRCodeVersion.getVersionForNumber(7), matrix);
    const expected: string =
        ' 0 0 1 \n' +
        ' 0 1 0 \n' +
        ' 0 1 0 \n' +
        ' 0 1 1 \n' +
        ' 1 1 1 \n' +
        ' 0 0 0 \n' +
        ' \n' +
        ' \n' +
        ' \n' +
        ' \n' +
        ' 0 0 0 0 1 0 \n' +
        ' 0 1 1 1 1 0 \n' +
        ' 1 0 0 1 1 0 \n' +
        ' \n' +
        ' \n' +
        ' \n' +
        ' \n' +
        ' \n' +
        ' \n' +
        ' \n' +
        ' \n';
    assert.strictEqual(matrix.toString(), expected);
});
it('testEmbedDataBits', () => {
    // Cells other than basic patterns should be filled with zero.
    // An empty BitArray means every data cell gets padded with 0 bits.
    const matrix = new QRCodeByteMatrix(21, 21);
    QRCodeMatrixUtil.clearMatrix(matrix);
    QRCodeMatrixUtil.embedBasicPatterns(QRCodeVersion.getVersionForNumber(1), matrix);
    const bits = new BitArray();
    QRCodeMatrixUtil.embedDataBits(bits, 255, matrix);
    const expected: string =
        ' 1 1 1 1 1 1 1 0 0 0 0 0 0 0 1 1 1 1 1 1 1\n' +
        ' 1 0 0 0 0 0 1 0 0 0 0 0 0 0 1 0 0 0 0 0 1\n' +
        ' 1 0 1 1 1 0 1 0 0 0 0 0 0 0 1 0 1 1 1 0 1\n' +
        ' 1 0 1 1 1 0 1 0 0 0 0 0 0 0 1 0 1 1 1 0 1\n' +
        ' 1 0 1 1 1 0 1 0 0 0 0 0 0 0 1 0 1 1 1 0 1\n' +
        ' 1 0 0 0 0 0 1 0 0 0 0 0 0 0 1 0 0 0 0 0 1\n' +
        ' 1 1 1 1 1 1 1 0 1 0 1 0 1 0 1 1 1 1 1 1 1\n' +
        ' 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0\n' +
        ' 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0\n' +
        ' 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0\n' +
        ' 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0\n' +
        ' 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0\n' +
        ' 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0\n' +
        ' 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0\n' +
        ' 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0\n' +
        ' 1 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0\n' +
        ' 1 0 1 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0\n' +
        ' 1 0 1 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0\n' +
        ' 1 0 1 1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0\n' +
        ' 1 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0\n' +
        ' 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0\n';
    assert.strictEqual(matrix.toString(), expected);
});
it('testBuildMatrix', () => {
    // From http://www.swetake.com/qr/qr7.html
    // Code words for "01234567" at EC level H, version 1.
    const bytes = Uint16Array.from([32, 65, 205, 69, 41, 220, 46, 128, 236,
        42, 159, 74, 221, 244, 169, 239, 150, 138,
        70, 237, 85, 224, 96, 74, 219, 61]);
    const bits = new BitArray();
    // Append each code word as 8 bits; for..of over the typed array replaces
    // the original manual index loop without changing iteration order.
    for (const c of bytes) {
        bits.appendBits(c, 8);
    }
    const matrix = new QRCodeByteMatrix(21, 21);
    QRCodeMatrixUtil.buildMatrix(bits,
        QRCodeDecoderErrorCorrectionLevel.H,
        QRCodeVersion.getVersionForNumber(1), // QRCodeVersion 1
        3, // Mask pattern 3
        matrix);
    const expected: string =
        ' 1 1 1 1 1 1 1 0 0 1 1 0 0 0 1 1 1 1 1 1 1\n' +
        ' 1 0 0 0 0 0 1 0 0 0 0 0 0 0 1 0 0 0 0 0 1\n' +
        ' 1 0 1 1 1 0 1 0 0 0 0 1 0 0 1 0 1 1 1 0 1\n' +
        ' 1 0 1 1 1 0 1 0 0 1 1 0 0 0 1 0 1 1 1 0 1\n' +
        ' 1 0 1 1 1 0 1 0 1 1 0 0 1 0 1 0 1 1 1 0 1\n' +
        ' 1 0 0 0 0 0 1 0 0 0 1 1 1 0 1 0 0 0 0 0 1\n' +
        ' 1 1 1 1 1 1 1 0 1 0 1 0 1 0 1 1 1 1 1 1 1\n' +
        ' 0 0 0 0 0 0 0 0 1 1 0 1 1 0 0 0 0 0 0 0 0\n' +
        ' 0 0 1 1 0 0 1 1 1 0 0 1 1 1 1 0 1 0 0 0 0\n' +
        ' 1 0 1 0 1 0 0 0 0 0 1 1 1 0 0 1 0 1 1 1 0\n' +
        ' 1 1 1 1 0 1 1 0 1 0 1 1 1 0 0 1 1 1 0 1 0\n' +
        ' 1 0 1 0 1 1 0 1 1 1 0 0 1 1 1 0 0 1 0 1 0\n' +
        ' 0 0 1 0 0 1 1 1 0 0 0 0 0 0 1 0 1 1 1 1 1\n' +
        ' 0 0 0 0 0 0 0 0 1 1 0 1 0 0 0 0 0 1 0 1 1\n' +
        ' 1 1 1 1 1 1 1 0 1 1 1 1 0 0 0 0 1 0 1 1 0\n' +
        ' 1 0 0 0 0 0 1 0 0 0 0 1 0 1 1 1 0 0 0 0 0\n' +
        ' 1 0 1 1 1 0 1 0 0 1 0 0 1 1 0 0 1 0 0 1 1\n' +
        ' 1 0 1 1 1 0 1 0 1 1 0 1 0 0 0 0 0 1 1 1 0\n' +
        ' 1 0 1 1 1 0 1 0 1 1 1 1 0 0 0 0 1 1 1 0 0\n' +
        ' 1 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 1 0 1 0 0\n' +
        ' 1 1 1 1 1 1 1 0 0 0 1 1 1 1 1 0 1 0 0 1 0\n';
    assert.strictEqual(matrix.toString(), expected);
});
it('testFindMSBSet', () => {
    // findMSBSet returns the 1-based position of the most significant set bit
    // (0 for input 0).
    assert.strictEqual(QRCodeMatrixUtil.findMSBSet(0), 0);
    assert.strictEqual(QRCodeMatrixUtil.findMSBSet(1), 1);
    assert.strictEqual(QRCodeMatrixUtil.findMSBSet(0x80), 8);
    assert.strictEqual(QRCodeMatrixUtil.findMSBSet(0x80000000), 32);
});
it('testCalculateBCHCode', () => {
    // Encoding of type information.
    // From Appendix C in JISX0510:2004 (p 65)
    assert.strictEqual(QRCodeMatrixUtil.calculateBCHCode(5, 0x537), 0xdc);
    // From http://www.swetake.com/qr/qr6.html
    assert.strictEqual(QRCodeMatrixUtil.calculateBCHCode(0x13, 0x537), 0x1c2);
    // From http://www.swetake.com/qr/qr11.html
    assert.strictEqual(QRCodeMatrixUtil.calculateBCHCode(0x1b, 0x537), 0x214);
    // Encoding of version information.
    // From Appendix D in JISX0510:2004 (p 68)
    assert.strictEqual(QRCodeMatrixUtil.calculateBCHCode(7, 0x1f25), 0xc94);
    assert.strictEqual(QRCodeMatrixUtil.calculateBCHCode(8, 0x1f25), 0x5bc);
    assert.strictEqual(QRCodeMatrixUtil.calculateBCHCode(9, 0x1f25), 0xa99);
    assert.strictEqual(QRCodeMatrixUtil.calculateBCHCode(10, 0x1f25), 0x4d3);
    assert.strictEqual(QRCodeMatrixUtil.calculateBCHCode(20, 0x1f25), 0x9a6);
    assert.strictEqual(QRCodeMatrixUtil.calculateBCHCode(30, 0x1f25), 0xd75);
    assert.strictEqual(QRCodeMatrixUtil.calculateBCHCode(40, 0x1f25), 0xc69);
});
// We don't test a lot of cases in this function since we've already
// tested them in TEST(calculateBCHCode).
it('testMakeVersionInfoBits', () => {
    // From Appendix D in JISX0510:2004 (p 68)
    const bits = new BitArray();
    QRCodeMatrixUtil.makeVersionInfoBits(QRCodeVersion.getVersionForNumber(7), bits);
    // BitArray.toString renders set bits as 'X' and clear bits as '.'.
    assert.strictEqual(bits.toString(), ' ...XXXXX ..X..X.X ..');
});
// We don't test a lot of cases in this function since we've already
// tested them in TEST(calculateBCHCode).
it('testMakeTypeInfoInfoBits', () => {
    // From Appendix C in JISX0510:2004 (p 65)
    const bits = new BitArray();
    QRCodeMatrixUtil.makeTypeInfoBits(QRCodeDecoderErrorCorrectionLevel.M, 5, bits);
    assert.strictEqual(bits.toString(), ' X......X X..XXX.');
});
}); | the_stack |
import { Injectable } from '@angular/core';
import { BehaviorSubject, of, pipe } from 'rxjs';
import { switchMap, tap } from 'rxjs/operators';
import { Participation } from 'app/entities/participation/participation.model';
import { Result } from 'app/entities/result.model';
import { Exercise } from 'app/entities/exercise.model';
import { StudentParticipation } from 'app/entities/participation/student-participation.model';
import { ParticipationService } from 'app/exercises/shared/participation/participation.service';
import { JhiWebsocketService } from 'app/core/websocket/websocket.service';
import dayjs from 'dayjs/esm';
import { ProgrammingExercise } from 'app/entities/programming-exercise.model';
// Websocket destination delivering new results for participations of the current user.
const PERSONAL_PARTICIPATION_TOPIC = `/user/topic/newResults`;
// Websocket destination broadcasting new results for all participations of one exercise.
const EXERCISE_PARTICIPATION_TOPIC = (exerciseId: number) => `/topic/exercise/${exerciseId}/newResults`;
/**
 * Public contract of the websocket-backed participation service: caches
 * participations, exposes observables for participation/result updates, and
 * manages websocket topic subscriptions per participation/exercise.
 */
export interface IParticipationWebsocketService {
    // Adds/updates a participation in the cache and notifies subscribers.
    addParticipation: (participation: Participation, exercise?: Exercise) => void;
    // Returns the cached (possibly merged) student participation for an exercise.
    getParticipationForExercise: (exerciseId: number) => StudentParticipation | undefined;
    // Stream of every participation change observed by the service.
    subscribeForParticipationChanges: () => BehaviorSubject<Participation | undefined>;
    // Stream of the latest result of one participation; opens a websocket subscription if needed.
    subscribeForLatestResultOfParticipation: (participationId: number, personal: boolean, exerciseId?: number) => BehaviorSubject<Result | undefined>;
    // Tears down the websocket subscription for a participation (if the exercise is over).
    unsubscribeForLatestResultOfParticipation: (participationId: number, exercise: Exercise) => void;
    // Pushes a result through the same pipeline as websocket-delivered results.
    notifyAllResultSubscribers: (result: Result) => void;
}
@Injectable({ providedIn: 'root' })
export class ParticipationWebsocketService implements IParticipationWebsocketService {
cachedParticipations: Map<number /* ID of participation */, StudentParticipation> = new Map<number, StudentParticipation>();
openResultWebsocketSubscriptions: Map<number /*ID of participation */, string /* url of websocket connection */> = new Map<number, string>();
openPersonalWebsocketSubscription?: string; /* url of websocket connection */
resultObservables: Map<number /* ID of participation */, BehaviorSubject<Result | undefined>> = new Map<number, BehaviorSubject<Result>>();
participationObservable?: BehaviorSubject<Participation | undefined>;
subscribedExercises: Map<number /* ID of exercise */, Set<number> /* IDs of the participations of this exercise */> = new Map<number, Set<number>>();
participationSubscriptionTypes: Map<number /* ID of participation */, boolean /* Whether the participation was subscribed in personal mode */> = new Map<number, boolean>();
constructor(private jhiWebsocketService: JhiWebsocketService, private participationService: ParticipationService) {}
private getNotifyAllSubscribersPipe = () => {
return pipe(tap(this.notifyResultSubscribers), switchMap(this.addResultToParticipation), tap(this.notifyParticipationSubscribers));
};
/**
* remove all local participations
*/
public resetLocalCache() {
const participations = this.getAllParticipations();
participations.forEach((participation) => {
this.cachedParticipations.delete(participation.id!);
this.removeParticipation(participation.id!, participation.exercise?.id);
});
this.cachedParticipations = new Map<number, StudentParticipation>();
this.resultObservables = new Map<number, BehaviorSubject<Result>>();
this.participationObservable = undefined;
this.subscribedExercises = new Map<number, Set<number>>();
this.participationSubscriptionTypes = new Map<number, boolean>();
}
/**
* Notify all participation subscribers with the newest participation value (e.g. if the result has changed).
* @param participation
*/
private notifyParticipationSubscribers = (participation: Participation) => {
if (!this.participationObservable) {
this.participationObservable = new BehaviorSubject(participation);
} else {
this.participationObservable.next(participation);
}
};
/**
* Notify all result subscribers with the newest result provided.
* @param result
*/
private notifyResultSubscribers = (result: Result) => {
const resultObservable = this.resultObservables.get(result.participation!.id!);
// TODO: We never convert the date strings of the result (e.g. completionDate) to a Dayjs object
// this could be an issue in some parts of app when a formatted date is needed.
if (!resultObservable) {
this.resultObservables.set(result.participation!.id!, new BehaviorSubject(result));
} else {
resultObservable.next(result);
}
};
/**
* Update a cachedParticipation with the given result, meaning that the new result will be added to it.
* @param result
*/
private addResultToParticipation = (result: Result) => {
const cachedParticipation = this.cachedParticipations.get(result.participation!.id!);
if (cachedParticipation) {
// update the results with the new received one by filtering the old result
const updatedResults = [...(cachedParticipation.results || [])].filter((r) => r.id !== result.id);
updatedResults.push(result);
// create a clone
this.cachedParticipations.set(result.participation!.id!, { ...cachedParticipation, results: updatedResults } as StudentParticipation);
return of(this.cachedParticipations.get(result.participation!.id!));
}
return of();
};
/**
* This adds a participation to the cached data maps. The exercise information is required to find the correct
* participations for a given exercise. Please note: we explicitly do not want to use websockets here!
*
* @param newParticipation The new participation for the cached data maps
* @param exercise (optional) The exercise that the participation belongs to. Only needed if exercise is missing in participation.
*/
public addParticipation = (newParticipation: StudentParticipation, exercise?: Exercise) => {
// The participation needs to be cloned so that the original object is not modified
const participation = { ...newParticipation } as StudentParticipation;
if (!participation.exercise && !exercise) {
throw new Error('a link from the participation to the exercise is required. Please attach it manually or add exercise as function input');
}
participation.exercise = participation.exercise || exercise;
this.cachedParticipations.set(participation.id!, participation);
this.notifyParticipationSubscribers(participation);
};
/**
* Returns all participations for all exercises. The participation objects include the exercise data and all results.
* @return array of Participations
*/
private getAllParticipations(): StudentParticipation[] {
return [...this.cachedParticipations.values()];
}
/**
* Returns the student participation for the given exercise. The participation objects include the exercise data and all results.
*
* @param exerciseId ID of the exercise that the participations belong to.
* @return the cached student participation for the exercise or undefined
*/
public getParticipationForExercise(exerciseId: number) {
const participationsForExercise = [...this.cachedParticipations.values()].filter((participation) => {
return participation.exercise?.id === exerciseId;
});
if (participationsForExercise && participationsForExercise.length === 1) {
return participationsForExercise[0];
}
if (participationsForExercise && participationsForExercise.length > 1) {
return this.participationService.mergeStudentParticipations(participationsForExercise);
}
return undefined;
}
/**
* Unsubscribes from the topics used by the participationId, if possible
*
* @param participationId ID of the participation that should not be tracked anymore
* @param exerciseId optional the participationId an exercise that should not be tracked anymore
*/
private removeParticipation(participationId: number, exerciseId?: number) {
const subscriptionTypePersonal = this.participationSubscriptionTypes.get(participationId);
this.participationSubscriptionTypes.delete(participationId);
// We are only interested if there is a value
if (subscriptionTypePersonal != undefined) {
if (subscriptionTypePersonal) {
// The subscription was a personal subscription, so it should only be removed if it was the last of it kind
const openPersonalSubscriptions = [...this.participationSubscriptionTypes.values()].filter((personal: boolean) => personal).length;
if (openPersonalSubscriptions === 0) {
this.jhiWebsocketService.unsubscribe(PERSONAL_PARTICIPATION_TOPIC);
this.openPersonalWebsocketSubscription = undefined;
}
} else {
// The subscriptions are non-personal subscriptions, so it should only be removed if it was the last for this exercise
const openSubscriptionsForExercise = this.subscribedExercises.get(exerciseId!);
if (openSubscriptionsForExercise) {
openSubscriptionsForExercise.delete(participationId);
if (openSubscriptionsForExercise.size === 0) {
this.subscribedExercises.delete(exerciseId!);
const subscribedTopic = this.openResultWebsocketSubscriptions.get(exerciseId!);
if (subscribedTopic) {
this.jhiWebsocketService.unsubscribe(subscribedTopic);
this.openResultWebsocketSubscriptions.delete(exerciseId!);
}
}
}
}
}
}
/**
* Checks if a websocket connection for new results to the server already exists.
* If not a new one will be opened.
*
* @param participationId the id of the participation for which the subscription should be opened
* @param personal whether the current user is a participant in the participation.
* @param exerciseId optional exerciseId of the exercise where the participation is part of, only needed if personal == false
*/
private openResultWebsocketSubscriptionIfNotExisting(participationId: number, personal: boolean, exerciseId?: number) {
if ((personal && !this.openPersonalWebsocketSubscription) || (!personal && !this.openResultWebsocketSubscriptions.has(exerciseId!))) {
let participationResultTopic: string;
if (personal) {
participationResultTopic = PERSONAL_PARTICIPATION_TOPIC;
this.openPersonalWebsocketSubscription = participationResultTopic;
} else {
participationResultTopic = EXERCISE_PARTICIPATION_TOPIC(exerciseId!);
this.openResultWebsocketSubscriptions.set(exerciseId!, participationResultTopic);
}
this.participationSubscriptionTypes.set(participationId, personal);
if (!this.subscribedExercises.has(exerciseId!)) {
this.subscribedExercises.set(exerciseId!, new Set<number>());
}
const subscribedParticipations = this.subscribedExercises.get(exerciseId!);
subscribedParticipations!.add(participationId);
this.jhiWebsocketService.subscribe(participationResultTopic);
this.jhiWebsocketService.receive(participationResultTopic).pipe(this.getNotifyAllSubscribersPipe()).subscribe();
}
}
/**
* Notifies the result and participation subscribers with the newest result.
* Note: the result must contain the participation id
*
* @param result The result with which the subscribers get notified
*/
public notifyAllResultSubscribers = (result: Result) => {
of(result).pipe(this.getNotifyAllSubscribersPipe()).subscribe();
};
/**
* Subscribing for general changes in a participation object. This will triggered if a new result is received by the service.
* A received object will be the full participation object including all results and the exercise.
*
* If no observable exists a new one will be created.
*/
public subscribeForParticipationChanges(): BehaviorSubject<Participation | undefined> {
if (!this.participationObservable) {
this.participationObservable = new BehaviorSubject<Participation | undefined>(undefined);
}
return this.participationObservable;
}
/**
* Subscribing to new results of a certain participation. This will be triggered if a new result is received by the service.
* A received Object will be a result object.
*
* If there is no observable for the participation a new one will be created.
*
* @param participationId Id of Participation of which result to subscribe to
* @param personal whether the current user is a participant in the participation.
* @param exerciseId optional exerciseId of the exercise where the participation is part of, only needed if personal == false
*/
public subscribeForLatestResultOfParticipation(participationId: number, personal: boolean, exerciseId?: number): BehaviorSubject<Result | undefined> {
this.openResultWebsocketSubscriptionIfNotExisting(participationId, personal, exerciseId);
let resultObservable = this.resultObservables.get(participationId)!;
if (!resultObservable) {
resultObservable = new BehaviorSubject<Result | undefined>(undefined);
this.resultObservables.set(participationId, resultObservable);
}
return resultObservable;
}
/**
* Unsubscribe from the result
* @param participationId
* @param exercise The exercise to which the participationId belongs to. Needed for deciding whether to unsubscribe from the websocket
*/
public unsubscribeForLatestResultOfParticipation(participationId: number, exercise: Exercise): void {
// Only unsubscribe from websocket, if the exercise is not active any more
let isInactiveProgrammingExercise = false;
if (exercise instanceof ProgrammingExercise) {
const programmingExercise = exercise as ProgrammingExercise;
isInactiveProgrammingExercise =
!!programmingExercise.buildAndTestStudentSubmissionsAfterDueDate && dayjs(programmingExercise.buildAndTestStudentSubmissionsAfterDueDate).isBefore(dayjs());
}
if (isInactiveProgrammingExercise || (exercise.dueDate && dayjs(exercise.dueDate).isBefore(dayjs()))) {
this.removeParticipation(participationId, exercise.id);
}
}
} | the_stack |
* Testing for utils.ts
*/
import 'jasmine';
import {Spec} from '../lib/types';
import * as utils from './utils';
describe('randInt test', () => {
  /** Draws one value from utils.randInt and checks it lies in [lo, hi). */
  const expectInRange = (lo: number, hi: number) => {
    const value = utils.randInt(lo, hi);
    expect(value).not.toBeLessThan(lo);
    expect(value).toBeLessThan(hi);
  };

  it('generates random integers in a given range', async () => {
    expectInRange(1, 5);
    expectInRange(-2, 2);
  });

  it('generates random integers when start and end are equal', async () => {
    // A zero-width range has exactly one possible outcome: the start value.
    const result = utils.randInt(1, 1);
    expect(result).toBe(1);
  });
});
describe('setEquals test', () => {
  it('correctly determines that empty sets are equal', async () => {
    expect(utils.setEquals(new Set([]), new Set([]))).toBe(true);
  });

  it('correctly determines that sets are equal', async () => {
    // Identical element lists must compare equal as sets.
    const equalCases = [['a'], ['a', 'b'], ['a', 'b', 'd']];
    for (const items of equalCases) {
      expect(utils.setEquals(new Set(items), new Set(items))).toBe(true);
    }
  });

  it('correctly determines that sets are not equal', async () => {
    // Pairs differing in at least one element must compare unequal.
    const unequalCases: Array<[string[], string[]]> = [
      [['a'], ['b']],
      [['a', 'd'], ['a', 'c']],
      [['a', 'b', 'd'], ['a', 'c']],
    ];
    for (const [left, right] of unequalCases) {
      expect(utils.setEquals(new Set(left), new Set(right))).toBe(false);
    }
  });
});
describe('arrayContainsSame test', () => {
  it('correctly determines that empty arrays contain the same items',
     async () => {
       const left: string[] = [];
       const right: string[] = [];
       expect(utils.arrayContainsSame(left, right)).toBe(true);
     });

  it('correctly determines that arrays contain the same items', async () => {
    // Identical arrays trivially contain the same items.
    for (const items of [['a'], ['a', 'b', 'd']]) {
      expect(utils.arrayContainsSame(items, items)).toBe(true);
    }
  });

  it('works for arrays with different number of duplicates', async () => {
    // Duplicate counts are ignored; only membership matters.
    const a = ['a', 'b'];
    const b = ['a', 'b', 'b', 'b', 'a'];
    expect(utils.arrayContainsSame(a, b)).toBe(true);
  });

  it('correctly determines that arrays do not contain the same items',
     async () => {
       const unequalCases: Array<[string[], string[]]> = [
         [['a'], ['b']],
         [['a', 'b', 'd'], ['a', 'c', 'd']],
       ];
       for (const [left, right] of unequalCases) {
         expect(utils.arrayContainsSame(left, right)).toBe(false);
       }
     });
});
describe('isLitSubtype test', () => {
  it('finds a subclass', () => {
    // __mro__ mirrors Python's method resolution order; isLitSubtype matches
    // against any name in that chain.
    const spec: Spec = {
      'score': {
        __class__: 'LitType',
        __name__: 'RegressionScore',
        __mro__: ['RegressionScore', 'Scalar', 'LitType', 'object']
      },
    };
    expect(utils.isLitSubtype(spec['score'], 'RegressionScore')).toBe(true);
    expect(utils.isLitSubtype(spec['score'], 'Scalar')).toBe(true);
    expect(utils.isLitSubtype(spec['score'], 'LitType')).toBe(true);
    // 'TextSegment' is not in the MRO chain, so this must be rejected.
    expect(utils.isLitSubtype(spec['score'], 'TextSegment')).toBe(false);
  });
});
describe('findSpecKeys test', () => {
  // Fixture spec covering several type hierarchies: two RegressionScores,
  // a MulticlassPreds, a bare Scalar, and a TextSegment/GeneratedText pair.
  const spec: Spec = {
    'score': {
      __class__: 'LitType',
      __name__: 'RegressionScore',
      __mro__: ['RegressionScore', 'Scalar', 'LitType', 'object']
    },
    'probabilities': {
      __class__: 'LitType',
      __name__: 'MulticlassPreds',
      __mro__: ['MulticlassPreds', 'LitType', 'object'],
      null_idx: 0
    },
    'score2': {
      __class__: 'LitType',
      __name__: 'RegressionScore',
      __mro__: ['RegressionScore', 'Scalar', 'LitType', 'object']
    },
    'scalar_foo': {
      __class__: 'LitType',
      __name__: 'Scalar',
      __mro__: ['Scalar', 'LitType', 'object']
    },
    'segment': {
      __class__: 'LitType',
      __name__: 'TextSegment',
      __mro__: ['TextSegment', 'LitType', 'object']
    },
    'generated_text': {
      __class__: 'LitType',
      __name__: 'GeneratedText',
      __mro__: ['GeneratedText', 'TextSegment', 'LitType', 'object'],
      parent: 'segment'
    }
  };
  it('finds all spec keys that match the specified types', () => {
    // Key is in spec.
    expect(utils.findSpecKeys(spec, 'RegressionScore')).toEqual([
      'score', 'score2'
    ]);
    expect(utils.findSpecKeys(spec, 'MulticlassPreds')).toEqual([
      'probabilities'
    ]);
    // Keys are in spec.
    expect(utils.findSpecKeys(spec, [
      'MulticlassPreds', 'RegressionScore'
    ])).toEqual(['score', 'probabilities', 'score2']);
    expect(utils.findSpecKeys(spec, ['GeneratedText'])).toEqual([
      'generated_text'
    ]);
    // Key is not in spec.
    expect(utils.findSpecKeys(spec, ['TokenGradients'])).toEqual([]);
  });
  it('identifies subclass fields', () => {
    // Matching is by MRO membership, so base-class queries pick up subclasses.
    expect(utils.findSpecKeys(spec, 'LitType')).toEqual(Object.keys(spec));
    expect(utils.findSpecKeys(spec, 'TextSegment')).toEqual([
      'segment', 'generated_text'
    ]);
    expect(utils.findSpecKeys(spec, 'Scalar')).toEqual([
      'score', 'score2', 'scalar_foo'
    ]);
  });
});
// utils.flatten flattens exactly one level of nesting (not recursive).
describe('flatten test', () => {
it('flattens a nested array by a single level', async () => {
// Empty array
expect(utils.flatten([])).toEqual([]);
// Nested empty arrays.
expect(utils.flatten([[], []])).toEqual([]);
// Nested arrays.
expect(utils.flatten([[1, 2], [3]])).toEqual([1, 2, 3]);
expect(utils.flatten([[1, 2], [], [3], [7, 8, 1]])).toEqual([
1, 2, 3, 7, 8, 1
]);
});
});
// utils.permute(arr, order) returns arr reordered so that output[i] is
// arr[order[i]].
describe('permute test', () => {
it('permutes an array correctly', async () => {
expect(utils.permute([], [])).toEqual([]);
expect(utils.permute([0, 1, 2], [0, 1, 2])).toEqual([0, 1, 2]);
expect(utils.permute([0, 1, 2], [2, 1, 0])).toEqual([2, 1, 0]);
expect(utils.permute([5, 6, 7, 8], [3, 1, 0, 2])).toEqual([8, 6, 5, 7]);
});
});
// utils.handleEnterKey invokes the callback only for the Enter key.
describe('handleEnterKey test', () => {
it('Handles input correctly', () => {
const callback = jasmine.createSpy('callback');
const event = new KeyboardEvent('keyup', {key: 'Enter'});
utils.handleEnterKey(event, callback);
expect(callback).toHaveBeenCalled();
});
it('Is not called for other keys', () => {
const callback = jasmine.createSpy('callback');
const event = new KeyboardEvent('keyup', {key: 'Delete'});
utils.handleEnterKey(event, callback);
expect(callback).not.toHaveBeenCalled();
});
});
// Expected values are the sigmoid of the margin, asserted exactly
// (toEqual, not toBeCloseTo), pinning the implementation's float output.
describe('getThresholdFromMargin test', () => {
it('Works as expected in the basic case', () => {
expect(utils.getThresholdFromMargin(-5)).toEqual(0.0066928509242848554);
expect(utils.getThresholdFromMargin(-2.5)).toEqual(0.07585818002124355);
expect(utils.getThresholdFromMargin(1)).toEqual(0.7310585786300049);
expect(utils.getThresholdFromMargin(5 / 3)).toEqual(0.8411308951190849);
expect(utils.getThresholdFromMargin(5)).toEqual(0.9933071490757153);
});
it('Behaves correctly for 0 input', () => {
expect(utils.getThresholdFromMargin(0)).toEqual(.5);
});
});
// utils.shortenId truncates an id to its first 6 characters; null maps to
// undefined rather than throwing.
describe('shortenID test', () => {
it('Shortens an id to 6 characters, for display', () => {
expect(utils.shortenId('b6ea684bec7bb1d4b9f2736b749c3030'))
.toEqual('b6ea68');
expect(utils.shortenId('924d4976b4ac56d053ed956671652892'))
.toEqual('924d49');
});
it('Behaves correctly for null input', () => {
expect(utils.shortenId(null)).toEqual(undefined);
});
});
// utils.isNumber accepts finite numbers and numeric strings, and rejects
// NaN, infinities, and non-numeric strings.
describe('isNumber test', () => {
it('Returns true for normal numbers', () => {
expect(utils.isNumber(4)).toEqual(true);
expect(utils.isNumber(9999999)).toEqual(true);
expect(utils.isNumber(-9999999)).toEqual(true);
expect(utils.isNumber(0)).toEqual(true);
});
it('Works for text numbers', () => {
expect(utils.isNumber('0')).toEqual(true);
expect(utils.isNumber('12')).toEqual(true);
});
it('Returns false for infinite numbers', () => {
expect(utils.isNumber(Number.POSITIVE_INFINITY)).toEqual(false);
expect(utils.isNumber(Number.NEGATIVE_INFINITY)).toEqual(false);
});
it('Returns false for non-numbers', () => {
expect(utils.isNumber(NaN)).toEqual(false);
expect(utils.isNumber('asdf')).toEqual(false);
expect(utils.isNumber('twelve')).toEqual(false);
});
});
// utils.sumArray: plain numeric sum, including negatives.
describe('sumArray test', () => {
it('Correctly sums a normal array', () => {
let arr = [2, 0, 1];
expect(utils.sumArray(arr)).toEqual(3);
arr = [5, 2, 1, 3, 0, 2, 1];
expect(utils.sumArray(arr)).toEqual(14);
});
it('Correctly sums an array with negative numbers', () => {
const arr = [-1, 3, 0, -4];
expect(utils.sumArray(arr)).toEqual(-2);
});
});
// utils.range(n): [0, 1, ..., n-1], like Python's range.
describe('range test', () => {
it('Creates a range() array from a number', () => {
// Empty array
expect(utils.range(0)).toEqual([]);
// Standard arrays
expect(utils.range(1)).toEqual([0]);
expect(utils.range(4)).toEqual([0, 1, 2, 3]);
});
});
// utils.cumSumArray: running (prefix) sums, same length as the input.
describe('cumSumArray test', () => {
it('cumulatively sums an array', () => {
// Standard arrays
let arr = [2, 0, 1];
expect(utils.cumSumArray(arr)).toEqual([2, 2, 3]);
arr = [5, 2, 1, 3, 0, 2, 1];
expect(utils.cumSumArray(arr)).toEqual([5, 7, 8, 11, 11, 13, 14]);
});
it('cumulatively sums an array with negative numbers', () => {
const arr = [-1, 3, 0, -4];
expect(utils.cumSumArray(arr)).toEqual([-1, 2, 2, -2]);
});
});
// utils.compareArrays: lexicographic comparison returning -1/0/1.
// A longer array with an equal prefix compares greater.
describe('compareArrays test', () => {
it('Correctly tests normal comparison', () => {
// Shorter arrays.
let a = [2, 0];
let b = [1];
expect(utils.compareArrays(a, b)).toBe(1);
// Longer arrays.
a = [5, 9, 24, 1, 0, 0];
b = [2, 10, 40, 2, 2];
expect(utils.compareArrays(a, b)).toBe(1);
// When a < b.
a = [2, 10, 40, 2, 2];
b = [5, 9, 24, 1, 0, 0];
expect(utils.compareArrays(a, b)).toBe(-1);
});
it('Works correctly when b is a prefix of a', () => {
const a = [1, 2, 3, 4, 5];
const b = [1, 2, 3, 4];
expect(utils.compareArrays(a, b)).toBe(1);
});
it('Works correctly when arrays are equal', () => {
// Non-empty arrays.
let a = [3, 5, 8];
let b = [3, 5, 8];
expect(utils.compareArrays(a, b)).toBe(0);
// Empty arrays.
a = [];
b = [];
expect(utils.compareArrays(a, b)).toBe(0);
});
});
// utils.roundToDecimalPlaces(value, places): round-half-up at the given
// number of decimals; negative 'places' is a no-op.
describe('roundToDecimalPlaces test', () => {
it('rounds to zero places correctly', () => {
expect(utils.roundToDecimalPlaces(4.22, 0)).toEqual(4);
expect(utils.roundToDecimalPlaces(-5.6, 0)).toEqual(-6);
});
it('rounds to one place correctly', () => {
expect(utils.roundToDecimalPlaces(4.54, 1)).toEqual(4.5);
expect(utils.roundToDecimalPlaces(4.55, 1)).toEqual(4.6);
});
it('does not add unnecessary decimals', () => {
expect(utils.roundToDecimalPlaces(33, 1)).toEqual(33);
});
it('rounds to two places correctly', () => {
expect(utils.roundToDecimalPlaces(4.546, 2)).toEqual(4.55);
expect(utils.roundToDecimalPlaces(3.333, 2)).toEqual(3.33);
});
it('does not round when given negative places', () => {
expect(utils.roundToDecimalPlaces(4.22, -1)).toEqual(4.22);
});
});
});
import { UserModel } from 'src/chat21-core/models/user';
import { Component, OnInit, Output, EventEmitter, Input, AfterViewInit, ViewChild, ElementRef, OnChanges, HostListener, Renderer2 } from '@angular/core';
import { Chooser } from '@ionic-native/chooser/ngx';
import { IonTextarea, ModalController, ToastController } from '@ionic/angular';
// Pages
import { LoaderPreviewPage } from 'src/app/pages/loader-preview/loader-preview.page';
// Services
import { UploadService } from 'src/chat21-core/providers/abstract/upload.service';
// utils
import { TYPE_MSG_TEXT } from 'src/chat21-core/utils/constants';
// Models
import { UploadModel } from 'src/chat21-core/models/upload';
import { Observable } from 'rxjs';
import { checkPlatformIsMobile } from 'src/chat21-core/utils/utils';
// Logger
import { LoggerService } from 'src/chat21-core/providers/abstract/logger.service';
import { LoggerInstance } from 'src/chat21-core/providers/logger/loggerInstance';
// Chat message-composition widget: an Ionic textarea plus file/paste/drop
// attachment handling. Emits text changes and send events to the parent
// conversation-detail page.
@Component({
selector: 'app-message-text-area',
templateUrl: './message-text-area.component.html',
styleUrls: ['./message-text-area.component.scss'],
})
export class MessageTextAreaComponent implements OnInit, AfterViewInit, OnChanges {
// The Ionic textarea component and its host element, used for focus and
// height adjustments in ngAfterViewInit.
@ViewChild('textArea', { static: false }) messageTextArea: IonTextarea
@ViewChild('message_text_area', { static: false }) message_text_area: ElementRef
// set textArea(element: ElementRef<HTMLInputElement>) {
// if(element) {
// this.logger.log("[CONVS-DETAIL][MSG-TEXT-AREA] ViewChild element ", element);
// element.nativeElement.focus()
// }
// }
// Hidden <input type="file"> used for attachment selection; its value is
// reset after each upload in presentModal().
@ViewChild('fileInput', { static: false }) fileInput: any;
@Input() loggedUser: UserModel;
// Conversation id; "support-group-..." ids get different placeholders.
@Input() conversationWith: string;
// Canned-response suggestions currently matching the typed "/" filter.
@Input() tagsCannedFilter: any = [];
@Input() events: Observable<void>;
@Input() fileUploadAccept: string
// Whether the conversation-info side panel is open (narrows the textarea).
@Input() isOpenInfoConversation: boolean;
@Input() translationMap: Map<string, string>;
// A file-drop event forwarded by the parent; triggers the preview modal.
@Input() dropEvent: any;
@Output() eventChangeTextArea = new EventEmitter<object>();
@Output() eventSendMessage = new EventEmitter<object>();
@Output() onPresentModalScrollToBottom = new EventEmitter<boolean>();
// True when the textarea holds non-whitespace text (enables the send button).
public conversationEnabled = false;
public messageString: string;
public HAS_PASTED: boolean = false;
public toastMsg: string;
// Placeholder currently shown, chosen among the three length variants below
// according to window width / side-panel state.
public TEXAREA_PLACEHOLDER: string;
public LONG_TEXAREA_PLACEHOLDER: string;
public SHORT_TEXAREA_PLACEHOLDER: string;
public SHORTER_TEXAREA_PLACEHOLDER: string;
public currentWindowWidth: any;
private logger: LoggerService = LoggerInstance.getInstance();
// Counts Enter presses; used by onKeydown to defer sending while the
// canned-response "/" picker is active.
public countClicks: number = 0;
TYPE_MSG_TEXT = TYPE_MSG_TEXT;
/**
* Constructor
* @param chooser
* @param modalController
* @param uploadService
* @param toastController
* @param renderer
*/
constructor(
public chooser: Chooser,
public modalController: ModalController,
public uploadService: UploadService,
public toastController: ToastController,
private renderer: Renderer2,
) { }
// ---------------------------------------------------------
// @ Lifehooks
// ---------------------------------------------------------
// Initializes the placeholder text from the current window width.
ngOnInit() {
// this.setSubscriptions();
this.logger.log("[CONVS-DETAIL] [MSG-TEXT-AREA] HELLO !!!!! ");
// this.events.subscribe((cannedmessage) => {
// this.logger.log("[CONVS-DETAIL] [MSG-TEXT-AREA] events.subscribe cannedmessage ", cannedmessage);
// })
// this.logger.log("[CONVS-DETAIL][MSG-TEXT-AREA] LONG_TEXAREA_PLACEHOLDER ", this.LONG_TEXAREA_PLACEHOLDER);
// this.logger.log("[CONVS-DETAIL][MSG-TEXT-AREA] SHORT_TEXAREA_PLACEHOLDER ", this.SHORT_TEXAREA_PLACEHOLDER);
// this.logger.log("[CONVS-DETAIL][MSG-TEXT-AREA] SHORTER_TEXAREA_PLACEHOLDER ", this.SHORTER_TEXAREA_PLACEHOLDER);
this.getWindowWidth();
}
// Refreshes translated placeholders when the translation map arrives,
// opens the attachment preview when a drop event is forwarded by the
// parent, and recomputes the placeholder for the new layout state.
ngOnChanges() {
if (this.translationMap) {
this.LONG_TEXAREA_PLACEHOLDER = this.translationMap.get('LABEL_ENTER_MSG')
this.SHORT_TEXAREA_PLACEHOLDER = this.translationMap.get('LABEL_ENTER_MSG_SHORT')
this.SHORTER_TEXAREA_PLACEHOLDER = this.translationMap.get('LABEL_ENTER_MSG_SHORTER')
}
this.logger.log("[CONVS-DETAIL][MSG-TEXT-AREA] ngOnChanges DROP EVENT ", this.dropEvent);
this.logger.log("[CONVS-DETAIL][MSG-TEXT-AREA] ngOnChanges tagsCannedFilter ", this.tagsCannedFilter);
// use case drop
if (this.dropEvent) {
this.presentModal(this.dropEvent)
}
// if (this.isOpenInfoConversation === true) {
// this.getIfTexareaIsEmpty('ngOnChanges')
this.getWindowWidth();
// }
}
// ngAfterViewInit() {
// After the view renders, reaches into ion-textarea's internal DOM to pin
// the native <textarea> height, then focuses it. The 1500ms delay waits
// for Ionic to finish building the shadow/wrapper structure.
// NOTE(review): walking elTextArea.children assumes a specific internal
// structure of ion-textarea for this Ionic version — verify on upgrades.
ngAfterViewInit() {
this.logger.log("[CONVS-DETAIL][MSG-TEXT-AREA] ngAfterViewInit message_text_area ", this.message_text_area);
this.logger.log("[CONVS-DETAIL][MSG-TEXT-AREA] ngAfterViewInit messageTextArea ", this.messageTextArea);
if (this.messageTextArea) {
setTimeout(() => {
const elTextArea = this.message_text_area['el'];
// this.logger.log("[CONVS-DETAIL][MSG-TEXT-AREA] ngAfterViewInit elTextArea ", elTextArea);
// this.logger.log("[CONVS-DETAIL][MSG-TEXT-AREA] ngAfterViewInit elTextArea children", elTextArea.children);
if (elTextArea.children.length === 1) {
const elTextAreaWrapper = elTextArea.children[0]
// this.logger.log("[CONVS-DETAIL][MSG-TEXT-AREA] ngAfterViewInit elTextAreaWrapper", elTextAreaWrapper);
if (elTextAreaWrapper.children.length === 1) {
const elNativeTearea = elTextAreaWrapper.children[0]
// this.logger.log("[CONVS-DETAIL][MSG-TEXT-AREA] ngAfterViewInit elNativeTearea", elNativeTearea);
elNativeTearea.setAttribute("style", "height: 37px !important; ");
}
}
// this.logger.log("[CONVS-DETAIL][MSG-TEXT-AREA] set focus on ", this.messageTextArea);
// Keyboard.show() // for android
this.messageTextArea.setFocus();
}, 1500); //a least 150ms.
}
}
/**
 * Caches the current window width and picks the textarea placeholder
 * variant (long / short / shorter) for it.
 *
 * Desktop, support-group conversations: width >= 844 with the info panel
 * closed gets the long placeholder; narrower widths and/or an open info
 * panel step down to the short/shorter variants. Non-support-group
 * conversations always use the short variant. On mobile, width alone
 * decides: <= 430 short, <= 273 shorter.
 */
getWindowWidth(): any {
  this.currentWindowWidth = window.innerWidth;
  const width = this.currentWindowWidth;
  const isSupportGroup = this.conversationWith.startsWith("support-group");
  if (!isSupportGroup) {
    this.TEXAREA_PLACEHOLDER = this.SHORT_TEXAREA_PLACEHOLDER;
  } else if (width >= 844 && this.isOpenInfoConversation === false) {
    this.TEXAREA_PLACEHOLDER = this.LONG_TEXAREA_PLACEHOLDER;
    this.logger.log('[CONVS-DETAIL][MSG-TEXT-AREA] currentWindowWidth', this.currentWindowWidth, ' - DISPLAY LONG_TEXAREA_PLACEHOLDER ');
  } else if (width >= 844 && this.isOpenInfoConversation === true) {
    this.TEXAREA_PLACEHOLDER = this.SHORT_TEXAREA_PLACEHOLDER;
  } else if (width < 844 && this.isOpenInfoConversation === false) {
    this.TEXAREA_PLACEHOLDER = this.SHORT_TEXAREA_PLACEHOLDER;
  } else if (width < 844 && this.isOpenInfoConversation === true) {
    this.TEXAREA_PLACEHOLDER = this.SHORTER_TEXAREA_PLACEHOLDER;
  }
  // Mobile overrides the desktop choice purely by width.
  if (checkPlatformIsMobile() === true) {
    if (width <= 430 && width >= 274) {
      this.TEXAREA_PLACEHOLDER = this.SHORT_TEXAREA_PLACEHOLDER;
    } else if (width <= 273) {
      this.TEXAREA_PLACEHOLDER = this.SHORTER_TEXAREA_PLACEHOLDER;
    }
  }
}
// -------------------------------------------------------------------------------------------
// Change the placeholder of the 'send message' textarea according to the width of the window
// -------------------------------------------------------------------------------------------
/**
 * Window-resize handler: re-runs the same placeholder selection as
 * getWindowWidth(), but driven by the live resize width.
 */
@HostListener('window:resize', ['$event'])
onResize(event) {
  const width = event.target.innerWidth;
  // Keep the cached width in sync so later readers don't see a stale value.
  this.currentWindowWidth = width;
  this.logger.log("[CONVS-DETAIL][MSG-TEXT-AREA] event.target.innerWidth; ", event.target.innerWidth);
  if (width >= 844 && this.isOpenInfoConversation === false && this.conversationWith.startsWith("support-group")) {
    this.TEXAREA_PLACEHOLDER = this.LONG_TEXAREA_PLACEHOLDER;
  } else if (width >= 844 && this.isOpenInfoConversation === true && this.conversationWith.startsWith("support-group")) {
    this.TEXAREA_PLACEHOLDER = this.SHORT_TEXAREA_PLACEHOLDER;
  } else if (width < 844 && this.isOpenInfoConversation === false && this.conversationWith.startsWith("support-group")) {
    this.TEXAREA_PLACEHOLDER = this.SHORT_TEXAREA_PLACEHOLDER;
  } else if (width < 844 && this.isOpenInfoConversation === true && this.conversationWith.startsWith("support-group")) {
    this.TEXAREA_PLACEHOLDER = this.SHORTER_TEXAREA_PLACEHOLDER;
  } else if (!this.conversationWith.startsWith("support-group")) {
    this.TEXAREA_PLACEHOLDER = this.SHORT_TEXAREA_PLACEHOLDER;
  }
  if (checkPlatformIsMobile() === true) {
    if (width <= 430 && width >= 274) {
      this.TEXAREA_PLACEHOLDER = this.SHORT_TEXAREA_PLACEHOLDER;
    } else if (width <= 273) {
      // Bug fix: this branch previously tested this.currentWindowWidth,
      // which was never updated here, so on small mobile screens the
      // shorter placeholder was chosen from a stale width instead of the
      // live resize width (cf. getWindowWidth()).
      this.TEXAREA_PLACEHOLDER = this.SHORTER_TEXAREA_PLACEHOLDER;
    }
  }
}
// Paste handler for the textarea.
// - Pasted images are wrapped into a DataTransfer and sent to the
//   attachment preview modal; the textarea is cleared shortly after so the
//   image placeholder text doesn't linger.
// - "application/*" clipboard items are rejected with an error toast.
// - Anything else (plain text) falls through to the default paste.
onPaste(event: any) {
this.logger.log("[CONVS-DETAIL][MSG-TEXT-AREA] onPaste DROP EVENT ", this.dropEvent);
// Clear any pending drop so ngOnChanges doesn't re-open the preview modal.
this.dropEvent = undefined
this.logger.log("[CONVS-DETAIL][MSG-TEXT-AREA] onPaste event ", event);
const items = (event.clipboardData || event.originalEvent.clipboardData).items;
let file = null;
this.logger.log("[CONVS-DETAIL][MSG-TEXT-AREA] onPaste items ", items);
for (const item of items) {
this.logger.log("[CONVS-DETAIL][MSG-TEXT-AREA] onPaste item ", item);
this.logger.log("[CONVS-DETAIL][MSG-TEXT-AREA] onPaste item.type ", item.type);
if (item.type.startsWith("image")) {
let content = event.clipboardData.getData('text/plain');
this.logger.log("[CONVS-DETAIL][MSG-TEXT-AREA] onPaste content ", content);
// Clear the input on the next tick, after the paste has been applied.
setTimeout(() => {
this.messageString = "";
}, 100);
this.logger.log("[CONVS-DETAIL][MSG-TEXT-AREA] onPaste item.type ", item.type);
file = item.getAsFile();
// Re-wrap the file in a DataTransfer so presentModal() receives the
// same shape (.files) as a native drop/selection event.
const data = new ClipboardEvent('').clipboardData || new DataTransfer();
data.items.add(new File([file], file.name, { type: file.type }));
this.logger.log("[CONVS-DETAIL][MSG-TEXT-AREA] onPaste data ", data);
this.logger.log("[CONVS-DETAIL][MSG-TEXT-AREA] onPaste file ", file);
this.presentModal(data);
} else if (item.type.startsWith("application")) {
event.preventDefault();
this.presentToastOnlyImageFilesAreAllowed();
// let content = event.clipboardData.getData('text/plain');
// this.logger.log("[CONVS-DETAIL] [MSG-TEXT-AREA] onPaste else content ", content);
// setTimeout(() => {
// this.messageString = "";
// }, 0)
this.logger.log('[CONVS-DETAIL][MSG-TEXT-AREA] onPaste file NOT SUPPORTED FILE TYPE');
}
}
}
// Change handler of the hidden file input: forwards the selection event to
// the attachment preview modal.
onFileSelected(e: any) {
this.logger.log('[CONVS-DETAIL][MSG-TEXT-AREA] - onFileSelected event', e);
this.presentModal(e);
}
/**
* Opens the LoaderPreviewPage modal for a file coming from one of three
* sources — file-input change, drag-and-drop, or paste — then, when the
* modal is confirmed, uploads the file and emits the resulting message.
*
* @param e the originating event: a 'change' input event, a 'drop' event,
*          or (paste path) a DataTransfer-like object exposing .files
*/
private async presentModal(e: any): Promise<any> {
this.onPresentModalScrollToBottom.emit(true);
const that = this;
let dataFiles = " "
// Normalize the three event shapes into a FileList-like value.
if (e.type === 'change') {
this.logger.log('[CONVS-DETAIL][MSG-TEXT-AREA] presentModal change e', e);
this.logger.log('[CONVS-DETAIL][MSG-TEXT-AREA] presentModal change e.target ', e.target);
this.logger.log('[CONVS-DETAIL][MSG-TEXT-AREA] presentModal change e.target.files', e.target.files);
dataFiles = e.target.files;
} else if (e.type === 'drop') {
dataFiles = e.dataTransfer.files
this.logger.log('[CONVS-DETAIL][MSG-TEXT-AREA] presentModal drop e.dataTransfer.files', e.dataTransfer.files);
} else {
// paste use case
dataFiles = e.files
this.logger.log('[CONVS-DETAIL][MSG-TEXT-AREA] presentModal dataFiles when paste', dataFiles);
// const elemTexarea= <HTMLElement>document.querySelector('#ion-textarea .textarea-wrapper textarea')
// const elemTexarea= <HTMLInputElement>document.getElementById('ion-textarea')
// this.logger.log('[CONVS-DETAIL] [MSG-TEXT-AREA] presentModal elemTexarea when paste', elemTexarea);
// let textarea_value = elemTexarea.value
// this.logger.log('[CONVS-DETAIL] [MSG-TEXT-AREA] presentModal textarea_value when paste', textarea_value);
// textarea_value = ""
}
// this.logger.log('presentModal e.target.files.length', e.target.files.length);
const attributes = { files: dataFiles, enableBackdropDismiss: false };
this.logger.log('[CONVS-DETAIL][MSG-TEXT-AREA] attributes', attributes);
const modal: HTMLIonModalElement =
await this.modalController.create({
component: LoaderPreviewPage,
componentProps: attributes,
swipeToClose: false,
backdropDismiss: true
});
// On dismissal with data: classify the file (image vs generic), upload it,
// then emit the message carrying the download URL in its metadata.
modal.onDidDismiss().then((detail: any) => {
this.logger.log('presentModal onDidDismiss detail', detail);
if (detail.data !== undefined) {
let type = ''
// SVGs are deliberately sent as plain files, not inline images.
if (detail.data.fileSelected.type && detail.data.fileSelected.type.startsWith("image") && (!detail.data.fileSelected.type.includes('svg'))) {
this.logger.log('[CONVS-DETAIL][MSG-TEXT-AREA] FIREBASE-UPLOAD presentModal onDidDismiss detail type ', detail.data.fileSelected.type);
type = 'image'
// if ((detail.data.fileSelected.type && detail.data.fileSelected.type.startsWith("application")) || (detail.data.fileSelected.type && detail.data.fileSelected.type === 'image/svg+xml'))
} else {
this.logger.log('[CONVS-DETAIL][MSG-TEXT-AREA] FIREBASE-UPLOAD presentModal onDidDismiss detail type ', detail.data.fileSelected.type);
type = 'file'
}
// Only the first file of the selection/drop/paste is uploaded.
let fileSelected = null;
if (e.type === 'change') {
fileSelected = e.target.files.item(0);
} else if (e.type === 'drop') {
this.logger.log('[CONVS-DETAIL][MSG-TEXT-AREA] FIREBASE-UPLOAD [MSG-TEXT-AREA] DROP dataFiles[0]', dataFiles[0])
fileSelected = dataFiles[0]
// const fileList = e.dataTransfer.files;
// this.logger.log('FIREBASE-UPLOAD [MSG-TEXT-AREA] DROP fileList', fileList)
// const file: File = fileList[0];
// this.logger.log('FIREBASE-UPLOAD [MSG-TEXT-AREA] DROP FILE', file)
// const data = new ClipboardEvent('').clipboardData || new DataTransfer();
// data.items.add(new File([file], file.name, { type: file.type }));
// this.logger.log('FIREBASE-UPLOAD [MSG-TEXT-AREA] DROP DATA', data)
} else {
// PASTE USE CASE
this.logger.log('[CONVS-DETAIL][MSG-TEXT-AREA] FIREBASE-UPLOAD PASTE e', e)
fileSelected = e.files.item(0)
}
let messageString = detail.data.messageString;
let metadata = detail.data.metadata;
// let type = detail.data.type;
this.logger.log('[CONVS-DETAIL][MSG-TEXT-AREA] FIREBASE-UPLOAD presentModal onDidDismiss detail.data', detail.data);
this.logger.log('[CONVS-DETAIL][MSG-TEXT-AREA] FIREBASE-UPLOAD presentModal onDidDismiss fileSelected', fileSelected);
if (detail !== null) {
const currentUpload = new UploadModel(fileSelected);
this.logger.log('[CONVS-DETAIL][MSG-TEXT-AREA] FIREBASE-UPLOAD presentModal onDidDismiss currentUpload', currentUpload);
this.logger.log('[CONVS-DETAIL][MSG-TEXT-AREA] FIREBASE-UPLOAD presentModal onDidDismiss detail.data', detail.data);
that.uploadService.upload(that.loggedUser.uid, currentUpload).then(downloadURL => {
metadata.src = downloadURL;
this.logger.log('[CONVS-DETAIL][MSG-TEXT-AREA] FIREBASE-UPLOAD presentModal invio msg metadata::: ', metadata);
this.logger.log('[CONVS-DETAIL][MSG-TEXT-AREA] FIREBASE-UPLOAD presentModal invio msg metadata downloadURL::: ', downloadURL);
this.logger.log('[CONVS-DETAIL][MSG-TEXT-AREA] FIREBASE-UPLOAD presentModal invio msg type::: ', type);
this.logger.log('[CONVS-DETAIL][MSG-TEXT-AREA] FIREBASE-UPLOAD presentModal invio msg message::: ', messageString);
// send message
// if(messageString === undefined) {
// messageString = metadata.name
// }
that.eventSendMessage.emit({ message: messageString, type: type, metadata: metadata });
// Reset the hidden input so re-selecting the same file fires 'change'.
that.fileInput.nativeElement.value = '';
this.dropEvent = null
}).catch(error => {
// Use to signal error if something goes wrong.
this.logger.error(`[CONVS-DETAIL][MSG-TEXT-AREA] FIREBASE-UPLOAD - upload Failed to upload file and get link `, error);
that.presentToastFailedToUploadFile();
});
}
} else {
that.fileInput.nativeElement.value = '';
}
});
return await modal.present();
}
/**
 * ionChange handler of the textarea: tracks whether there is sendable
 * content and forwards the text plus the textarea's rendered height to the
 * parent (used for layout).
 */
ionChange(e: any) {
  this.logger.log("[CONVS-DETAIL][MSG-TEXT-AREA] ionChange event ", e);
  const message = e.detail.value
  const height = e.target.offsetHeight + 20; // nk added +20
  // Enable sending only for non-whitespace text. The explicit typeof guard
  // replaces the previous try/catch around .trim(): e.detail.value can be
  // null/undefined when the field is cleared programmatically, and using a
  // guard avoids exception-driven control flow.
  this.conversationEnabled =
    typeof message === 'string' && message.trim().length > 0;
  this.eventChangeTextArea.emit({ msg: message, offsetHeight: height });
}
// ------------------------------------------------------------------------
// invoked by pressing the enter key on the message input field
// if the message is not empty it is passed to the control method
// ------------------------------------------------------------------------
// The "/" handling below interacts with the canned-response picker: while
// the text contains "/" and suggestions (tagsCannedFilter) are showing,
// the FIRST Enter is consumed by the picker; only a subsequent Enter
// (countClicks > 1) actually sends the message.
onKeydown(e: any, text: string) {
e.preventDefault(); // Prevent press enter from creating new line
this.countClicks++;
this.logger.log('[CONVS-DETAIL][MSG-TEXT-AREA] onKeydown - countClicks: ', this.countClicks);
this.logger.log('[CONVS-DETAIL][MSG-TEXT-AREA] onKeydown - event: ', e);
this.logger.log('[CONVS-DETAIL][MSG-TEXT-AREA] onKeydown - event target: ', e.target);
this.logger.log('[CONVS-DETAIL][MSG-TEXT-AREA] onKeydown - event target textContent: ', e.target.textContent);
this.logger.log('[CONVS-DETAIL][MSG-TEXT-AREA] onKeydown - tagsCannedFilter: ', this.tagsCannedFilter);
// this.logger.error("[CONVS-DETAIL][MSG-TEXT-AREA] pressedOnKeyboard e.keyCode ", e.keyCode);
const message = e.target.textContent.trim();
this.logger.log('[CONVS-DETAIL][MSG-TEXT-AREA] onKeydown - event target textContent (message): ', message);
// e.inputType === 'insertLineBreak' &&
// Enter on an empty field just clears the model and does nothing.
if (e.inputType === 'insertLineBreak' && message === '') {
this.messageString = '';
return;
} else {
var pos = text.lastIndexOf("/");
this.logger.log("[CONVS-DETAIL][MSG-TEXT-AREA] onKeydown - POSITION OF '/': ", pos);
if (!text.includes("/")) {
// No canned-response trigger in the text: send immediately.
this.logger.log('[CONVS-DETAIL][MSG-TEXT-AREA] onKeydown - SEND MESSAGE 1 message: ', message);
this.messageString = '';
this.sendMessage(text);
this.countClicks = 0
} else if (text.includes("/") && pos >= 0 && this.countClicks > 1 && this.tagsCannedFilter.length > 0) {
// "/" present and suggestions showing: send only on the second Enter.
this.logger.log('[CONVS-DETAIL][MSG-TEXT-AREA] onKeydown - tagsCannedFilter.length 2: ', this.tagsCannedFilter.length);
this.logger.log('[CONVS-DETAIL][MSG-TEXT-AREA] onKeydown - SEND MESSAGE 2 message: ', message);
this.messageString = '';
this.sendMessage(text);
this.countClicks = 0
} else if (text.includes("/") && this.tagsCannedFilter.length === 0) {
// "/" present but no suggestions matched: treat as plain text and send.
this.logger.log('[CONVS-DETAIL][MSG-TEXT-AREA] onKeydown - tagsCannedFilter.length 3: ', this.tagsCannedFilter.length);
this.logger.log('[CONVS-DETAIL][MSG-TEXT-AREA] onKeydown - SEND MESSAGE 3 message: ', message);
this.messageString = '';
this.sendMessage(text);
this.countClicks = 0
}
}
}
/**
 * Clears the input field and emits the message to the parent — but only
 * when the text contains something other than whitespace.
 */
sendMessage(text: string) {
  this.logger.log('[CONVS-DETAIL][MSG-TEXT-AREA] sendMessage', text);
  this.logger.log('[CONVS-DETAIL][MSG-TEXT-AREA] sendMessage conve width', this.conversationWith);
  // Always clear the model first, even if nothing gets sent.
  this.messageString = '';
  if (text.trim() === '') {
    return;
  }
  this.eventSendMessage.emit({ message: text, type: TYPE_MSG_TEXT });
}
// --------------------------------
// on mobile !
// --------------------------------
// Mobile file picker via the Cordova Chooser plugin. Currently only logs
// the chosen file name — the selected file is not uploaded here.
onFileSelectedMobile(e: any) {
this.logger.log('controlOfMessage');
this.chooser.getFile()
.then(file => {
this.logger.log(file ? file.name : 'canceled');
})
.catch((error: any) => {
this.logger.error(error);
});
}
async presentToastOnlyImageFilesAreAllowed() {
const toast = await this.toastController.create({
message: this.translationMap.get('ONLY_IMAGE_FILES_ARE_ALLOWED_TO_PASTE'),
duration: 3000,
color: "danger",
cssClass: 'toast-custom-class',
});
toast.present();
}
async presentToastFailedToUploadFile() {
const toast = await this.toastController.create({
message: this.translationMap.get('UPLOAD_FILE_ERROR'),
duration: 3000,
color: "danger",
cssClass: 'toast-custom-class',
});
toast.present();
}
// Dismisses the top-most modal with a confirmation payload.
// NOTE(review): the result of getTop() is awaited but discarded — it looks
// like it was meant to guard the dismiss() call; verify intent.
private async closeModal() {
this.logger.log('closeModal', this.modalController);
await this.modalController.getTop();
this.modalController.dismiss({ confirmed: true });
}
/**
 * Document-level keydown listener: Alt/Ctrl/Cmd + Enter appends a newline
 * to the current message instead of sending it.
 */
@HostListener('document:keydown', ['$event'])
handleKeyboardEvent(event: KeyboardEvent) {
  // Note: on mac keyboard "metakey" matches "cmd"
  const modifierHeld = event.altKey || event.ctrlKey || event.metaKey;
  if (event.key !== 'Enter' || !modifierHeld) {
    return;
  }
  this.logger.log('[CONVS-DETAIL][MSG-TEXT-AREA] HAS PRESSED COMBO KEYS this.messageString', this.messageString);
  if (this.messageString !== undefined && this.messageString.trim() !== '') {
    this.logger.log('[CONVS-DETAIL][MSG-TEXT-AREA] HAS PRESSED Enter + ALT this.messageString', this.messageString);
    this.messageString = this.messageString + "\r\n"
  }
}
/* NOT USED */
// getIfTexareaIsEmpty(calledby: string) {
// let elemTexarea = <HTMLElement>document.querySelector('#ion-textarea');
// this.logger.log("[CONVS-DETAIL] [MSG-TEXT-AREA] elemTexarea ", elemTexarea)
// if (this.messageString == null || this.messageString == '') {
// if (elemTexarea) {
// this.logger.log("[CONVS-DETAIL] [MSG-TEXT-AREA] messageString is empty - called By ", calledby)
// elemTexarea.style.height = "30px !important";
// elemTexarea.style.overflow = "hidden !important";
// }
// } else {
// if (elemTexarea) {
// this.logger.log("[CONVS-DETAIL] [MSG-TEXT-AREA] messageString not empty - called By ", calledby)
// elemTexarea.style.height = null;
// elemTexarea.style.overflow = null;
// }
// }
// }
// currently unused
// intended to fire when the image upload completes, in order to send the message
// private setSubscriptions() {
// const that = this;
// const subscribeBSStateUpload = this.uploadService.BSStateUpload.subscribe((data: any) => {
// this.logger.log('***** BSStateUpload *****', data);
// if (data) {
// let message = data.message;
// let type_message = data.type_message;
// let metadata = data.metadata;
// this.logger.log('***** message *****', message);
// this.logger.log('***** type_message *****', type_message);
// this.logger.log('***** metadata *****', metadata);
// //this.eventSendMessage.emit({ message: messageString, type: TYPE_MSG_TEXT });
// }
// });
// }
}
import { SET, ADD_ITEM, REMOVE_ITEM } from "#SRC/js/constants/TransactionTypes";
import { combineReducers, simpleFloatReducer } from "#SRC/js/utils/ReducerUtil";
import { findNestedPropertyInObject } from "#SRC/js/utils/Util";
import ContainerUtil from "#SRC/js/utils/ContainerUtil";
import { isEmpty } from "#SRC/js/utils/ValidatorUtil";
import Transaction from "#SRC/js/structs/Transaction";
import Networking from "#SRC/js/constants/Networking";
import { DEFAULT_POD_CONTAINER } from "../../../constants/DefaultPod";
import { JSONReducer as volumeMountsReducer } from "./MultiContainerVolumeMounts";
import { JSONReducer as endpointsJSONReducer } from "./Endpoints";
import { JSONReducer as multiContainerArtifactsJSONReducer } from "./MultiContainerArtifacts";
import {
JSONSegmentReducer as multiContainerHealthCheckReducer,
JSONSegmentParser as multiContainerHealthCheckParser,
} from "../MultiContainerHealthChecks";
import { PROTOCOLS } from "../../../constants/PortDefinitionConstants";
import VipLabelUtil from "../../../utils/VipLabelUtil";
import { JSONReducer as resourceLimitsReducer } from "./resourceLimits";
const { CONTAINER, HOST } = Networking.type;
// Parses a container's cpu/mem/disk resource fields as floats, one
// simpleFloatReducer per field.
const containerFloatReducer = combineReducers({
cpus: simpleFloatReducer("resources.cpus"),
mem: simpleFloatReducer("resources.mem"),
disk: simpleFloatReducer("resources.disk"),
});
/**
 * Maps the editor's internal endpoint objects onto the JSON endpoint shape
 * expected by the API. `containerPort` is only emitted when the pod runs in
 * CONTAINER networking mode.
 */
function mapEndpoints(endpoints = [], networkType, appState) {
  return endpoints.map((endpoint, index) => {
    const { name, networkNames, containerPort, vipLabel, vipPort } = endpoint;

    // Internally protocols live in a { tcp: bool, udp: bool } map; the API
    // expects an array holding only the enabled protocol names.
    const protocol = Object.keys(endpoint.protocol).filter(
      (key) => endpoint.protocol[key]
    );

    // An "automatic" host port is requested by sending 0.
    const hostPort = endpoint.automaticPort ? 0 : endpoint.hostPort;

    const labels = VipLabelUtil.generateVipLabel(
      appState.id,
      endpoint,
      vipLabel || VipLabelUtil.defaultVip(index),
      vipPort || containerPort || hostPort
    );

    if (networkType === CONTAINER) {
      return { name, networkNames, containerPort, hostPort, protocol, labels };
    }

    return { name, networkNames, hostPort, protocol, labels };
  });
}
/**
 * Parses the `containers` array of a stored pod definition into the flat
 * list of Transactions that the form reducers replay to rebuild UI state.
 * Each container contributes an ADD_ITEM transaction followed by SET
 * transactions for every populated field.
 */
function containersParser(state) {
  // No pod / no containers yields an empty transaction list.
  if (state == null || state.containers == null) {
    return [];
  }
  return state.containers.reduce((memo, item, index) => {
    memo.push(new Transaction(["containers"], item, ADD_ITEM));
    if (item.name) {
      memo.push(new Transaction(["containers", index, "name"], item.name));
    }
    if (item.image) {
      memo.push(new Transaction(["containers", index, "image"], item.image));
    }
    if (item.image && item.image.id) {
      memo.push(
        new Transaction(["containers", index, "image", "id"], item.image.id)
      );
    }
    if (item.image && item.image.forcePull) {
      memo.push(
        new Transaction(
          ["containers", index, "image", "forcePull"],
          item.image.forcePull
        )
      );
    }
    // Requested resources (cpus/mem/disk), each only when present.
    if (item.resources != null) {
      const { resources } = item;
      if (resources.cpus != null) {
        memo.push(
          new Transaction(
            ["containers", index, "resources", "cpus"],
            resources.cpus
          )
        );
      }
      if (resources.mem != null) {
        memo.push(
          new Transaction(
            ["containers", index, "resources", "mem"],
            resources.mem
          )
        );
      }
      if (resources.disk != null) {
        memo.push(
          new Transaction(
            ["containers", index, "resources", "disk"],
            resources.disk
          )
        );
      }
    }
    // Resource limits map onto the "limits" path segment.
    if (item.resourceLimits != null) {
      const { resourceLimits } = item;
      if (resourceLimits.cpus != null) {
        memo.push(
          new Transaction(
            ["containers", index, "limits", "cpus"],
            resourceLimits.cpus
          )
        );
      }
      if (resourceLimits.mem != null) {
        memo.push(
          new Transaction(
            ["containers", index, "limits", "mem"],
            resourceLimits.mem
          )
        );
      }
    }
    if (item.privileged != null) {
      memo.push(
        new Transaction(["containers", index, "privileged"], item.privileged)
      );
    }
    // Health checks have their own dedicated parser.
    if (item.healthCheck != null) {
      memo = memo.concat(
        multiContainerHealthCheckParser(item.healthCheck, [
          "containers",
          index,
          "healthCheck",
        ])
      );
    }
    if (item.artifacts != null && item.artifacts.length !== 0) {
      item.artifacts.forEach((artifact, artifactIndex) => {
        memo.push(
          new Transaction(
            ["containers", index, "artifacts"],
            artifact,
            ADD_ITEM
          )
        );
        // Primitive artifact entries carry no per-key transactions.
        if (artifact == null || typeof artifact !== "object") {
          return;
        }
        memo = memo.concat(
          Object.keys(artifact).map(
            (key) =>
              new Transaction(
                ["containers", index, "artifacts", artifactIndex, key],
                artifact[key]
              )
          )
        );
      });
    }
    if (item.endpoints != null && item.endpoints.length !== 0) {
      const networkMode = findNestedPropertyInObject(state, "networks.0.mode");
      item.endpoints.forEach((_endpoint, endpointIndex) => {
        const endpoint = {
          ..._endpoint,
        };
        // Internal representation of protocols field differs from the JSON
        // Thus we need to delete the field from the ADD_ITEM value so that
        // JSONReducer isn't confused by it
        const endpointProtocol = endpoint.protocol;
        delete endpoint.protocol;
        memo = memo.concat([
          new Transaction(
            ["containers", index, "endpoints"],
            endpoint,
            ADD_ITEM
          ),
          new Transaction(
            ["containers", index, "endpoints", endpointIndex, "hostPort"],
            endpoint.hostPort
          ),
          new Transaction(
            ["containers", index, "endpoints", endpointIndex, "automaticPort"],
            endpoint.hostPort === 0
          ),
          new Transaction(
            ["containers", index, "endpoints", endpointIndex, "servicePort"],
            // NOTE(review): unlike hostPort above, this stores the *boolean*
            // `servicePort === 0`, not the port value — confirm intended.
            endpoint.servicePort === 0
          ),
          new Transaction(
            ["containers", index, "endpoints", endpointIndex, "name"],
            endpoint.name
          ),
        ]);
        if (endpoint.labels != null) {
          memo.push(
            new Transaction(
              ["containers", index, "endpoints", endpointIndex, "labels"],
              endpoint.labels
            )
          );
        }
        if (endpoint.networkNames != null) {
          memo.push(
            new Transaction(
              ["containers", index, "endpoints", endpointIndex, "networkNames"],
              endpoint.networkNames
            )
          );
        }
        // containerPort only applies in container networking mode.
        if (networkMode === CONTAINER.toLowerCase()) {
          memo.push(
            new Transaction(
              [
                "containers",
                index,
                "endpoints",
                endpointIndex,
                "containerPort",
              ],
              endpoint.containerPort
            )
          );
        }
        // Derive the load-balancing (VIP) form fields from the labels.
        const vip = VipLabelUtil.findVip(endpoint.labels);
        if (vip != null) {
          const [vipLabel, vipValue] = vip;
          memo.push(
            new Transaction(
              ["containers", index, "endpoints", endpointIndex, "loadBalanced"],
              true
            )
          );
          memo.push(
            new Transaction(
              ["containers", index, "endpoints", endpointIndex, "vipLabel"],
              vipLabel
            )
          );
          // A custom VIP is one that does not start with the app's own id.
          if (!vipValue.startsWith(`${state.id}:`)) {
            memo.push(
              new Transaction(
                ["containers", index, "endpoints", endpointIndex, "vip"],
                vipValue
              )
            );
          }
          const vipPortMatch = vipValue.match(/.+:(\d+)/);
          if (vipPortMatch) {
            memo.push(
              new Transaction(
                ["containers", index, "endpoints", endpointIndex, "vipPort"],
                vipPortMatch[1]
              )
            );
          }
        }
        // Emit one boolean SET per known protocol name.
        const protocols = endpointProtocol || [];
        PROTOCOLS.forEach((protocol) => {
          memo.push(
            new Transaction(
              [
                "containers",
                index,
                "endpoints",
                endpointIndex,
                "protocol",
                protocol,
              ],
              protocols.includes(protocol),
              SET
            )
          );
        });
      });
    }
    if (item.forcePullImage != null) {
      memo.push(
        new Transaction(
          ["containers", index, "forcePullImage"],
          item.forcePullImage
        )
      );
    }
    if (
      item.exec != null &&
      item.exec.command != null &&
      item.exec.command.shell != null
    ) {
      memo.push(
        new Transaction(
          ["containers", index, "exec", "command", "shell"],
          item.exec.command.shell
        )
      );
    }
    return memo;
  }, []);
}
/**
 * An image object is considered removable when it is absent or lacks a
 * truthy `id` — there is nothing meaningful to send to the API.
 */
function shouldDeleteContainerImage(image) {
  if (image == null) {
    return true;
  }

  return !image.id;
}
/**
 * Derives the JSON `containers` array from form transactions.
 *
 * `this` is a per-form reducer context used as a scratchpad: it caches the
 * network type, app id, per-container float-parser contexts, images,
 * endpoints, volume mounts, health-check and artifact state between calls.
 * The transaction log is replayed from the start whenever containerIndex
 * is 0.
 */
export function JSONReducer(
  this: { networkType: string },
  state: any[] = [],
  {
    type,
    path = [],
    value,
  }: { type: symbol; path: Array<string | number>; value: unknown },
  containerIndex: number
) {
  // A fresh pass over the transaction log starts with an empty state.
  if (containerIndex === 0) {
    state = [];
  }
  const [base, index, field, subField] = path;
  // Lazily initialize the pieces of the reducer context.
  if (this.networkType == null) {
    this.networkType = HOST;
  }
  if (this.appState == null) {
    this.appState = {};
  }
  if (this.healthCheckState == null) {
    this.healthCheckState = [];
  }
  if (base === "id" && type === SET) {
    this.appState.id = value;
  }
  // Network values are encoded as "<mode>.<name>"; only the mode matters.
  if (base === "networks" && parseInt(index, 10) === 0 && type === SET) {
    const valueSplit = value.split(".");
    this.networkType = valueSplit[0];
  }
  // Transactions unrelated to containers still refresh the endpoint mapping,
  // because the network type or app id may just have changed.
  if (!path.includes("containers") && !path.includes("volumeMounts")) {
    return state.map((container, index) => {
      if (this.endpoints && this.endpoints[index]) {
        container.endpoints = mapEndpoints(
          this.endpoints[index],
          this.networkType,
          this.appState
        );
        if (container.endpoints.length === 0) {
          delete container.endpoints;
        }
      }
      return container;
    });
  }
  if (this.cache == null) {
    // This is needed to provide a context for nested reducers.
    // Containers is an array so we will have multiple items and so that
    // the reducers are not overwriting each others context we are
    // providing one object per item in the array.
    this.cache = [];
  }
  if (this.images == null) {
    this.images = {};
  }
  if (this.endpoints == null) {
    this.endpoints = [];
  }
  if (this.volumeMounts == null) {
    this.volumeMounts = [];
  }
  let newState = state.slice();
  const joinedPath = path.join(".");
  // Adding/removing whole containers also adds/removes the parallel cache
  // and endpoint entries so the indices stay aligned.
  if (joinedPath === "containers") {
    switch (type) {
      case ADD_ITEM:
        const name = ContainerUtil.getNewContainerName(
          newState.length,
          newState
        );
        newState.push({
          ...DEFAULT_POD_CONTAINER,
          name,
          ...value,
        });
        this.cache.push({});
        this.endpoints.push([]);
        break;
      case REMOVE_ITEM:
        newState = newState.filter((item, index) => index !== value);
        this.cache = this.cache.filter((item, index) => index !== value);
        this.endpoints = this.endpoints.filter(
          (item, index) => index !== value
        );
        break;
    }
    return newState;
  }
  // Volume mounts are maintained by their own reducer and then projected
  // onto each container (one mountPath per container index).
  this.volumeMounts = volumeMountsReducer(this.volumeMounts, {
    type,
    path,
    value,
  });
  newState = state.map((container, index) => {
    if (this.volumeMounts.length !== 0) {
      container.volumeMounts = this.volumeMounts
        .filter(
          (volumeMount) =>
            volumeMount.name != null && volumeMount.mountPath[index]
        )
        .map((volumeMount) => ({
          name: volumeMount.name,
          mountPath: volumeMount.mountPath[index],
        }));
    }
    if (this.volumeMounts.length === 0 && container.volumeMounts != null) {
      container.volumeMounts = [];
    }
    return container;
  });
  if (field === "endpoints") {
    if (this.endpoints[index] == null) {
      this.endpoints[index] = [];
    }
    this.endpoints = endpointsJSONReducer(this.endpoints, {
      type,
      path,
      value,
    });
  }
  // Re-map the internal endpoint state into JSON shape for every container.
  newState = newState.map((container, index) => {
    if (this.endpoints && this.endpoints[index]) {
      container.endpoints = mapEndpoints(
        this.endpoints[index],
        this.networkType,
        this.appState
      );
      if (container.endpoints.length === 0) {
        delete container.endpoints;
      }
    }
    return container;
  });
  if (field === "healthCheck") {
    if (this.healthCheckState[index] == null) {
      this.healthCheckState[index] = {};
    }
    // Health checks delegate to the nested reducer, dropping the
    // ["containers", index, "healthCheck"] path prefix.
    newState[index].healthCheck = multiContainerHealthCheckReducer.call(
      this.healthCheckState[index],
      newState[index].healthCheck,
      { type, path: path.slice(3), value }
    );
  }
  if (field === "artifacts") {
    // Create a local cache of artifacts so we can filter the display values
    if (this.artifactState == null) {
      this.artifactState = [];
    }
    // Filter empty values and assign to state
    multiContainerArtifactsJSONReducer
      .call(this.artifactState, null, { type, path, value })
      .forEach((item, index) => {
        newState[index].artifacts = item.filter(({ uri }) => !isEmpty(uri));
      });
  }
  if (type === SET && joinedPath === `containers.${index}.name`) {
    newState[index].name = value;
  }
  if (type === SET && joinedPath === `containers.${index}.exec.command.shell`) {
    newState[index].exec = {
      ...newState[index].exec,
      command: { shell: value },
    };
  }
  if (type === SET && field === "resources") {
    // Parse numbers
    newState[index].resources = containerFloatReducer.call(
      this.cache[index],
      newState[index].resources,
      { type, value, path: [field, subField] }
    );
  }
  if (type === SET && "limits" === field) {
    // Parse numbers
    newState[index].resourceLimits = resourceLimitsReducer.call(
      this.cache[index],
      newState[index].resourceLimits,
      { type, value, path: [field, subField] }
    );
  }
  if (type === SET && joinedPath === `containers.${index}.image`) {
    newState[index].image = this.images[index] = {
      ...newState[index].image,
      ...value,
    };
  }
  // Image id/forcePull edits keep a cached image object per container and
  // drop the image entirely once it no longer carries a usable id.
  if (type === SET && joinedPath === `containers.${index}.image.id`) {
    newState[index].image = this.images[index] = {
      ...this.images[index],
      id: value,
      kind: "DOCKER",
    };
    if (shouldDeleteContainerImage(newState[index].image)) {
      delete newState[index].image;
    }
  }
  if (type === SET && joinedPath === `containers.${index}.image.forcePull`) {
    newState[index].image = this.images[index] = {
      ...this.images[index],
      forcePull: value,
    };
    if (shouldDeleteContainerImage(newState[index].image)) {
      delete newState[index].image;
    }
  }
  return newState;
}
// Parsing the stored JSON back into transactions reuses containersParser.
export const JSONParser = containersParser;
import moment from 'moment'
import { Map as ImmutableMap } from 'immutable'
import { isEqual } from '../compare'
import {
toArr,
every,
some,
findIndex,
find,
includes,
map,
reduce,
} from '../array'
import { clone, shallowClone } from '../clone'
import { lowerCase } from '../case'
import { deprecate } from '../deprecate'
import { globalThisPolyfill } from '../global'
import { isValid, isEmpty } from '../isEmpty'
import { stringLength } from '../string'
import { Subscribable } from '../subscribable'
import { merge } from '../merge'
import { instOf } from '../instanceof'
import { isFn, isHTMLElement, isNumberLike, isReactElement } from '../checkers'
import { defaults } from '../defaults'
import { applyMiddleware } from '../middleware'
// Resolves after `d` milliseconds (default 100); used to let async work flush.
const sleep = (d = 100) =>
  new Promise((resolve) => {
    setTimeout(resolve, d)
  })
// Array helpers: all accept arrays, and several also accept plain objects
// (iterating their values) or strings.
describe('array', () => {
  test('toArr', () => {
    // Wraps non-arrays, passes arrays through, and maps null to [].
    expect(isEqual(toArr([123]), [123])).toBeTruthy()
    expect(isEqual(toArr(123), [123])).toBeTruthy()
    expect(isEqual(toArr(null), [])).toBeTruthy()
  })
  test('some', () => {
    const values1 = [1, 2, 3, 4, 5]
    const values2 = []
    const values3 = { a: 1, b: 2, c: 3 }
    const values4 = {}
    expect(some(values1, (item) => item === 3)).toBeTruthy()
    expect(some(values1, (item) => item === 6)).toBeFalsy()
    // Empty collections are always falsy regardless of predicate.
    expect(some(values2, () => true)).toBeFalsy()
    expect(some(values2, () => false)).toBeFalsy()
    expect(some(values3, (item) => item === 3)).toBeTruthy()
    expect(some(values3, (item) => item === 6)).toBeFalsy()
    expect(some(values4, () => true)).toBeFalsy()
    expect(some(values4, () => false)).toBeFalsy()
  })
  test('every', () => {
    const values1 = [1, 2, 3, 4, 5]
    const values2 = []
    const values3 = { a: 1, b: 2, c: 3 }
    const values4 = {}
    expect(every(values1, (item) => item < 6)).toBeTruthy()
    expect(every(values1, (item) => item < 3)).toBeFalsy()
    // Vacuous truth: every() over an empty collection is truthy.
    expect(every(values2, () => true)).toBeTruthy()
    expect(every(values2, () => false)).toBeTruthy()
    expect(every(values2, () => false)).toBeTruthy()
    expect(every(values3, (item) => item < 6)).toBeTruthy()
    expect(every(values3, (item) => item < 3)).toBeFalsy()
    expect(every(values4, () => false)).toBeTruthy()
    expect(every(values4, () => false)).toBeTruthy()
  })
  test('findIndex', () => {
    const value = [1, 2, 3, 4, 5]
    expect(
      isEqual(
        findIndex(value, (item) => item > 3),
        3
      )
    ).toBeTruthy()
    // The third argument searches from the end of the array.
    expect(
      isEqual(
        findIndex(value, (item) => item < 3, true),
        1
      )
    ).toBeTruthy()
    expect(
      isEqual(
        findIndex(value, (item) => item > 6),
        -1
      )
    ).toBeTruthy()
  })
  test('find', () => {
    const value = [1, 2, 3, 4, 5]
    expect(
      isEqual(
        find(value, (item) => item > 3),
        4
      )
    ).toBeTruthy()
    expect(
      isEqual(
        find(value, (item) => item < 3, true),
        2
      )
    ).toBeTruthy()
    expect(
      isEqual(
        find(value, (item) => item > 6),
        void 0
      )
    ).toBeTruthy()
  })
  test('includes', () => {
    const value = [1, 2, 3, 4, 5]
    expect(includes(value, 3)).toBeTruthy()
    expect(includes(value, 6)).toBeFalsy()
    // includes also works on strings (substring search).
    expect(includes('some test string', 'test')).toBeTruthy()
    expect(includes('some test string', 'test2')).toBeFalsy()
  })
  test('map', () => {
    const value = [1, 2, 3, 4, 5]
    const stringVal = 'some test string'
    const obj = { k1: 'v1', k2: 'v2' }
    // The third argument reverses the iteration order.
    expect(
      isEqual(
        map(value, (item) => item + 1, true),
        [6, 5, 4, 3, 2]
      )
    ).toBeTruthy()
    expect(
      isEqual(
        map(stringVal, (item) => item),
        stringVal.split('')
      )
    ).toBeTruthy()
    expect(
      isEqual(
        map(obj, (item) => `${item}-copy`),
        { k1: 'v1-copy', k2: 'v2-copy' }
      )
    ).toBeTruthy()
  })
  test('reduce', () => {
    const value = [1, 2, 3, 4, 5]
    expect(
      isEqual(
        reduce(value, (acc, item) => acc + item, 0, true),
        15
      )
    ).toBeTruthy()
  })
})
describe('case', () => {
test('lowercase', () => {
expect(lowerCase('SOME_UPPER_CASE_TEXT')).toEqual('some_upper_case_text')
expect(lowerCase('')).toEqual('')
})
})
describe('compare', () => {
// base
expect(isEqual('some test string', 'some test string')).toBeTruthy()
// array
expect(
isEqual([{ k1: 'v1' }, { k2: 'v2' }], [{ k1: 'v1' }, { k2: 'v2' }])
).toBeTruthy()
expect(isEqual([{ k1: 'v1' }, { k2: 'v2' }], [{ k1: 'v1' }])).toBeFalsy()
// moment
const momentA = moment('2019-11-11', 'YYYY-MM-DD')
const momentB = moment('2019-11-10', 'YYYY-MM-DD')
expect(isEqual(momentA, {})).toBeFalsy()
expect(isEqual(momentA, moment('2019-11-11', 'YYYY-MM-DD'))).toBeTruthy()
expect(isEqual(momentA, momentB)).toBeFalsy()
// immutable
const immutableA = ImmutableMap({ key: 'val' })
const immutableB = ImmutableMap({ key1: 'val1' })
expect(isEqual(immutableA, {})).toBeFalsy()
expect(isEqual(immutableA, immutableB)).toBeFalsy()
// schema
// todo
// date
const dateA = new Date('2019-11-11')
const dateB = new Date('2019-11-10')
expect(isEqual(dateA, {})).toBeFalsy()
expect(isEqual(dateA, dateB)).toBeFalsy()
expect(isEqual(dateA, new Date('2019-11-11'))).toBeTruthy()
// regexp
const regexpA = new RegExp(/test/)
const regexpB = new RegExp(/test2/)
expect(isEqual(regexpA, {})).toBeFalsy()
expect(isEqual(regexpA, new RegExp(/test/))).toBeTruthy()
expect(isEqual(regexpA, regexpB)).toBeFalsy()
// URL
const urlA = new URL('https://formilyjs.org/')
const urlB = new URL('https://www.taobao.com')
const urlC = new URL('https://formilyjs.org/')
expect(isEqual(urlA, urlC)).toBeTruthy()
expect(isEqual(urlA, urlB)).toBeFalsy()
// object
const objA = { key: 'val' }
const objB = { key2: 'val2', key3: 'val3' }
const objC = { key2: 'val2' }
expect(isEqual(objA, { key: 'val' })).toBeTruthy()
expect(isEqual(objA, objB)).toBeFalsy()
expect(isEqual(objA, objC)).toBeFalsy()
expect(isEqual([11, 22], [33, 44])).toBeFalsy()
expect(isEqual([11, 22], {})).toBeFalsy()
expect(isEqual(new URL('https://aa.test'), {})).toBeFalsy()
expect(instOf(new URL('https://aa.test'), 'URL')).toBeTruthy()
expect(instOf(new Date(), 'Date')).toBeTruthy()
expect(
isEqual(new URL('https://aa.test'), new URL('https://aa.test'))
).toBeTruthy()
expect(
isEqual(
{
$$typeof: true,
_owner: true,
aaa: 123,
},
{
$$typeof: true,
_owner: true,
aaa: 123,
}
)
).toBeTruthy()
expect(
isEqual(
{
$$typeof: true,
_owner: true,
aaa: 123,
},
{
$$typeof: true,
_owner: true,
bbb: 123,
}
)
).toBeFalsy()
expect(
isEqual(
{
$$typeof: true,
_owner: true,
aaa: 123,
},
{
$$typeof: true,
_owner: true,
aaa: 333,
}
)
).toBeFalsy()
})
// clone() deep-copies plain objects/arrays but intentionally returns native
// container types (Map/Set/WeakMap/WeakSet/Date/File/URL/RegExp/Promise)
// and functions by reference.
describe('clone and compare', () => {
  test('clone form data', () => {
    let dd = new Map()
    dd.set('aaa', { bb: 123 })
    let ee = new WeakMap()
    ee.set({}, 1)
    let ff = new WeakSet()
    ff.add({})
    let gg = new Set()
    gg.add(3)
    let a = {
      aa: 123123,
      bb: [{ bb: 111 }, { bb: 222 }],
      cc: () => {
        // eslint-disable-next-line no-console
        console.log('123')
      },
      dd,
      ee,
      ff,
      gg,
    }
    let cloned = clone(a)
    expect(isEqual(cloned, a)).toBeTruthy()
    // Plain objects and nested arrays are copied ...
    expect(a === cloned).toBeFalsy()
    expect(a.bb[0] === cloned.bb[0]).toBeFalsy()
    // ... while native containers and functions keep their identity.
    expect(a.dd === cloned.dd).toBeTruthy()
    expect(a.dd.get('aaa') === cloned.dd.get('aaa')).toBeTruthy()
    expect(a.cc === cloned.cc).toBeTruthy()
    expect(a.ee === cloned.ee).toBeTruthy()
    expect(a.ff === cloned.ff).toBeTruthy()
    expect(a.gg === cloned.gg).toBeTruthy()
    // Moment/JSON-schema/React-element marker objects survive cloning.
    expect(
      clone({
        aa: {
          _isAMomentObject: true,
        },
        bb: {
          _isJSONSchemaObject: true,
        },
        cc: {
          $$typeof: true,
          _owner: true,
        },
      })
    ).toEqual({
      aa: {
        _isAMomentObject: true,
      },
      bb: {
        _isJSONSchemaObject: true,
      },
      cc: {
        $$typeof: true,
        _owner: true,
      },
    })
    // Objects exposing toJS/toJSON are cloned via that conversion.
    expect(
      clone({
        toJS() {
          return 123
        },
      })
    ).toEqual(123)
    expect(
      clone({
        toJSON() {
          return 123
        },
      })
    ).toEqual(123)
  })
  test('native clone', () => {
    const map = new Map()
    map.set('key', 123)
    expect(clone(map) === map).toBeTruthy()
    const weakMap = new WeakMap()
    const key = {}
    weakMap.set(key, 123)
    expect(clone(weakMap) === weakMap).toBeTruthy()
    const weakSet = new WeakSet()
    const key2 = {}
    weakMap.set(key2, 123)
    expect(clone(weakSet) === weakSet).toBeTruthy()
    const set = new Set()
    expect(clone(set) === set).toBeTruthy()
    const date = new Date()
    expect(clone(date) === date).toBeTruthy()
    const file = new File([''], 'filename')
    expect(clone(file) === file).toBeTruthy()
    const url = new URL('https://test.com')
    expect(clone(url) === url).toBeTruthy()
    const regexp = /\d+/
    expect(clone(regexp) === regexp).toBeTruthy()
    const promise = Promise.resolve(1)
    expect(clone(promise) === promise).toBeTruthy()
  })
  test('shallowClone', () => {
    expect(shallowClone({ aa: 123 })).toEqual({ aa: 123 })
    expect(shallowClone([123])).toEqual([123])
    expect(shallowClone(/\d+/)).toEqual(/\d+/)
  })
})
// deprecate() has two call shapes: wrapping a function (returns a wrapper
// that still invokes it) or being called directly with message strings.
describe('deprecate', () => {
  test('deprecate', () => {
    const test = jest.fn(() => {
      console.info('### deprecated function called ###')
    })
    const deprecatedFn = jest.fn(
      deprecate(test, 'Some.Deprecated.Api', 'some deprecated error')
    )
    // arguments - function
    deprecatedFn()
    expect(deprecatedFn).toHaveBeenCalledTimes(1)
    // The wrapped original must still be invoked exactly once.
    expect(test).toHaveBeenCalledTimes(1)
    // arguments - string
    const testDeprecatedFn = jest.fn(() =>
      deprecate('Some.Deprecated.Api', 'some deprecated error')
    )
    testDeprecatedFn()
    expect(testDeprecatedFn).toHaveBeenCalledTimes(1)
    // arguments - empty string
    const testDeprecatedFn2 = jest.fn(() => deprecate('Some.Deprecated.Api'))
    testDeprecatedFn2()
    expect(testDeprecatedFn2).toHaveBeenCalledTimes(1)
  })
})
describe('isEmpty', () => {
  test('isValid', () => {
    // val - undefined
    expect(isValid(undefined)).toBeFalsy()
    // val - any
    expect(isValid(!undefined)).toBeTruthy()
  })
  test('isEmpty', () => {
    // val - null
    expect(isEmpty(null)).toBeTruthy()
    // val - boolean
    expect(isEmpty(true)).toBeFalsy()
    // val - number
    expect(isEmpty(2422)).toBeFalsy()
    // val - string
    expect(isEmpty('some text')).toBeFalsy()
    expect(isEmpty('')).toBeTruthy()
    // val - function (zero-arg functions count as empty)
    const emptyFunc = function () {}
    const nonEmptyFunc = function (payload) {
      console.info(payload)
    }
    expect(isEmpty(emptyFunc)).toBeTruthy()
    expect(isEmpty(nonEmptyFunc)).toBeFalsy()
    // val - arrays: by default an array of only falsy values is empty;
    // the second argument `true` makes element presence count instead.
    expect(isEmpty([])).toBeTruthy()
    expect(isEmpty([0])).toBeTruthy()
    expect(isEmpty([''])).toBeTruthy()
    expect(isEmpty([''], true)).toBeFalsy()
    expect(isEmpty([0], true)).toBeFalsy()
    expect(isEmpty([1, 2, 3, 4, 5])).toBeFalsy()
    expect(isEmpty([0, undefined, null, ''])).toBeTruthy()
    // val - errors (empty message => empty)
    expect(isEmpty(new Error())).toBeTruthy()
    expect(isEmpty(new Error('some error'))).toBeFalsy()
    // val - objects (Map/Set by size, plain objects by own keys)
    expect(
      isEmpty(new File(['foo'], 'filename.txt', { type: 'text/plain' }))
    ).toBeFalsy()
    expect(isEmpty(new Map())).toBeTruthy()
    expect(isEmpty(new Map().set('key', 'val'))).toBeFalsy()
    expect(isEmpty(new Set())).toBeTruthy()
    expect(isEmpty(new Set([1, 2]))).toBeFalsy()
    expect(isEmpty({ key: 'val' })).toBeFalsy()
    expect(isEmpty({})).toBeTruthy()
    // NOTE(review): no matcher here — this expect() asserts nothing.
    // Probably meant `.toBeFalsy()`; confirm the intended Symbol semantics.
    expect(isEmpty(Symbol()))
  })
})
describe('string', () => {
  // stringLength counts Unicode code points, so the emoji is a single unit.
  test('stringLength', () => {
    const text = '🦄some text'
    expect(stringLength(text)).toEqual(10)
  })
})
describe('shared Subscribable', () => {
  test('Subscribable', () => {
    const cb = jest.fn((payload) => payload)
    // default subscribable: subscribe returns a 1-based handler index and
    // unsubscribed handlers no longer receive notifications.
    const obj = new Subscribable()
    const handlerIdx = obj.subscribe(cb)
    expect(handlerIdx).toEqual(1)
    obj.notify({ key: 'val' })
    expect(cb).toHaveBeenCalledTimes(1)
    expect(cb).toBeCalledWith({ key: 'val' })
    obj.unsubscribe(handlerIdx)
    obj.notify({ key: 'val' })
    expect(cb).toHaveBeenCalledTimes(1)
    // subscribable with custom filter: the filter transforms the payload
    // before it reaches every subscriber.
    const objWithCustomFilter = new Subscribable()
    const customFilter = (payload) => {
      payload.key2 = 'val2'
      return payload
    }
    objWithCustomFilter.subscription = {
      filter: customFilter,
    }
    objWithCustomFilter.subscribe(cb)
    const handlerIdx2 = objWithCustomFilter.subscribe(cb)
    expect(handlerIdx2).toEqual(2)
    objWithCustomFilter.notify({ key4: 'val4' })
    expect(cb).toHaveBeenCalledTimes(3)
    expect(cb).toBeCalledWith({ key4: 'val4', key2: 'val2' })
    // subscribable with custom notify: returning false suppresses the
    // regular subscriber dispatch, so cb is not called again.
    const objWithCustomNotify = new Subscribable()
    const customNotify = jest.fn((payload) => {
      console.info(payload)
      return false
    })
    objWithCustomNotify.subscription = {
      notify: customNotify,
    }
    objWithCustomNotify.subscribe(cb)
    objWithCustomNotify.notify({ key3: 'val3' })
    expect(customNotify).toBeCalledTimes(1)
    objWithCustomNotify.unsubscribe()
  })
})
// Type-checker predicates from ../checkers.
describe('types', () => {
  test('isFn', () => {
    // All function flavors (plain, async, generator, arrow) must match.
    const normalFunction = function normalFn() {}
    const asyncFunction = async function asyncFn() {}
    const generatorFunction = function* generatorFn() {}
    expect(isFn(() => {})).toBeTruthy()
    expect(isFn(normalFunction)).toBeTruthy()
    expect(isFn(asyncFunction)).toBeTruthy()
    expect(isFn(generatorFunction)).toBeTruthy()
    expect(isFn('')).toBeFalsy()
    expect(isFn(undefined)).toBeFalsy()
    expect(isFn(['🦄'])).toBeFalsy()
  })
  test('isNumberLike', () => {
    expect(isNumberLike(123)).toBeTruthy()
    expect(isNumberLike('123')).toBeTruthy()
    expect(isNumberLike('aa')).toBeFalsy()
  })
  test('isReactElement', () => {
    // Detection is structural: $$typeof plus _owner markers.
    expect(isReactElement({ $$typeof: true, _owner: true })).toBeTruthy()
  })
  test('isHTMLElement', () => {
    expect(isHTMLElement(document.createElement('div'))).toBeTruthy()
  })
})
// merge(): deep merge with two modes — `assign: true` mutates the target
// in place, the default clones. Marker objects (moment / JSON schema /
// React element / toJS / toJSON) are replaced wholesale, not deep-merged.
describe('merge', () => {
  test('assign', () => {
    const target = {
      aa: {
        bb: {
          cc: {
            dd: 123,
          },
        },
      },
    }
    const source = {
      aa: {
        bb: {
          cc: {
            ee: '1234',
          },
        },
      },
    }
    expect(
      merge(target, source, {
        assign: true,
      })
    ).toEqual({
      aa: {
        bb: {
          cc: {
            dd: 123,
            ee: '1234',
          },
        },
      },
    })
    // assign mode mutates the target object itself.
    expect(target).toEqual({
      aa: {
        bb: {
          cc: {
            dd: 123,
            ee: '1234',
          },
        },
      },
    })
    // React-element-like values replace instead of merging.
    expect(
      merge(
        {
          react: {
            $$typeof: true,
            _owner: true,
            aa: 123,
          },
        },
        {
          react: {
            $$typeof: true,
            _owner: true,
            bb: 321,
          },
        },
        {
          assign: true,
        }
      )
    ).toEqual({
      react: {
        $$typeof: true,
        _owner: true,
        bb: 321,
      },
    })
    // Moment-like values replace instead of merging.
    expect(
      merge(
        {
          react: {
            _isAMomentObject: true,
            aa: 123,
          },
        },
        {
          react: {
            _isAMomentObject: true,
            bb: 321,
          },
        },
        {
          assign: true,
        }
      )
    ).toEqual({
      react: {
        _isAMomentObject: true,
        bb: 321,
      },
    })
    // JSON-schema-like values replace instead of merging.
    expect(
      merge(
        {
          react: {
            _isJSONSchemaObject: true,
            aa: 123,
          },
        },
        {
          react: {
            _isJSONSchemaObject: true,
            bb: 321,
          },
        },
        {
          assign: true,
        }
      )
    ).toEqual({
      react: {
        _isJSONSchemaObject: true,
        bb: 321,
      },
    })
    // Values exposing toJS/toJSON also replace wholesale.
    const toJSObj = {
      toJS: () => {},
      bb: 321,
    }
    expect(
      merge(
        {
          toJSObj: {
            toJS: () => {},
            aa: 123,
          },
        },
        {
          toJSObj,
        },
        {
          assign: true,
        }
      )
    ).toEqual({
      toJSObj,
    })
    const toJSONObj = {
      toJSON: () => {},
      bb: 321,
    }
    expect(
      merge(
        {
          toJSONObj: {
            toJS: () => {},
            aa: 123,
          },
        },
        {
          toJSONObj,
        },
        {
          assign: true,
        }
      )
    ).toEqual({
      toJSONObj,
    })
  })
  test('empty', () => {
    // An undefined target slot is simply filled by the source value.
    expect(
      merge(
        {
          aa: undefined,
        },
        {
          aa: {},
        }
      )
    ).toEqual({ aa: {} })
  })
  test('clone', () => {
    const target = {
      aa: {
        bb: {
          cc: {
            dd: 123,
          },
        },
      },
    }
    const source = {
      aa: {
        bb: {
          cc: {
            ee: '1234',
          },
        },
      },
    }
    expect(merge(target, source)).toEqual({
      aa: {
        bb: {
          cc: {
            dd: 123,
            ee: '1234',
          },
        },
      },
    })
    // Default (clone) mode leaves the target untouched.
    expect(target).toEqual({
      aa: {
        bb: {
          cc: {
            dd: 123,
          },
        },
      },
    })
  })
  test('merge array', () => {
    // Arrays concatenate by default.
    expect(merge([11, 22], [333])).toEqual([11, 22, 333])
  })
  test('merge custom', () => {
    // customMerge returns the merge function used for matching keys.
    expect(
      merge(
        { aa: { cc: 123 } },
        { aa: { bb: 321 } },
        {
          customMerge() {
            return (a, b) => ({ ...a, ...b })
          },
        }
      )
    ).toEqual({ aa: { cc: 123, bb: 321 } })
  })
  test('merge symbols', () => {
    const symbol = Symbol('xxx')
    expect(merge({ [symbol]: 123 }, { aa: 321 })).toEqual({
      [symbol]: 123,
      aa: 321,
    })
  })
  test('merge unmatch', () => {
    // Mismatched shapes (object vs array) resolve to the source value.
    expect(merge({ aa: 123 }, [111])).toEqual([111])
  })
})
describe('globalThis', () => {
expect(globalThisPolyfill.requestAnimationFrame).not.toBeUndefined()
})
describe('instanceof', () => {
  // instOf is falsy when the second argument is not a constructor / name.
  test('instOf', () => {
    const notAConstructor = 123
    expect(instOf(123, notAConstructor)).toBeFalsy()
  })
})
// defaults(): marker objects in the defaults map (moment / JSON schema /
// React element / toJS / toJSON) never shadow the provided values, and
// keys missing from the defaults pass straight through.
test('defaults', () => {
  const toJSON = () => {}
  const toJS = () => {}
  expect(
    defaults(
      {
        aa: {
          _isAMomentObject: true,
        },
        bb: {
          _isJSONSchemaObject: true,
        },
        cc: {
          $$typeof: true,
          _owner: true,
        },
        dd: {
          toJSON,
        },
        ee: {
          toJS,
        },
      },
      {
        aa: { value: 111 },
        bb: { value: 222 },
        cc: { value: 333 },
        dd: { value: 444 },
        ee: { value: 555 },
        mm: { value: 123 },
      }
    )
  ).toEqual({
    aa: { value: 111 },
    bb: { value: 222 },
    cc: { value: 333 },
    dd: { value: 444 },
    ee: { value: 555 },
    mm: { value: 123 },
  })
})
// applyMiddleware chains (value, next) middlewares; calling next(v) passes
// v on, calling next() with no argument passes the previous value through.
test('applyMiddleware', async () => {
  expect(await applyMiddleware(0)).toEqual(0)
  expect(
    await applyMiddleware(0, [
      (num: number, next) => next(num + 1),
      (num: number, next) => next(num + 1),
      (num: number, next) => next(num + 1),
    ])
  ).toEqual(3)
  expect(
    await applyMiddleware(0, [
      (num: number, next) => next(),
      (num: number, next) => next(num + 1),
      (num: number, next) => next(num + 1),
    ])
  ).toEqual(2)
  // A middleware that never calls next() leaves the promise pending:
  // the resolve callback must not fire even after a short wait.
  const resolved = jest.fn()
  applyMiddleware(0, [
    (num: number, next) => next(num + 1),
    () => '123',
    (num: number, next) => next(num + 1),
  ]).then(resolved)
  await sleep(16)
  expect(resolved).toBeCalledTimes(0)
})
test('applyMiddleware with error', async () => {
try {
await applyMiddleware(0, [
() => {
throw 'this is error'
},
])
} catch (e) {
expect(e).toEqual('this is error')
}
}) | the_stack |
import { getHash } from "../../src/utils/getHash";
import { db } from "../../src/databases/databases";
import assert from "assert";
import { LockCategory } from "../../src/types/segments.model";
import { client } from "../utils/httpClient";
// Returns true when every string in `b` also appears in `a` (i.e. `b` is a
// subset of `a`); order and duplicates are ignored. `every` short-circuits
// on the first miss but yields the same result as the original full scan.
const stringDeepEquals = (a: string[], b: string[]): boolean =>
    b.every((e) => a.includes(e));
// REST endpoints exercised by this suite.
const endpoint = "/api/lockCategories";
const submitEndpoint = "/api/skipSegments";
// Fetches every lock row for a video so tests can assert on raw DB state.
const checkLockCategories = (videoID: string): Promise<LockCategory[]> => db.prepare("all", 'SELECT * FROM "lockCategories" WHERE "videoID" = ?', [videoID]);
// VIP user whose hashed ID owns all fixture rows inserted in before().
const lockVIPUser = "lockCategoriesRecordsVIPUser";
const lockVIPUserHash = getHash(lockVIPUser);
describe("lockCategoriesRecords", () => {
before(async () => {
const insertVipUserQuery = 'INSERT INTO "vipUsers" ("userID") VALUES (?)';
await db.prepare("run", insertVipUserQuery, [lockVIPUserHash]);
const insertLockCategoryQuery = 'INSERT INTO "lockCategories" ("userID", "videoID", "category", "reason", "service") VALUES (?, ?, ?, ?, ?)';
await db.prepare("run", insertLockCategoryQuery, [lockVIPUserHash, "no-segments-video-id", "sponsor", "reason-1", "YouTube"]);
await db.prepare("run", insertLockCategoryQuery, [lockVIPUserHash, "no-segments-video-id", "intro", "reason-1", "YouTube"]);
await db.prepare("run", insertLockCategoryQuery, [lockVIPUserHash, "no-segments-video-id-1", "sponsor", "reason-2", "YouTube"]);
await db.prepare("run", insertLockCategoryQuery, [lockVIPUserHash, "no-segments-video-id-1", "intro", "reason-2", "YouTube"]);
await db.prepare("run", insertLockCategoryQuery, [lockVIPUserHash, "lockCategoryVideo", "sponsor", "reason-3", "YouTube"]);
await db.prepare("run", insertLockCategoryQuery, [lockVIPUserHash, "delete-record", "sponsor", "reason-4", "YouTube"]);
await db.prepare("run", insertLockCategoryQuery, [lockVIPUserHash, "delete-record-1", "sponsor", "reason-5", "YouTube"]);
await db.prepare("run", insertLockCategoryQuery, [lockVIPUserHash, "delete-record-1", "intro", "reason-5", "YouTube"]);
});
it("Should update the database version when starting the application", async () => {
const version = (await db.prepare("get", "SELECT key, value FROM config where key = ?", ["version"])).value;
assert.ok(version > 1);
});
it("Should be able to submit categories not in video (http response)", (done) => {
const json = {
videoID: "no-segments-video-id",
userID: "lockCategoriesRecordsVIPUser",
categories: [
"outro",
"shilling",
"shilling",
"shil ling",
"",
"intro",
],
};
const expected = {
submitted: [
"outro",
"shilling",
],
};
client.post(endpoint, json)
.then(res => {
assert.strictEqual(res.status, 200);
assert.deepStrictEqual(res.data, expected);
done();
})
.catch(err => done(err));
});
it("Should be able to submit categories not in video (sql check)", (done) => {
const videoID = "no-segments-video-id-1";
const json = {
videoID,
userID: lockVIPUser,
categories: [
"outro",
"shilling",
"shilling",
"shil ling",
"",
"intro",
],
};
client.post(endpoint, json)
.then(async res => {
assert.strictEqual(res.status, 200);
const result = await checkLockCategories(videoID);
assert.strictEqual(result.length, 4);
const oldRecordNotChangeReason = result.filter(item =>
item.reason === "reason-2" && ["sponsor", "intro"].includes(item.category)
);
const newRecordWithEmptyReason = result.filter(item =>
item.reason === "" && ["outro", "shilling"].includes(item.category)
);
assert.strictEqual(newRecordWithEmptyReason.length, 2);
assert.strictEqual(oldRecordNotChangeReason.length, 2);
done();
})
.catch(err => done(err));
});
it("Should be able to submit categories not in video with reason (http response)", (done) => {
const videoID = "no-segments-video-id";
const json = {
videoID,
userID: lockVIPUser,
categories: [
"outro",
"shilling",
"shilling",
"shil ling",
"",
"intro",
],
reason: "new reason"
};
const expected = {
submitted: [
"outro",
"shilling",
"intro"
],
};
client.post(endpoint, json)
.then(res => {
assert.strictEqual(res.status, 200);
assert.deepStrictEqual(res.data.submitted, expected.submitted);
done();
})
.catch(err => done(err));
});
it("Should be able to submit categories not in video with reason (sql check)", (done) => {
const videoID = "no-segments-video-id-1";
const json = {
videoID,
userID: lockVIPUser,
categories: [
"outro",
"shilling",
"shilling",
"shil ling",
"",
"intro",
],
reason: "new reason"
};
const expectedWithNewReason = [
"outro",
"shilling",
"intro"
];
client.post(endpoint, json)
.then(async res => {
assert.strictEqual(res.status, 200);
const result = await checkLockCategories(videoID);
assert.strictEqual(result.length, 4);
const newRecordWithNewReason = result.filter(item =>
expectedWithNewReason.includes(item.category) && item.reason === "new reason"
);
const oldRecordNotChangeReason = result.filter(item =>
item.reason === "reason-2"
);
assert.strictEqual(newRecordWithNewReason.length, 3);
assert.strictEqual(oldRecordNotChangeReason.length, 1);
done();
})
.catch(err => done(err));
});
it("Should be able to submit categories with _ in the category", (done) => {
const json = {
videoID: "underscore",
userID: lockVIPUser,
categories: [
"word_word",
],
};
client.post(endpoint, json)
.then(async res => {
assert.strictEqual(res.status, 200);
const result = await checkLockCategories("underscore");
assert.strictEqual(result.length, 1);
done();
})
.catch(err => done(err));
});
it("Should be able to submit categories with upper and lower case in the category", (done) => {
const json = {
videoID: "bothCases",
userID: lockVIPUser,
categories: [
"wordWord",
],
};
client.post(endpoint, json)
.then(async res => {
assert.strictEqual(res.status, 200);
const result = await checkLockCategories("bothCases");
assert.strictEqual(result.length, 1);
done();
})
.catch(err => done(err));
});
it("Should not be able to submit categories with $ in the category", (done) => {
const videoID = "specialChar";
const json = {
videoID,
userID: lockVIPUser,
categories: [
"word&word",
],
};
client.post(endpoint, json)
.then(async res => {
assert.strictEqual(res.status, 200);
const result = await checkLockCategories(videoID);
assert.strictEqual(result.length, 0);
done();
})
.catch(err => done(err));
});
it("Should return 400 for missing params", (done) => {
client.post(endpoint, {})
.then(res => {
assert.strictEqual(res.status, 400);
done();
})
.catch(err => done(err));
});
it("Should return 400 for no categories", (done) => {
const json: any = {
videoID: "test",
userID: "test",
categories: [],
};
client.post(endpoint, json)
.then(res => {
assert.strictEqual(res.status, 400);
done();
})
.catch(err => done(err));
});
it("Should return 400 for no userID", (done) => {
const json: any = {
videoID: "test",
userID: null,
categories: ["sponsor"],
};
client.post(endpoint, json)
.then(res => {
assert.strictEqual(res.status, 400);
done();
})
.catch(err => done(err));
});
it("Should return 400 for no videoID", (done) => {
const json: any = {
videoID: null,
userID: "test",
categories: ["sponsor"],
};
client.post(endpoint, json)
.then(res => {
assert.strictEqual(res.status, 400);
done();
})
.catch(err => done(err));
});
it("Should return 400 object categories", (done) => {
const json = {
videoID: "test",
userID: "test",
categories: {},
};
client.post(endpoint, json)
.then(res => {
assert.strictEqual(res.status, 400);
done();
})
.catch(err => done(err));
});
it("Should return 400 bad format categories", (done) => {
const json = {
videoID: "test",
userID: "test",
categories: "sponsor",
};
client.post(endpoint, json)
.then(res => {
assert.strictEqual(res.status, 400);
done();
})
.catch(err => done(err));
});
it("Should return 403 if user is not VIP", (done) => {
const json = {
videoID: "test",
userID: "test",
categories: [
"sponsor",
],
};
client.post(endpoint, json)
.then(res => {
assert.strictEqual(res.status, 403);
done();
})
.catch(err => done(err));
});
it("Should be able to delete a lockCategories record", (done) => {
const videoID = "delete-record";
const json = {
videoID,
userID: lockVIPUser,
categories: [
"sponsor",
],
};
client.delete(endpoint, { data: json })
.then(async res => {
assert.strictEqual(res.status, 200);
const result = await checkLockCategories(videoID);
assert.strictEqual(result.length, 0);
done();
})
.catch(err => done(err));
});
it("Should be able to delete one lockCategories record without removing another", (done) => {
const videoID = "delete-record-1";
const json = {
videoID,
userID: lockVIPUser,
categories: [
"sponsor",
],
};
client.delete(endpoint, { data: json })
.then(async res => {
assert.strictEqual(res.status, 200);
const result = await checkLockCategories(videoID);
assert.strictEqual(result.length, 1);
done();
})
.catch(err => done(err));
});
    /*
     * Submission tests in this file do not check database records, only status codes.
     * To test the submission code properly see ./test/cases/postSkipSegments.js
     */
    // Fixture video that (per the tests below) has a lock-category record for "sponsor"
    const lockedVideoID = "lockCategoryVideo";
    // Regular (non-VIP) user used for the segment submissions below
    const testSubmitUser = "testman42-qwertyuiopasdfghjklzxcvbnm";
it("Should not be able to submit a segment to a video with a lock-category record (single submission)", (done) => {
client.post(submitEndpoint, {
userID: testSubmitUser,
videoID: lockedVideoID,
segments: [{
segment: [20, 40],
category: "sponsor",
}],
})
.then(res => {
assert.strictEqual(res.status, 403);
done();
})
.catch(err => done(err));
});
it("Should not be able to submit segments to a video where any of the submissions with a no-segment record", (done) => {
client.post(submitEndpoint, {
userID: testSubmitUser,
videoID: lockedVideoID,
segments: [{
segment: [20, 40],
category: "sponsor",
}, {
segment: [50, 60],
category: "intro",
}]
})
.then(res => {
assert.strictEqual(res.status, 403);
done();
})
.catch(err => done(err));
});
it("Should be able to submit a segment to a video with a different no-segment record", (done) => {
client.post(submitEndpoint, {
userID: testSubmitUser,
videoID: lockedVideoID,
segments: [{
segment: [20, 40],
category: "intro",
}],
})
.then(res => {
assert.strictEqual(res.status, 200);
done();
})
.catch(err => done(err));
});
it("Should be able to submit a segment to a video with no no-segment records", (done) => {
client.post(submitEndpoint, {
userID: testSubmitUser,
videoID: "normalVideo",
segments: [{
segment: [20, 40],
category: "intro",
}],
})
.then(res => {
assert.strictEqual(res.status, 200);
done();
})
.catch(err => done(err));
});
it("should be able to get existing category lock", (done) => {
const expected = {
categories: [
"sponsor",
"intro",
"outro",
"shilling"
],
};
client.get(endpoint, { params: { videoID: "no-segments-video-id" } })
.then(res => {
assert.strictEqual(res.status, 200);
const data = res.data;
assert.ok(stringDeepEquals(data.categories, expected.categories));
done();
})
.catch(err => done(err));
});
});
import { Query, parseQuery, Condition, WhereClause } from 'soql-parser-js';
import { Common } from "../../components/common_components/common";
import { CONSTANTS } from "../../components/common_components/statics";
import { Logger, RESOURCES, LOG_MESSAGE_VERBOSITY, LOG_MESSAGE_TYPE } from "../../components/common_components/logger";
import { Sfdx } from "../../components/common_components/sfdx";
import {
composeQuery,
getComposedField,
Field as SOQLField
} from 'soql-parser-js';
import { ScriptObject, MigrationJob as Job, CommandExecutionError, ScriptOrg, Script, ScriptMockField, TaskData, TaskOrgData, CachedCSVContent, ProcessedData } from "..";
import SFieldDescribe from "../sf_models/sfieldDescribe";
import * as fs from 'fs';
import * as deepClone from 'deep.clone';
import { BulkApiV2_0Engine } from "../../components/api_engines/bulkApiV2_0Engine";
import { IApiEngine } from "../api_models/helper_interfaces";
import { BulkApiV1_0Engine } from "../../components/api_engines/bulkApiV1_0Engine";
import { RestApiEngine } from "../../components/api_engines/restApiEngine";
const alasql = require("alasql");
import casual = require("casual");
import { MockGenerator } from '../../components/common_components/mockGenerator';
import { ICSVIssueCsvRow, IMissingParentLookupRecordCsvRow, IMockField, IFieldMapping, IFieldMappingResult } from '../common_models/helper_interfaces';
import { ADDON_EVENTS, DATA_MEDIA_TYPE, MESSAGE_IMPORTANCE, OPERATION, RESULT_STATUSES, SPECIAL_MOCK_PATTERN_TYPES } from '../../components/common_components/enumerations';
import { ApiInfo } from '../api_models';
// Register SFDMU's custom mock-data generators on the shared `casual` instance
// before any task uses data anonymization/mocking.
MockGenerator.createCustomGenerators(casual);
export default class MigrationJobTask {
    // The configuration (script) object this task migrates
    scriptObject: ScriptObject;
    // The parent migration job this task belongs to
    job: Job;
    // Total record counts per side, populated by getTotalRecordsCountAsync()
    // (note: "Recors" spelling is part of the public field names and kept as-is)
    sourceTotalRecorsCount: number = 0;
    targetTotalRecorsCount: number = 0;
    // API engine selected for the current CRUD operation (see createApiEngine)
    apiEngine: IApiEngine;
    processedData: ProcessedData;
    // Current update pass direction
    updateMode: 'forwards' | 'backwards';
    // Progress callback invoked by the API engines during CRUD execution
    apiProgressCallback: (apiResult: ApiInfo) => void;
    constructor(init: Partial<MigrationJobTask>) {
        // Shallow-copy any provided properties onto this instance
        if (init) {
            Object.assign(this, init);
        }
    }
    // API name of the processed sObject
    get sObjectName(): string {
        return this.scriptObject && this.scriptObject.name;
    }
    // The parent export script of this task's script object
    get script(): Script {
        return this.scriptObject.script;
    }
    get logger(): Logger {
        return this.script.logger;
    }
    // The CRUD operation configured for this object
    get operation(): OPERATION {
        return this.scriptObject.operation;
    }
    // The raw external id field definition
    get externalId(): string {
        return this.scriptObject.externalId;
    }
    // The external id passed through Common.getComplexField
    // (presumably the composite/"complex" field representation — see Common)
    get complexExternalId(): string {
        return Common.getComplexField(this.scriptObject.externalId);
    }
    // Per-task runtime data holders
    data: TaskData = new TaskData(this);
    sourceData: TaskOrgData = new TaskOrgData(this, true);
    targetData: TaskOrgData = new TaskOrgData(this, false);
    //------------------
    // Scratch data; filteredQueryValueCache presumably caches values used to
    // build filtered (IN-clause) queries — confirm against _createFilteredQueries
    tempData = {
        filteredQueryValueCache: new Map<string, Set<string>>()
    }
// ----------------------- Public methods -------------------------------------------
/**
* Check the structure of the CSV source file.
*
* @returns {Promise<void>}
* @memberof MigrationJob
*/
async validateCSV(): Promise<Array<ICSVIssueCsvRow>> {
let csvIssues = new Array<ICSVIssueCsvRow>();
// Check csv file --------------------------------------
if (!fs.existsSync(this.data.sourceCsvFilename)) {
// Missing or empty file
csvIssues.push({
"Date update": Common.formatDateTime(new Date()),
"Child sObject": this.sObjectName,
"Child field": null,
"Child value": null,
"Parent sObject": null,
"Parent field": null,
"Parent value": null,
"Error": this.logger.getResourceString(RESOURCES.csvFileIsEmpty)
});
return csvIssues;
}
// Read the csv header row
let csvColumnsRow = await Common.readCsvFileAsync(this.data.sourceCsvFilename, 1);
if (csvColumnsRow.length == 0) {
return csvIssues;
}
// Check columns in the csv file ------------------------
// Only checking for the mandatory fields (to be updated),
// Not checking for all fields in the query (like RecordType.DevelopeName).
[...this.data.fieldsToUpdateMap.keys()].forEach(fieldName => {
const columnExists = Object.keys(csvColumnsRow[0]).some(columnName => {
columnName = columnName.trim();
let nameParts = columnName.split('.');
return columnName == fieldName
|| nameParts.some(namePart => namePart == fieldName);
});
if (!columnExists) {
// Column is missing in the csv file
csvIssues.push({
"Date update": Common.formatDateTime(new Date()),
"Child sObject": this.sObjectName,
"Child field": fieldName,
"Child value": null,
"Parent sObject": null,
"Parent field": null,
"Parent value": null,
"Error": this.logger.getResourceString(RESOURCES.columnsMissingInCSV)
});
}
});
return csvIssues;
}
    /**
     * Try to add missing lookup csv columns
     * - Adds missing id column on Insert operation.
     * - Adds missing lookup columns like: Account__r.Name, Account__c
     *
     * @param {CachedCSVContent} cachedCSVContent The cached content of the source csv files
     * @returns {Promise<Array<ICSVIssueCsvRow>>} List of csv issues detected while repairing
     * @memberof MigrationJobTask
     */
    async repairCSV(cachedCSVContent: CachedCSVContent): Promise<Array<ICSVIssueCsvRow>> {
        let self = this;
        let csvIssues = new Array<ICSVIssueCsvRow>();
        // Read (or take from the cache) the source csv file of this task
        let currentFileMap: Map<string, any> = await Common.readCsvFileOnceAsync(cachedCSVContent.csvDataCacheMap,
            this.data.sourceCsvFilename,
            null, null,
            false, false);
        if (currentFileMap.size == 0) {
            // CSV file is empty or does not exist.
            // Missing csvs were already reported. No additional report provided.
            return csvIssues;
        }
        let firstRow = currentFileMap.values().next().value;
        // Removes extra spaces from column headers
        ___trimColumnNames(firstRow);
        if (this.scriptObject.useCSVValuesMapping && this.job.valueMapping.size > 0) {
            // Update csv rows with csv value mapping
            ___mapCSVValues(firstRow);
        }
        if (!firstRow.hasOwnProperty("Id")) {
            // Add missing id column
            ___addMissingIdColumn();
            // Update child lookup id columns in the other csv files
            let child__rSFields = this.scriptObject.externalIdSFieldDescribe.child__rSFields;
            for (let fieldIndex = 0; fieldIndex < child__rSFields.length; fieldIndex++) {
                const childIdSField = child__rSFields[fieldIndex].idSField;
                await ___updateChildOriginalIdColumnsAsync(childIdSField);
            }
        }
        // Add missing lookup columns (Account__c / Account__r.Name pairs)
        for (let fieldIndex = 0; fieldIndex < this.data.fieldsInQuery.length; fieldIndex++) {
            const sField = this.data.fieldsInQueryMap.get(this.data.fieldsInQuery[fieldIndex]);
            if (sField.lookup && (!firstRow.hasOwnProperty(sField.fullName__r) || !firstRow.hasOwnProperty(sField.nameId))) {
                await ___addMissingLookupColumnsAsync(sField);
            }
        }
        // RecordType.DeveloperName old-fashion backward support
        ___fixOldRecordTypeColumns();
        return csvIssues;
        // ------------------ Internal functions ------------------------- //
/**
* Updates csv rows according to provided value mapping file
*
* @param {*} firstRow
*/
function ___mapCSVValues(firstRow: any) {
self.logger.infoNormal(RESOURCES.mappingRawCsvValues, self.sObjectName);
let fields = Object.keys(firstRow);
let csvRows = [...currentFileMap.values()];
fields.forEach(field => {
let key = self.sObjectName + field;
let valuesMap = self.job.valueMapping.get(key);
if (valuesMap && valuesMap.size > 0) {
csvRows.forEach((csvRow: any) => {
let rawValue = (String(csvRow[field]) || "").trim();
if (valuesMap.has(rawValue)) {
csvRow[field] = valuesMap.get(rawValue);
}
});
}
});
cachedCSVContent.updatedFilenames.add(self.data.sourceCsvFilename);
}
/**
* Trim csv header columns to remove extra unvisible symbols and spaces
*
* @param {*} firstRow
*/
function ___trimColumnNames(firstRow: any) {
let columnsToUpdate = new Array<string>();
Object.keys(firstRow).forEach(field => {
if (field != field.trim()) {
columnsToUpdate.push(field);
}
});
if (columnsToUpdate.length > 0) {
let csvRows = [...currentFileMap.values()];
columnsToUpdate.forEach(column => {
let newColumn = column.trim();
csvRows.forEach((csvRow: any) => {
csvRow[newColumn] = csvRow[column];
delete csvRow[column];
});
});
cachedCSVContent.updatedFilenames.add(self.data.sourceCsvFilename);
}
}
/**
* Add Id column to the current csv file (if it is missing),
* then update all its child lookup "__r" columns in other csv files
*/
function ___addMissingIdColumn() {
[...currentFileMap.keys()].forEach(id => {
let csvRow = currentFileMap.get(id);
csvRow["Id"] = id;
});
cachedCSVContent.updatedFilenames.add(self.data.sourceCsvFilename);
}
        /**
         * Replaces the RecordType.DeveloperName column label of the old-formatted csv file
         * with RecordType.$$DeveloperName$NamespacePrefix$SobjectType
         * (current external id for the RecordType object)
         * ----
         * Also combines values of multiple columns of the old-formatted RecordType csv file
         * (DeveloperName | NamespacePrefix | SobjectType)
         * into the single value under the column label $$DeveloperName$NamespacePrefix$SobjectType
         *
         */
        function ___fixOldRecordTypeColumns() {
            if (self.sObjectName != CONSTANTS.RECORD_TYPE_SOBJECT_NAME) {
                // Current object references RecordType through a lookup field:
                // rename the old-style column to the current external id label.
                let sField = [...self.data.fieldsInQueryMap.values()].find(field => {
                    return field.lookup && field.parentLookupObject.name == CONSTANTS.RECORD_TYPE_SOBJECT_NAME;
                });
                if (sField) {
                    let oldColumnName = CONSTANTS.OLD_DEFAULT_RECORD_TYPE_ID_FIELD_R_NAME;
                    let newColumnName = sField.fullName__r;
                    let isUpdated = false;
                    [...currentFileMap.values()].forEach(row => {
                        if (row.hasOwnProperty(oldColumnName)) {
                            isUpdated = true;
                            row[newColumnName] = row[oldColumnName];
                            delete row[oldColumnName];
                        }
                    });
                    if (isUpdated) {
                        cachedCSVContent.updatedFilenames.add(self.data.sourceCsvFilename);
                    }
                }
            } else {
                // Current object IS RecordType:
                // merge the separate old-style columns into the single complex external id column.
                let oldColumnNames = self.scriptObject.externalId.split(CONSTANTS.COMPLEX_FIELDS_SEPARATOR);
                let newColumnName = self.scriptObject.complexExternalId;
                let isUpdated = false;
                [...currentFileMap.values()].forEach(row => {
                    if (!row.hasOwnProperty(newColumnName)) {
                        isUpdated = true;
                        // Join the non-empty old column values, removing them from the row
                        row[newColumnName] = oldColumnNames.map(name => {
                            let value = row[name];
                            if (value) {
                                delete row[name];
                                return value;
                            }
                        }).filter(value => !!value).join(CONSTANTS.COMPLEX_FIELDS_SEPARATOR);
                    }
                });
                if (isUpdated) {
                    cachedCSVContent.updatedFilenames.add(self.data.sourceCsvFilename);
                }
            }
        }
        /**
         * Add all missing lookup columns (like Account__c, Account__r.Name)
         *
         * @param {SFieldDescribe} sField sField to process
         * @returns {Promise<void>}
         */
        async function ___addMissingLookupColumnsAsync(sField: SFieldDescribe): Promise<void> {
            let columnName__r = sField.fullOriginalName__r;
            let columnNameId = sField.nameId;
            let parentExternalId = sField.parentLookupObject.complexOriginalExternalId;
            let parentTask = self.job.getTaskBySObjectName(sField.parentLookupObject.name);
            if (parentTask) {
                // Index the parent csv rows by their external id value for O(1) lookups
                let parentFileMap: Map<string, any> = await Common.readCsvFileOnceAsync(cachedCSVContent.csvDataCacheMap, parentTask.data.sourceCsvFilename);
                let parentCSVRowsMap = new Map<string, any>();
                [...parentFileMap.values()].forEach(parentCsvRow => {
                    let key = parentTask.getRecordValue(parentCsvRow, parentExternalId);
                    if (key) {
                        parentCSVRowsMap.set(key, parentCsvRow);
                    }
                });
                let isFileChanged = false;
                [...currentFileMap.keys()].forEach(id => {
                    let csvRow = currentFileMap.get(id);
                    if (!csvRow.hasOwnProperty(columnNameId)) {
                        if (!csvRow.hasOwnProperty(columnName__r)) {
                            // Missing both id and __r columns
                            // => fill them with next incremental numbers
                            // Since the missing columns were already reported no additional report provided.
                            isFileChanged = true;
                            csvRow[columnNameId] = cachedCSVContent.nextId;
                            csvRow[columnName__r] = cachedCSVContent.nextId;
                            return;
                        }
                        // Missing id column but __r column provided.
                        // NOTE(review): the external id value is read from the child row under the
                        // parent's external id column label — presumably the csv layouts agree; confirm.
                        let desiredExternalIdValue = parentTask.getRecordValue(csvRow, parentExternalId);
                        if (desiredExternalIdValue) {
                            isFileChanged = true;
                            let parentCsvRow = parentCSVRowsMap.get(desiredExternalIdValue);
                            if (!parentCsvRow) {
                                // No parent row carries this external id value
                                // => report the issue and fill the id with a placeholder
                                csvIssues.push({
                                    "Date update": Common.formatDateTime(new Date()),
                                    "Child sObject": self.sObjectName,
                                    "Child field": columnName__r,
                                    "Child value": desiredExternalIdValue,
                                    "Parent sObject": sField.parentLookupObject.name,
                                    "Parent field": parentExternalId,
                                    "Parent value": null,
                                    "Error": self.logger.getResourceString(RESOURCES.missingParentRecordForGivenLookupValue)
                                });
                                csvRow[columnNameId] = cachedCSVContent.nextId;
                            } else {
                                csvRow[columnNameId] = parentCsvRow["Id"];
                            }
                        }
                    } else if (!csvRow.hasOwnProperty(columnName__r)) {
                        // NOTE(review): the inner check below can never be true — this branch is
                        // only reached when the row DOES have the columnNameId property.
                        if (!csvRow.hasOwnProperty(columnNameId)) {
                            // Missing both id and __r columns
                            // => fill them with next incremental numbers
                            // Since the missing columns were already reported no additional report provided.
                            isFileChanged = true;
                            csvRow[columnNameId] = cachedCSVContent.nextId;
                            csvRow[columnName__r] = cachedCSVContent.nextId;
                            return;
                        }
                        // Missing __r column but id column provided.
                        // Create __r column.
                        let idValue = csvRow[columnNameId];
                        if (idValue) {
                            isFileChanged = true;
                            let parentCsvRow = parentFileMap.get(idValue);
                            if (!parentCsvRow) {
                                // No parent row with this Id => report and fill with a placeholder
                                csvIssues.push({
                                    "Date update": Common.formatDateTime(new Date()),
                                    "Child sObject": self.sObjectName,
                                    "Child field": columnNameId,
                                    "Child value": idValue,
                                    "Parent sObject": sField.parentLookupObject.name,
                                    "Parent field": "Id",
                                    "Parent value": null,
                                    "Error": self.logger.getResourceString(RESOURCES.missingParentRecordForGivenLookupValue)
                                });
                                csvRow[columnName__r] = cachedCSVContent.nextId;
                            } else {
                                isFileChanged = true;
                                csvRow[columnName__r] = parentCsvRow[parentExternalId];
                            }
                        }
                    }
                });
                if (isFileChanged) {
                    cachedCSVContent.updatedFilenames.add(self.data.sourceCsvFilename);
                }
            }
        }
        /**
         * When Id column was added
         * - updates child lookup id columns
         * for all other objects.
         * For ex. if the current object is "Account", it will update
         * the child lookup id column "Account__c" of the child "Case" object
         *
         * @param {SFieldDescribe} childIdSField Child lookup id sField to process
         * @returns {Promise<void>}
         */
        async function ___updateChildOriginalIdColumnsAsync(childIdSField: SFieldDescribe): Promise<void> {
            let columnChildOriginalName__r = childIdSField.fullOriginalName__r;
            let columnChildIdName__r = childIdSField.fullIdName__r;
            let columnChildNameId = childIdSField.nameId;
            let parentOriginalExternalIdColumnName = self.scriptObject.complexOriginalExternalId;
            // Nothing to do when the external id already is the Id column itself
            if (parentOriginalExternalIdColumnName != "Id") {
                let childTask = self.job.getTaskBySObjectName(childIdSField.scriptObject.name);
                if (childTask) {
                    let childFileMap: Map<string, any> = await Common.readCsvFileOnceAsync(cachedCSVContent.csvDataCacheMap, childTask.data.sourceCsvFilename);
                    let isFileChanged = false;
                    if (childFileMap.size > 0) {
                        let childCSVFirstRow = childFileMap.values().next().value;
                        if (childCSVFirstRow.hasOwnProperty(columnChildOriginalName__r)) {
                            // Index the current (parent) csv rows by their external id value
                            let parentCSVExtIdMap = new Map<string, any>();
                            [...currentFileMap.values()].forEach(csvRow => {
                                let key = self.getRecordValue(csvRow, parentOriginalExternalIdColumnName);
                                if (key) {
                                    parentCSVExtIdMap.set(key, csvRow);
                                }
                            });
                            // Copy each matched parent Id into the child's id columns.
                            // NOTE(review): the child row's external id value is read under the
                            // parent's external id column label — presumably the layouts agree; confirm.
                            [...childFileMap.values()].forEach(csvRow => {
                                let extIdValue = self.getRecordValue(csvRow, parentOriginalExternalIdColumnName);
                                if (extIdValue && parentCSVExtIdMap.has(extIdValue)) {
                                    csvRow[columnChildNameId] = parentCSVExtIdMap.get(extIdValue)["Id"];
                                    csvRow[columnChildIdName__r] = csvRow[columnChildNameId];
                                    isFileChanged = true;
                                }
                            });
                        } else {
                            // The child csv lacks the lookup __r column entirely => cannot update, report it
                            csvIssues.push({
                                "Date update": Common.formatDateTime(new Date()),
                                "Child sObject": childTask.sObjectName,
                                "Child field": columnChildOriginalName__r,
                                "Child value": null,
                                "Parent sObject": self.sObjectName,
                                "Parent field": "Id",
                                "Parent value": null,
                                "Error": self.logger.getResourceString(RESOURCES.cantUpdateChildLookupCSVColumn)
                            });
                        }
                    }
                    if (isFileChanged) {
                        cachedCSVContent.updatedFilenames.add(childTask.data.sourceCsvFilename);
                    }
                }
            }
        }
}
/**
* Get record value by given property name
* for this sobject
*
* @param {*} record The record
* @param {string} propName The property name to extract value from the record object
* @memberof MigrationJobTask
*/
getRecordValue(record: any, propName: string): any {
if (!record) return null;
return record[propName];
}
    /**
     * Get CSV filename for this sobject including the full directory path.
     * Thin wrapper around Common.getCSVFilename.
     *
     * @param {string} rootPath The root path to append the filename to it
     * @param {string} [pattern] Optional filename pattern passed through to the helper
     * @returns {string}
     * @memberof MigrationJobTask
     */
    getCSVFilename(rootPath: string, pattern?: string): string {
        return Common.getCSVFilename(rootPath, this.sObjectName, pattern);
    }
/**
* Creates SOQL query to retrieve records
*
* @param {Array<string>} [fieldNames] Field names to include in the query,
* pass undefined value to use all fields
* of the current task
* @param {boolean} [removeLimits=false] true to remove LIMIT, OFFSET, ORDERBY clauses
* @param {Query} [parsedQuery] Default parsed query.
* @param {boolan} [useFieldMapping] Transform query string according to the field mapping before return.
* @returns {string}
* @memberof MigrationJobTask
*/
createQuery(fieldNames?: Array<string>, removeLimits: boolean = false, parsedQuery?: Query, useFieldMapping: boolean = false): string {
parsedQuery = parsedQuery || this.scriptObject.parsedQuery;
let tempQuery = deepClone.deepCloneSync(parsedQuery, {
absolute: true,
});
if (!fieldNames)
tempQuery.fields = this.data.fieldsInQuery.map(fieldName => getComposedField(fieldName));
else
tempQuery.fields = fieldNames.map(fieldName => getComposedField(fieldName));
if (removeLimits) {
tempQuery.limit = undefined;
tempQuery.offset = undefined;
tempQuery.orderBy = undefined;
}
let query = composeQuery(tempQuery);
if (useFieldMapping) {
query = this._mapSourceQueryToTarget(query, parsedQuery.sObject).query;
}
return query;
}
/**
* Converts full query string into short form
* to be displayed in the stdout
*
* @param {string} query
* @returns {string}
* @memberof MigrationJobTask
*/
createShortQueryString(longString: string): string {
let parts = longString.split("FROM");
return parts[0].substr(0, CONSTANTS.SHORT_QUERY_STRING_MAXLENGTH) +
(parts[0].length > CONSTANTS.SHORT_QUERY_STRING_MAXLENGTH ? "..." : "") +
" FROM "
+ parts[1].substr(0, CONSTANTS.SHORT_QUERY_STRING_MAXLENGTH) +
(parts[1].length > CONSTANTS.SHORT_QUERY_STRING_MAXLENGTH ? "..." : "");
}
/**
* Create SOQL query to delete records
*
* @returns
* @memberof MigrationJobTask
*/
createDeleteQuery() {
if (!this.scriptObject.parsedDeleteQuery) {
return this.createQuery(["Id"], true, null, this.scriptObject.useFieldMapping);
} else {
return this.createQuery(["Id"], true, this.scriptObject.parsedDeleteQuery, this.scriptObject.useFieldMapping);
}
}
/**
* Retireve the total records count
*
* @returns {Promise<void>}
* @memberof MigrationJobTask
*/
async getTotalRecordsCountAsync(): Promise<void> {
if (this.sourceData.media == DATA_MEDIA_TYPE.Org) {
let queryOrNumber = this.createQuery(['COUNT(Id) CNT'], true);
try {
let apiSf = new Sfdx(this.sourceData.org);
let ret = await apiSf.queryAsync(queryOrNumber, false);
this.sourceTotalRecorsCount = Number.parseInt(ret.records[0]["CNT"]);
if (this.scriptObject.parsedQuery.limit) {
this.sourceTotalRecorsCount = Math.min(this.sourceTotalRecorsCount, this.scriptObject.parsedQuery.limit);
}
this.logger.infoNormal(RESOURCES.totalRecordsAmount, this.sObjectName,
this.sourceData.resourceString_Source_Target, String(this.sourceTotalRecorsCount));
} catch (ex) {
// Aggregate queries does not suppoted
this.sourceTotalRecorsCount = this.scriptObject.parsedQuery.limit || 0;
}
}
if (this.targetData.media == DATA_MEDIA_TYPE.Org) {
let queryOrNumber = this.createQuery(['COUNT(Id) CNT'], true, null, true);
try {
let apiSf = new Sfdx(this.targetData.org);
let ret = await apiSf.queryAsync(queryOrNumber, false);
this.targetTotalRecorsCount = Number.parseInt(ret.records[0]["CNT"]);
if (this.scriptObject.parsedQuery.limit) {
this.targetTotalRecorsCount = Math.min(this.targetTotalRecorsCount, this.scriptObject.parsedQuery.limit);
}
this.logger.infoNormal(RESOURCES.totalRecordsAmount, this.sObjectName,
this.targetData.resourceString_Source_Target, String(this.targetTotalRecorsCount));
} catch (ex) {
// Aggregate queries does not suppoted
this.targetTotalRecorsCount = this.scriptObject.parsedQuery.limit || 0;
}
}
}
/**
* Delete old records from the target org
*
* @returns {Promise<void>}
* @memberof MigrationJobTask
*/
async deleteOldTargetRecords(): Promise<boolean> {
// Checking
if (!(this.targetData.media == DATA_MEDIA_TYPE.Org
&& this.scriptObject.operation != OPERATION.Readonly
&& this.scriptObject.deleteOldData)) {
this.logger.infoNormal(RESOURCES.nothingToDelete, this.sObjectName);
return false;
}
// Querying
this.logger.infoNormal(RESOURCES.deletingTargetSObjectRecords, this.sObjectName);
let soql = this.createDeleteQuery();
let apiSf = new Sfdx(this.targetData.org);
let queryResult = await apiSf.queryAsync(soql, this.targetData.useBulkQueryApi);
if (queryResult.totalSize == 0) {
this.logger.infoNormal(RESOURCES.nothingToDelete, this.sObjectName);
return false;
}
// Deleting
this.logger.infoVerbose(RESOURCES.deletingNRecordsWillBeDeleted, this.sObjectName, String(queryResult.totalSize));
let recordsToDelete = queryResult.records.map(x => {
return {
Id: x["Id"]
}
});
this.createApiEngine(this.targetData.org, OPERATION.Delete, recordsToDelete.length, true);
let resultRecords = await this.apiEngine.executeCRUD(recordsToDelete, this.apiProgressCallback);
if (resultRecords == null) {
this._apiOperationError(OPERATION.Delete);
}
// Done
this.logger.infoVerbose(RESOURCES.deletingRecordsCompleted, this.sObjectName);
return true;
}
/**
* Performs target records hierarchical deletion.
*
* @returns {Promise<number>} Total amount of deleted records
* @memberof MigrationJobTask
*/
async deleteRecords(): Promise<number> {
// DELETE ORG :::::::::
// Create delete data => only the target records which are existing in the Source
let recordsToDelete = this.sourceData.records.map(sourceRecord => {
let targetRecord = this.data.sourceToTargetRecordMap.get(sourceRecord);
if (targetRecord) {
return {
Id: targetRecord["Id"]
}
}
}).filter(record => !!record);
this.logger.infoVerbose(RESOURCES.deletingNRecordsWillBeDeleted, this.sObjectName, String(recordsToDelete.length));
// Delete records
if (recordsToDelete.length == 0) {
return 0;
}
this.createApiEngine(this.targetData.org, OPERATION.Delete, recordsToDelete.length, true);
let resultRecords = await this.apiEngine.executeCRUD(recordsToDelete, this.apiProgressCallback);
if (resultRecords == null) {
this._apiOperationError(OPERATION.Delete);
}
this.logger.infoVerbose(RESOURCES.deletingRecordsCompleted, this.sObjectName);
return resultRecords.length;
}
/**
* Retrieve records for this task
*
* @param {number} queryMode The mode of record processing
* @param {boolean} reversed If TRUE - queries from the child related object to parent object
* (selects all parent objects that exist in the child objects)
* forward: parent <== *child (before, prev)
* backward: *child ==> parent (after, next)
* If FALSE - queries from the parent related object to child object
* (selects all child objects that exist in the parent objects)
* forward: child ==> *parent (before, prev)
* backward: *parent <== child (after, next)
* @returns {Promise<void>}
* @memberof MigrationJobTask
*/
async retrieveRecords(queryMode: "forwards" | "backwards" | "target", reversed: boolean): Promise<boolean> {
let hasRecords = false;
let records: Array<any> = new Array<any>();
// Checking job status *********
if (this.operation == OPERATION.Delete
&& !this.scriptObject.isDeletedFromSourceOperation) {
return hasRecords;
};
// Read SOURCE DATA *********************************************************************************************
// **************************************************************************************************************
if (queryMode != "target") {
// Read main data *************************************
// ****************************************************
if (this.sourceData.media == DATA_MEDIA_TYPE.File && queryMode == "forwards") {
// Read from the SOURCE CSV FILE ***********************************
if (!reversed) {
let query = this.createQuery();
// Start message ------
this.logger.infoNormal(RESOURCES.queryingAll, this.sObjectName, this.sourceData.resourceString_Source_Target, this.data.resourceString_csvFile, this.data.getResourceString_Step(queryMode));
let sfdx = new Sfdx(this.targetData.org);
records = await sfdx.retrieveRecordsAsync(query, false, this.data.sourceCsvFilename, this.targetData.fieldsMap);
hasRecords = true;
}
} else if (this.sourceData.media == DATA_MEDIA_TYPE.Org) {
// Read from the SOURCE ORG **********************************************
if (this.scriptObject.processAllSource && queryMode == "forwards" && !reversed) {
// All records *********** //
let query = this.createQuery();
// Start message ------
this.logger.infoNormal(RESOURCES.queryingAll, this.sObjectName, this.sourceData.resourceString_Source_Target, this.data.resourceString_org,
this.data.getResourceString_Step(queryMode));
// Query string message ------
this.logger.infoVerbose(RESOURCES.queryString, this.sObjectName, this.createShortQueryString(query));
// Fetch records
let sfdx = new Sfdx(this.sourceData.org, this._sourceFieldMapping);
records = await sfdx.retrieveRecordsAsync(query, this.sourceData.useBulkQueryApi);
hasRecords = true;
} else if (!this.scriptObject.processAllSource) {
// Filtered records ************ //
let queries = this._createFilteredQueries(queryMode, reversed);
if (queries.length > 0) {
// Start message ------
this.logger.infoNormal(RESOURCES.queryingIn, this.sObjectName, this.sourceData.resourceString_Source_Target, this.data.resourceString_org, this.data.getResourceString_Step(queryMode));
// Fetch records
records = await this._retrieveFilteredRecords(queries, this.sourceData, this._sourceFieldMapping);
hasRecords = true;
}
}
}
if (hasRecords) {
// Map records --------
this._mapRecords(records);
// Set external id map ---------
let newRecordsCount = this._setExternalIdMap(records, this.sourceData.extIdRecordsMap, this.sourceData.idRecordsMap);
// Completed message ------
this.logger.infoNormal(RESOURCES.queryingFinished, this.sObjectName, this.sourceData.resourceString_Source_Target, String(newRecordsCount));
}
// Read SELF REFERENCE records from the SOURCE *************
// *********************************************************
if (this.sourceData.media == DATA_MEDIA_TYPE.Org && queryMode == "forwards"
// When there is allRecords source mode
// => no any addtional records should be fetched,
// so need to skip retrieving the self-reference records a well...
&& !this.sourceData.allRecords
) {
records = new Array<any>();
let inValues: Array<string> = new Array<string>();
for (let fieldIndex = 0; fieldIndex < this.data.fieldsInQuery.length; fieldIndex++) {
const describe = this.data.fieldsInQueryMap.get(this.data.fieldsInQuery[fieldIndex]);
if (describe.isSimpleSelfReference) {
this.sourceData.records.forEach(sourceRec => {
if (sourceRec[describe.name]) {
inValues.push(sourceRec[describe.name]);
}
});
}
}
if (inValues.length > 0) {
// Start message ------
this.logger.infoNormal(RESOURCES.queryingSelfReferenceRecords, this.sObjectName, this.sourceData.resourceString_Source_Target);
inValues = Common.distinctStringArray(inValues);
let sfdx = new Sfdx(this.sourceData.org, this._sourceFieldMapping);
let queries = Common.createFieldInQueries(this.data.fieldsInQuery, "Id", this.sObjectName, inValues);
for (let queryIndex = 0; queryIndex < queries.length; queryIndex++) {
const query = queries[queryIndex];
// Query string message ------
this.logger.infoVerbose(RESOURCES.queryString, this.sObjectName, this.createShortQueryString(query));
// Fetch records
records = records.concat(await sfdx.retrieveRecordsAsync(query));
}
if (queries.length > 0) {
// Map records --------
this._mapRecords(records);
// Set external id map ---------
let newRecordsCount = this._setExternalIdMap(records, this.sourceData.extIdRecordsMap, this.sourceData.idRecordsMap);
// Completed message ------
this.logger.infoNormal(RESOURCES.queryingFinished, this.sObjectName, this.sourceData.resourceString_Source_Target, String(newRecordsCount));
}
}
}
}
// If it's "deleteFromSource" mode -> Always skip retrieving from the target
if (this.scriptObject.isDeletedFromSourceOperation) {
return hasRecords;
}
// Read TARGET DATA ***********************************************************************************
// ****************************************************************************************************
if (queryMode == "target") {
hasRecords = false;
if (this.targetData.media == DATA_MEDIA_TYPE.Org && this.operation != OPERATION.Insert) {
// Read from the TARGET ORG *********
let fieldsInQuery = this.data.fieldsInQuery.filter(field => this.data.fieldsExcludedFromTargetQuery.indexOf(field) < 0);
let query = this.createQuery(fieldsInQuery);
records = new Array<any>();
if (this.scriptObject.processAllTarget) {
// All records ****** //
// Start message ------
this.logger.infoNormal(RESOURCES.queryingAll, this.sObjectName, this.targetData.resourceString_Source_Target, this.data.resourceString_org, this.data.getResourceString_Step(queryMode));
// Query string message ------
this.logger.infoVerbose(RESOURCES.queryString, this.sObjectName, this.createShortQueryString(query));
// Fetch records
let sfdx = new Sfdx(this.targetData.org, this._targetFieldMapping);
records = await sfdx.retrieveRecordsAsync(query, this.targetData.useBulkQueryApi);
hasRecords = true;
} else {
// Filtered records ***** //
let queries = this._createFilteredQueries(queryMode, reversed, fieldsInQuery);
if (queries.length > 0) {
// Start message ------
this.logger.infoNormal(RESOURCES.queryingIn, this.sObjectName, this.targetData.resourceString_Source_Target, this.data.resourceString_org, this.data.getResourceString_Step(queryMode));
// Fetch target records
records = await this._retrieveFilteredRecords(queries, this.targetData, this._targetFieldMapping);
hasRecords = true;
}
}
}
if (hasRecords) {
// Set external id map --------- TARGET
let newRecordsCount = this._setExternalIdMap(records, this.targetData.extIdRecordsMap, this.targetData.idRecordsMap, true);
// Completed message ------
this.logger.infoNormal(RESOURCES.queryingFinished, this.sObjectName, this.targetData.resourceString_Source_Target, String(newRecordsCount));
}
}
return hasRecords;
}
/**
 * Performs target records update.
 *
 * @param {("forwards" | "backwards")} updateMode The current update step
 * @param {(data: ProcessedData) => Promise<void>} warnUserCallbackAsync Async callback used to warn the user
 *                                                                      when parent lookup records are missing
 * @returns {Promise<number>} Total amount of updated records
 * @memberof MigrationJobTask
 */
async updateRecords(updateMode: "forwards" | "backwards", warnUserCallbackAsync: (data: ProcessedData) => Promise<void>): Promise<number> {
    // Captured by the internal closure functions declared below
    let self = this;
    // Set the run context
    this.updateMode = updateMode;
    if (this.scriptObject.isDeletedFromSourceOperation) {
        // "Delete from source" runs only on the "forwards" step
        if (updateMode != "forwards") {
            return 0;
        }
        // DELETE SOURCE RECORDS ::::::::::
        return (await ___deleteSourceRecords());
    }
    if (this.targetData.media == DATA_MEDIA_TYPE.File) {
        // WRITE CSV ::::::::::
        // (the CSV target is written in one pass on the "forwards" step only)
        if (this.operation != OPERATION.Delete && updateMode == "forwards") {
            this.logger.infoNormal(RESOURCES.writingToFile, this.sObjectName, this.data.csvFilename);
            // Pipeline: filter -> mock -> strip excluded columns -> write
            let records = await ___filterRecords(this.sourceData.records);
            records = ___mockRecords(records);
            records = ___removeCSVFileColumns(records);
            await ___writeToTargetCSVFile(records);
            await Common.writeCsvFileAsync(self.data.csvFilename, records, true);
            return records.length;
        }
        return 0;
    }
    // UPDATE ORG :::::::::
    let totalProcessedRecordsAmount = 0;
    let totalNonProcessedRecordsAmount = 0;
    if (this.operation != OPERATION.Readonly && this.operation != OPERATION.Delete) {
        // Non-person Accounts/Contacts + other objects //////////
        // Create data ****
        let data = await ___createUpdateData(false);
        if (data.missingParentLookups.length > 0) {
            // Warn the user about missing parent lookup records
            await warnUserCallbackAsync(data);
        }
        // Process data - main ****
        totalProcessedRecordsAmount += (await ___updateData(data));
        totalNonProcessedRecordsAmount += data.nonProcessedRecordsAmount;
        // Person Accounts/Contacts only /////////////
        if (this.data.isPersonAccountOrContact) {
            // Create data ****
            data = await ___createUpdateData(true);
            if (data.missingParentLookups.length > 0) {
                // Warn user
                await warnUserCallbackAsync(data);
            }
            // Process data - person accounts ****
            totalProcessedRecordsAmount += (await ___updateData(data));
            totalNonProcessedRecordsAmount += data.nonProcessedRecordsAmount;
            // Add Person Contacts when inserting/upserting Person Accounts ****
            if ((this.operation == OPERATION.Insert || this.operation == OPERATION.Upsert) && this.sObjectName == "Account") {
                await ___insertPersonContactsFromPersonAccounts(data);
            }
        }
        // Warn the user about skipped (unchanged) records
        if (totalNonProcessedRecordsAmount > 0) {
            this.logger.infoNormal(RESOURCES.skippedUpdatesWarning, this.sObjectName, String(totalNonProcessedRecordsAmount));
        }
    }
    return totalProcessedRecordsAmount;
    // ------------------------ Internal functions --------------------------
/**
 * Builds the ProcessedData for the current update step:
 * selects the fields to transfer, clones the source records,
 * resolves lookup Id fields against the target and splits the clones
 * into insert / update record sets.
 *
 * @param processPersonAccounts true  => process only Person Account/Contact records,
 *                              false => process Business records / all other objects
 */
async function ___createUpdateData(processPersonAccounts: boolean): Promise<ProcessedData> {
    let processedData = new ProcessedData();
    processedData.processPersonAccounts = processPersonAccounts;
    // Prepare fields /////////
    processedData.fields = self.data.sFieldsToUpdate.filter((field: SFieldDescribe) => {
        if (updateMode == "forwards")
            // For Step 1 : Simple sFields or reference fields with the parent lookup BEFORE
            return field.isSimple || field.isSimpleReference && self.data.prevTasks.indexOf(field.parentLookupObject.task) >= 0;
        else
            // For Step 2 : Reference sFields with the parent lookup AFTER + self
            return field.isSimpleReference && self.data.nextTasks.concat(self).indexOf(field.parentLookupObject.task) >= 0;
    }).concat(new SFieldDescribe({
        // The internal helper field used to correlate cloned records with their sources
        name: CONSTANTS.__ID_FIELD_NAME
    }));
    // Add record Id field ////////
    if (self.operation != OPERATION.Insert) {
        processedData.fields.push(self.data.sFieldsInQuery.filter(field => field.nameId == "Id")[0]);
    }
    // Remove unsupported fields for person accounts/contacts /////////
    if (self.data.isPersonAccountOrContact) {
        if (!processPersonAccounts) {
            processedData.fields = self.sObjectName == "Account" ?
                processedData.fields.filter((field: SFieldDescribe) => {
                    // For Business accounts
                    return !field.person && CONSTANTS.FIELDS_TO_EXCLUDE_FROM_UPDATE_FOR_BUSINESS_ACCOUNT.indexOf(field.nameId) < 0;
                }) : processedData.fields.filter(field => {
                    // For Business contacts
                    return CONSTANTS.FIELDS_TO_EXCLUDE_FROM_UPDATE_FOR_BUSINESS_CONTACT.indexOf(field.nameId) < 0;
                });
        } else if (self.sObjectName == "Account") {
            processedData.fields = processedData.fields.filter(field => {
                // For Person accounts
                //return !field.person && CONSTANTS.FIELDS_TO_EXCLUDE_FROM_UPDATE_FOR_PERSON_ACCOUNT.indexOf(field.nameId) < 0;
                return CONSTANTS.FIELDS_TO_EXCLUDE_FROM_UPDATE_FOR_PERSON_ACCOUNT.indexOf(field.nameId) < 0;
            });
        } else {
            // Person contact => skip from the processing (empty ProcessedData)
            return processedData;
        }
    }
    // Remove master-detail fields for Update / Upsert ////////////////
    // (to avoid master-detail reparenting if not available)
    // 9/11/2020: Removed ALL non-updateable fields to avoid the issue with updating of audit field.
    let notUpdateableFields = processedData.fields.filter(field => {
        //return field.isMasterDetail && !field.updateable;
        return field.isDescribed && !field.updateable && field.name != "Id";
    }).map(field => field.nameId);
    // Field do not Insert //////////////
    let fieldsToCompareRecords = self.data.fieldsToCompareSourceWithTarget;
    // Non-insertable is the same as fields to compare but not included in the Update
    let notIsertableFields = fieldsToCompareRecords.filter(field => !processedData.fields.some(f => f.nameId == field));
    notUpdateableFields = notUpdateableFields.concat(notIsertableFields); // Must include both non-updateable & non-insertable
    // Additional field filters ////////////
    let doNotDeleteIdFieldOnInsert = self.scriptObject.idFieldIsMapped;
    // Prepare records //////////////
    // (Only if any field to update exist)
    let fieldNamesToClone = processedData.fieldNames.concat(notIsertableFields);
    if (processedData.fields.some(field => field.name != "Id" && field.name != CONSTANTS.__ID_FIELD_NAME)) {
        // Map: cloned => source
        let tempClonedToSourceMap = Common.cloneArrayOfObjects(self.sourceData.records, fieldNamesToClone);
        // Map: "___Id" => cloned
        // (lets us find the ORIGINAL clone again after filtering/mocking replaced the objects)
        let ___IdToClonedMap = new Map<string, any>();
        [...tempClonedToSourceMap.keys()].forEach(cloned => {
            ___IdToClonedMap.set(cloned[CONSTANTS.__ID_FIELD_NAME], cloned);
        });
        // Map: cloned => source
        // + update lookup Id fields (f.ex. Account__c)
        if (self.data.isPersonAccountOrContact) {
            // Person accounts are supported --------- *** /
            if (!processPersonAccounts) {
                // Process only Business Acounts/Contacts (IsPersonAccount == false)
                tempClonedToSourceMap.forEach((source, cloned) => {
                    if (!source["IsPersonAccount"]) {
                        ___updateLookupIdFields(processedData, source, cloned);
                        // Always ensure that account Name field is not empty,
                        // join FirstName + LastName fields into Name field if necessary
                        ___updatePrsonAccountFields(processedData, source, cloned, false);
                        processedData.clonedToSourceMap.set(cloned, source);
                    }
                });
            } else {
                // Process only Person Accounts/Contacts (IsPersonAccount == true)
                tempClonedToSourceMap.forEach((source, cloned) => {
                    if (!!source["IsPersonAccount"]) {
                        ___updateLookupIdFields(processedData, source, cloned);
                        // Always ensure that account FirstName / LastName fields are not empty,
                        // split Name field if necessary
                        ___updatePrsonAccountFields(processedData, source, cloned, true);
                        processedData.clonedToSourceMap.set(cloned, source);
                    }
                });
            }
        } else {
            // Person accounts are not supported ---------- *** /
            // All objects including Accounts/Contacts (all items)
            tempClonedToSourceMap.forEach((source, cloned) => {
                ___updateLookupIdFields(processedData, source, cloned);
                processedData.clonedToSourceMap.set(cloned, source);
            });
        }
        // Filter records /////////////
        tempClonedToSourceMap = processedData.clonedToSourceMap;
        processedData.clonedToSourceMap = new Map<any, any>();
        // Apply Records Filter
        let clonedRecords = await ___filterRecords([...tempClonedToSourceMap.keys()]);
        // Mock records
        clonedRecords = ___mockRecords(clonedRecords);
        // Truncate records
        clonedRecords = ___truncateRecords(clonedRecords);
        // Create records map: cloned => source
        // (re-link via the ___Id helper field since mocking may have produced new objects)
        clonedRecords.forEach(cloned => {
            let initialCloned = ___IdToClonedMap.get(cloned[CONSTANTS.__ID_FIELD_NAME]);
            let source = tempClonedToSourceMap.get(initialCloned);
            processedData.clonedToSourceMap.set(cloned, source);
        });
        // Create separated record sets to Update/Insert /////////////
        processedData.clonedToSourceMap.forEach((source, cloned) => {
            // Initialize the "processed" flag on the source record if not present yet
            source[CONSTANTS.__IS_PROCESSED_FIELD_NAME] = typeof source[CONSTANTS.__IS_PROCESSED_FIELD_NAME] == "undefined" ? false : source[CONSTANTS.__IS_PROCESSED_FIELD_NAME];
            delete cloned[CONSTANTS.__ID_FIELD_NAME];
            let target = self.data.sourceToTargetRecordMap.get(source);
            if (target && updateMode == "backwards") {
                // Backwards step: only update already-mapped targets that actually differ
                if (target["Id"] && ___compareRecords(target, cloned, fieldsToCompareRecords)) {
                    cloned["Id"] = target["Id"];
                    ___removeRecordFields(cloned, notUpdateableFields);
                    processedData.recordsToUpdate.push(cloned);
                    source[CONSTANTS.__IS_PROCESSED_FIELD_NAME] = true;
                }
            // NOTE(review): operator precedence makes this read
            // (!target && Upsert) || Insert — i.e. on a plain Insert operation the
            // record is inserted even when a target match exists. Presumably
            // intentional for Insert, but confirm (compare with the fully
            // parenthesized Update/Upsert condition below).
            } else if (!target && self.operation == OPERATION.Upsert || self.operation == OPERATION.Insert) {
                // Inserting new record to the target
                if (!doNotDeleteIdFieldOnInsert) {
                    delete cloned["Id"];
                }
                ___removeRecordFields(cloned, notIsertableFields);
                processedData.recordsToInsert.push(cloned);
                source[CONSTANTS.__IS_PROCESSED_FIELD_NAME] = true;
            } else if (target && (self.operation == OPERATION.Upsert || self.operation == OPERATION.Update)) {
                // Updating existing record on the target (only when it differs)
                if (target["Id"] && ___compareRecords(target, cloned, fieldsToCompareRecords)) {
                    cloned["Id"] = target["Id"];
                    ___removeRecordFields(cloned, notUpdateableFields);
                    processedData.recordsToUpdate.push(cloned);
                    source[CONSTANTS.__IS_PROCESSED_FIELD_NAME] = true;
                }
            }
        });
        ///////////////
        // Filter out unwanted records (for example of AccountContactRelation)
        processedData.recordsToInsert = __filterInserts(processedData.recordsToInsert);
        processedData.recordsToUpdate = __filterUpdates(processedData.recordsToUpdate);
    }
    return processedData;
}
/**
* @returns {Promise<number>} Number of records actually processed
*/
/**
 * Deletes all source records of the current sObject using the Delete api operation.
 *
 * @returns {Promise<number>} Number of records actually deleted
 */
async function ___deleteSourceRecords(): Promise<number> {
    self.logger.infoNormal(RESOURCES.deletingSourceSObjectRecords, self.sObjectName);
    const sourceRecords = self.sourceData.records;
    if (sourceRecords.length == 0) {
        // Nothing to do for an empty source
        self.logger.infoNormal(RESOURCES.nothingToDelete, self.sObjectName);
        return 0;
    }
    // Deleting ////////
    self.logger.infoVerbose(RESOURCES.deletingNRecordsWillBeDeleted, self.sObjectName, String(sourceRecords.length));
    // Only the Id property is required to delete a record
    const recordsToDelete = sourceRecords.map(record => ({
        Id: record["Id"]
    }));
    // Create engine and execute the Delete operation
    self.createApiEngine(self.sourceData.org, OPERATION.Delete, recordsToDelete.length, true);
    const resultRecords = await self.apiEngine.executeCRUD(recordsToDelete, self.apiProgressCallback);
    if (resultRecords == null) {
        // NOTE(review): assumes _apiOperationError aborts/throws — confirm,
        // otherwise resultRecords.length below would fail
        self._apiOperationError(OPERATION.Delete);
    }
    // Done
    self.logger.infoVerbose(RESOURCES.deletingRecordsCompleted, self.sObjectName);
    return resultRecords.length;
}
/**
* @returns {Promise<number>} Number of records actually processed
*/
/**
 * Executes the Insert and Update CRUD operations for the prepared ProcessedData
 * and maintains the source => target record maps.
 *
 * @param {ProcessedData} data The prepared record sets to process
 * @returns {Promise<number>} Number of records actually processed (inserted + updated)
 */
async function ___updateData(data: ProcessedData): Promise<number> {
    let totalProcessedAmount = 0;
    // Person-account batches get their own target CSV filename suffix
    let targetFilenameSuffix = data.processPersonAccounts ? CONSTANTS.CSV_TARGET_FILE_PERSON_ACCOUNTS_SUFFIX : "";
    // Temporary store the current processed data
    // to allow access it from the Add-On engine
    self.processedData = data;
    // Call addon onBeforeUpdate event
    await self.runAddonEvent(ADDON_EVENTS.onBeforeUpdate)
    // Inserting ////////
    if (data.recordsToInsert.length > 0) {
        self.logger.infoVerbose(RESOURCES.updatingTargetNRecordsWillBeUpdated,
            self.sObjectName,
            self.logger.getResourceString(RESOURCES.insert),
            String((data.recordsToInsert.length)));
        self.createApiEngine(self.targetData.org, OPERATION.Insert, data.recordsToInsert.length, true, targetFilenameSuffix);
        let targetRecords = await self.apiEngine.executeCRUD(data.recordsToInsert, self.apiProgressCallback);
        if (targetRecords == null) {
            // NOTE(review): assumes _apiOperationError aborts/throws — confirm
            self._apiOperationError(OPERATION.Insert);
        }
        totalProcessedAmount += targetRecords.length;
        // Set external ids ---
        self._setExternalIdMap(targetRecords, self.targetData.extIdRecordsMap, self.targetData.idRecordsMap);
        // Map records ---
        targetRecords.forEach(target => {
            let source = data.clonedToSourceMap.get(target);
            if (source) {
                self.data.sourceToTargetRecordMap.set(source, target);
                data.insertedRecordsSourceToTargetMap.set(source, target);
            }
        });
    }
    // Updating ///////
    if (data.recordsToUpdate.length > 0) {
        self.logger.infoVerbose(RESOURCES.updatingTargetNRecordsWillBeUpdated,
            self.sObjectName,
            self.logger.getResourceString(RESOURCES.update),
            String((data.recordsToUpdate.length)));
        self.createApiEngine(self.targetData.org, OPERATION.Update, data.recordsToUpdate.length, false, targetFilenameSuffix);
        let targetRecords = await self.apiEngine.executeCRUD(data.recordsToUpdate, self.apiProgressCallback);
        if (targetRecords == null) {
            self._apiOperationError(OPERATION.Update);
        }
        totalProcessedAmount += targetRecords.length;
        // Map records ---
        // TODO: This is new update, check if it has no any negative impact
        targetRecords.forEach(target => {
            let source = data.clonedToSourceMap.get(target);
            // Prevent override of previously mapped inserts
            if (source && !self.data.sourceToTargetRecordMap.has(source)) {
                self.data.sourceToTargetRecordMap.set(source, target);
            }
        });
    }
    return totalProcessedAmount;
}
/**
* After the Person Accounts inserted the Person Contacts are automatically added.
* Need to query and add them to the local data storage.
*
* @param {ProcessedData} personAccountsInsertData The last person account insert result
* @returns {Promise<number>} Number of records actually processed
*/
async function ___insertPersonContactsFromPersonAccounts(personAccountsInsertData: ProcessedData): Promise<number> {
    // Find the Contact migration task; without it nothing can be mapped
    let contactTask = self.job.tasks.filter(task => task.sObjectName == "Contact")[0];
    if (contactTask) {
        // Map: target person Account Id => source Contact record
        let targetPersonAccountIdTosourceContactMap: Map<string, any> = new Map<string, any>();
        let targetAccountIds = new Array<string>();
        // Collect the target Account Ids of the just-inserted Person Accounts
        // whose source Contacts are not mapped to a target yet
        contactTask.sourceData.records.forEach(sourceContact => {
            let accountId = sourceContact["AccountId"];
            if (accountId && !contactTask.data.sourceToTargetRecordMap.has(sourceContact)) {
                let sourceAccount = self.sourceData.idRecordsMap.get(accountId);
                let targetAccount = personAccountsInsertData.insertedRecordsSourceToTargetMap.get(sourceAccount);
                if (targetAccount) {
                    let targetAccountId = targetAccount["Id"];
                    if (targetAccountId) {
                        targetPersonAccountIdTosourceContactMap.set(targetAccountId, sourceContact);
                        targetAccountIds.push(targetAccountId);
                    }
                }
            }
        });
        // Query on Person Contacts
        let queries = Common.createFieldInQueries(contactTask.data.fieldsInQuery, "AccountId", contactTask.sObjectName, targetAccountIds);
        if (queries.length > 0) {
            // Start message ------
            self.logger.infoNormal(RESOURCES.queryingIn2, self.sObjectName, self.logger.getResourceString(RESOURCES.personContact));
            // Fetch target records
            let records = await self._retrieveFilteredRecords(queries, self.targetData, self._targetFieldMapping);
            if (records.length > 0) {
                //Set external id map --------- TARGET
                contactTask._setExternalIdMap(records, contactTask.targetData.extIdRecordsMap, contactTask.targetData.idRecordsMap, true);
                //Completed message ------
                let newRecordsCount = 0;
                // Link each fetched target Contact back to its source Contact
                records.forEach(targetContact => {
                    let accountId = targetContact["AccountId"];
                    let sourceContact = targetPersonAccountIdTosourceContactMap.get(accountId);
                    if (sourceContact && !contactTask.data.sourceToTargetRecordMap.has(sourceContact)) {
                        contactTask.data.sourceToTargetRecordMap.set(sourceContact, targetContact);
                        sourceContact[CONSTANTS.__IS_PROCESSED_FIELD_NAME] = true;
                        newRecordsCount++;
                    }
                });
                self.logger.infoNormal(RESOURCES.queryingFinished, self.sObjectName, self.logger.getResourceString(RESOURCES.personContact), String(newRecordsCount));
                return newRecordsCount;
            }
        }
    }
    return 0;
}
/**
 * Normalizes the Name / FirstName / LastName fields of an Account clone:
 * - Person account: splits Name into FirstName + LastName when they are missing
 * - Business account: joins FirstName + LastName into Name when Name is missing
 * Non-Account objects are left untouched.
 *
 * @param processedData The currently processed data (used to check which fields are updated)
 * @param source The original source record
 * @param cloned The cloned record that will be sent to the target
 * @param isPersonRecord true => Person account record, false => Business account record
 */
function ___updatePrsonAccountFields(processedData: ProcessedData, source: any, cloned: any, isPersonRecord: boolean) {
    if (self.sObjectName == "Account") {
        if (isPersonRecord) {
            // Person account record
            // Name of Person account => split into First name / Last name
            if (!cloned["FirstName"] && !cloned["LastName"]
                && processedData.fieldNames.indexOf("FirstName") >= 0 // Currently updating First/Last names of the account
            ) {
                let parts = (source["Name"] || '').split(' ');
                cloned["FirstName"] = parts[0] || '';
                // Keep the whole remainder as the last name so multi-word names
                // (e.g. "John van Dyke") are not truncated to the second word only
                cloned["LastName"] = parts.slice(1).join(' ') || '';
                // Completely empty name => generate a random FirstName (required field)
                cloned["FirstName"] = !cloned["FirstName"] && !cloned["LastName"] ? Common.makeId(10) : cloned["FirstName"];
            }
        } else {
            // Business account record
            // First name & last name of Business account => join into Name
            if (processedData.fieldNames.indexOf("Name") >= 0) {
                cloned["Name"] = cloned["Name"] || `${source["FirstName"]} ${source["LastName"]}`;
                // Blank Name => generate a random one (Name is required on Account)
                cloned["Name"] = !(cloned["Name"] || '').trim() ? Common.makeId(10) : cloned["Name"];
            }
        }
    }
}
/**
 * Resolves each lookup Id field of the cloned record:
 * maps the source parent Id to the corresponding target record Id.
 * When the parent cannot be resolved, a "missing parent lookup" CSV row is reported.
 */
function ___updateLookupIdFields(processedData: ProcessedData, source: any, cloned: any) {
    processedData.lookupIdFields.forEach(idField => {
        // Default: no parent mapping
        cloned[idField.nameId] = null;
        const parentId = source[idField.nameId];
        if (!parentId) {
            // No parent value on the source record => nothing to resolve
            return;
        }
        // Chain: source parent Id -> source parent record -> target parent record -> target Id
        const parentTask = idField.parentLookupObject.task;
        const parentRecord = parentTask.sourceData.idRecordsMap.get(parentId);
        const targetRecord = parentRecord && parentTask.data.sourceToTargetRecordMap.get(parentRecord);
        const targetId = targetRecord && targetRecord["Id"];
        if (targetId) {
            cloned[idField.nameId] = targetId;
            return;
        }
        // The parent could not be resolved on the target => report it
        const csvRow: IMissingParentLookupRecordCsvRow = {
            "Date update": Common.formatDateTime(new Date()),
            "Id": source["Id"],
            "Child ExternalId": idField.fullName__r,
            "Child lookup": idField.nameId,
            "Child SObject": idField.scriptObject.name,
            "Missing value": source[idField.fullName__r] || source[idField.nameId],
            "Parent ExternalId": idField.parentLookupObject.externalId,
            "Parent SObject": idField.parentLookupObject.name
        };
        processedData.missingParentLookups.push(csvRow);
    });
}
/**
 * Applies the optional targetRecordsFilter (an alasql WHERE expression)
 * to the given records. On a missing filter or an alasql failure the
 * original records are returned unchanged (best-effort filtering).
 */
async function ___filterRecords(records: Array<any>): Promise<Array<any>> {
    const filterExpression = self.scriptObject.targetRecordsFilter;
    if (!filterExpression) {
        // No filter defined => pass records through unchanged
        return records;
    }
    return new Promise<Array<any>>(resolve => {
        try {
            alasql(`SELECT * FROM ? WHERE ${filterExpression}`, [records], (selectedRecords: any) => resolve(selectedRecords));
        } catch (ex) {
            // Invalid filter expression => best-effort: keep all records
            resolve(records);
        }
    });
}
/**
 * Deletes the given properties from the record in place.
 * Missing properties are silently ignored.
 */
function ___removeRecordFields(record: any, fieldsToRemove: Array<string>) {
    if (fieldsToRemove.length == 0) return;
    for (const field of fieldsToRemove) {
        delete record[field];
    }
}
/**
 * Strips internal helper columns (and, when configured, all Id-like columns)
 * from the records before they are written to the target CSV file.
 * The records are mutated in place and returned.
 */
function ___removeCSVFileColumns(records: Array<any>): Array<any> {
    // Build the list of columns to remove from the CSV file
    let fieldNamesToRemove = new Array<string>();
    if (self.script.excludeIdsFromCSVFiles) {
        fieldNamesToRemove = self.data.sFieldsInQuery.filter(field => {
            /* Account__c (all lookup id fields, not when ExternalId == Id)*/
            const isIdOrLookupField = (field.name == "Id" || field.isSimpleReference) && !field.isOriginalExternalIdField;
            /* Account__r.Id (only when Original Externalid != Id and ExternalID == Id)*/
            const is__rIdField = field.is__r && field.parentLookupObject.externalId == "Id" && field.parentLookupObject.originalExternalId != "Id";
            return isIdOrLookupField || is__rIdField;
        }).map(field => field.name);
    }
    // The internal ___Id / processed-flag helper columns are always removed
    fieldNamesToRemove = fieldNamesToRemove.concat(CONSTANTS.__ID_FIELD_NAME, CONSTANTS.__IS_PROCESSED_FIELD_NAME);
    // Remove the properties corresponding to the selected columns
    for (const record of records) {
        for (const fieldName of fieldNamesToRemove) {
            delete record[fieldName];
        }
    }
    return records;
}
/**
 * Writes the given records to the additional target CSV file,
 * but only when target CSV file creation is enabled in the script.
 */
async function ___writeToTargetCSVFile(records: Array<any>): Promise<void> {
    if (!self.script.createTargetCSVFiles) {
        return;
    }
    await Common.writeCsvFileAsync(self.data.getTargetCSVFilename(self.operation), records, true);
}
/**
 * Filters out records that must not be inserted into the target.
 * Currently only applies to AccountContactRelation: relations between an
 * Account and its primary Contact are created automatically by Salesforce,
 * so inserting them explicitly is unnecessary.
 */
function __filterInserts(records: Array<any>): Array<any> {
    // Remove unnecessary records from AccountContactRelation ///////////
    if (self.sObjectName != "AccountContactRelation") {
        return records;
    }
    const contactTask = self.job.tasks.filter(task => task.sObjectName == "Contact")[0];
    if (!contactTask) {
        // Without a Contact task the primary contacts cannot be detected
        return records;
    }
    // Remove primary Contacts
    return records.filter(record => {
        const targetContact = contactTask.targetData.idRecordsMap.get(record["ContactId"]);
        // This is the primary Contact for the given Account =>
        // the relation record is not necessary, remove it
        const isPrimaryContactRelation = targetContact && targetContact["AccountId"] == record["AccountId"];
        return !isPrimaryContactRelation;
    });
}
/**
 * Placeholder for filtering records before they are updated on the target.
 * Currently no filtering is applied — all records pass through unchanged.
 */
function __filterUpdates(records: Array<any>): Array<any> {
    // TODO: Optional, implement this if needed
    return records;
}
/**
 * Truncates textual field values to the field's maximum length
 * when `allowFieldTruncation` is enabled in the script.
 * The records are mutated in place and returned.
 */
function ___truncateRecords(records: Array<any>): Array<any> {
    if (records.length == 0) {
        return records;
    }
    if (self.script.allowFieldTruncation) {
        // Textual fields that are actually present in the records
        let sfieldsToTruncate = self.data.sFieldsToUpdate.filter(field => field.isTextual
            && Object.keys(records[0]).indexOf(field.name) >= 0);
        records.forEach(record => {
            sfieldsToTruncate.forEach(field => {
                if (field.length > 0) {
                    // substring(0, n) replaces the deprecated String.prototype.substr(0, n);
                    // the result is identical for a start index of 0.
                    // Falsy values (null/undefined/'') are kept as-is by the && guard.
                    record[field.name] = record[field.name] && String(record[field.name]).substring(0, field.length);
                }
            });
        });
    }
    return records;
}
/**
 * Anonymizes (mocks) field values of the given records according to the
 * mockFields configuration of the script object. Returns new record copies;
 * the originals are left untouched. When mocking is disabled the original
 * array is returned as-is.
 */
function ___mockRecords(records: Array<any>): Array<any> {
    let updatedRecords = new Array<any>();
    if (records.length == 0) {
        return updatedRecords;
    }
    let recordIds = records.map(x => x["Id"]);
    let recordProperties = Object.keys(records[0]);
    if (self.scriptObject.updateWithMockData && self.scriptObject.mockFields.length > 0) {
        // Build field name => prepared mock descriptor map
        let fieldNameToMockFieldMap: Map<string, IMockField> = new Map<string, IMockField>();
        self.data.sFieldsToUpdate.forEach(fieldDescribe => {
            let mockField = ___getMockPatternByFieldName(fieldDescribe.name);
            if (recordProperties.indexOf(mockField.name) >= 0 && mockField.pattern) {
                let fn = mockField.pattern;
                // Special mock commands get the field name injected as their first argument
                if (CONSTANTS.SPECIAL_MOCK_COMMANDS.some(x => fn.startsWith(x + "("))) {
                    fn = fn.replace(/\(/, `('${mockField.name}',`);
                }
                // NOTE(review): this assigns defaults onto the ScriptMockField instance
                // itself (mutating the script configuration object) — presumably harmless
                // since '' is the effective default, but confirm
                mockField.excludedRegex = mockField.excludedRegex || '';
                mockField.includedRegex = mockField.includedRegex || '';
                fieldNameToMockFieldMap.set(mockField.name, <IMockField>{
                    fn,
                    // The part before the "entire row" flag is the actual regex
                    regExcl: mockField.excludedRegex.split(CONSTANTS.MOCK_PATTERN_ENTIRE_ROW_FLAG)[0].trim(),
                    regIncl: mockField.includedRegex.split(CONSTANTS.MOCK_PATTERN_ENTIRE_ROW_FLAG)[0].trim(),
                    // The flag's presence toggles whole-record skip / whole-record mock
                    disallowMockAllRecord: mockField.excludedRegex.indexOf(CONSTANTS.MOCK_PATTERN_ENTIRE_ROW_FLAG) >= 0,
                    allowMockAllRecord: mockField.includedRegex.indexOf(CONSTANTS.MOCK_PATTERN_ENTIRE_ROW_FLAG) >= 0,
                });
            }
        });
        MockGenerator.resetCounter();
        records.forEach((originalRecord: any, index: number) => {
            // Work on a shallow copy; the original record stays untouched
            let updatedRecord = Object.assign({}, originalRecord);
            let doNotMock = false;
            let mockAllRecord = false;
            let fieldsToMockMap: Map<string, boolean> = new Map<string, boolean>();
            // First pass: decide per field (and per record) whether to mock
            [...fieldNameToMockFieldMap.keys()].forEach(fieldName => {
                if (!doNotMock) {
                    let mockField = fieldNameToMockFieldMap.get(fieldName);
                    let value = String(updatedRecord[fieldName]);
                    let excluded = mockField.regExcl && ___testRegex(mockField.regExcl, value);
                    let included = mockField.regIncl && ___testRegex(mockField.regIncl, value);
                    if (included && mockField.allowMockAllRecord) {
                        // One included match with the "entire row" flag => mock every field
                        mockAllRecord = true;
                    }
                    if (excluded && mockField.disallowMockAllRecord) {
                        // One excluded match with the "entire row" flag => skip the whole record
                        doNotMock = true;
                    } else {
                        if (mockAllRecord || (!mockField.regExcl || !excluded) && (!mockField.regIncl || included)) {
                            fieldsToMockMap.set(fieldName, true);
                        }
                    }
                }
            });
            // Second pass: produce the mock values
            if (!doNotMock) {
                [...fieldNameToMockFieldMap.keys()].forEach(fieldName => {
                    if (mockAllRecord || fieldsToMockMap.has(fieldName)) {
                        let mockField = fieldNameToMockFieldMap.get(fieldName);
                        if (mockField.fn == "ids") {
                            // "ids" keeps the original record Id
                            updatedRecord[fieldName] = recordIds[index];
                        } else {
                            // SECURITY NOTE: eval() executes the user-supplied mock pattern
                            // from the script configuration via the 'casual' library —
                            // the export.json config is treated as trusted input here
                            updatedRecord[fieldName] = eval(`casual.${mockField.fn}`);
                        }
                    }
                });
            }
            updatedRecords.push(updatedRecord);
        });
    } else {
        // Mocking disabled => return the original records unchanged
        return records;
    }
    return updatedRecords;
}
/**
 * Tests a value against a mock include/exclude expression.
 * Supports the special "*" (any value) and "^*" (missing value) patterns;
 * anything else is treated as a case-insensitive regular expression.
 */
function ___testRegex(expr: string, value: string): boolean {
    if (expr == CONSTANTS.SPECIAL_MOCK_PATTERNS.get(SPECIAL_MOCK_PATTERN_TYPES.haveAnyValue)) {
        // * => matches any non-empty value
        return !!value;
    }
    if (expr == CONSTANTS.SPECIAL_MOCK_PATTERNS.get(SPECIAL_MOCK_PATTERN_TYPES.missingValue)) {
        // ^* => matches an empty value
        return !value;
    }
    // Plain regex (fresh instance per call, so the 'g' flag carries no lastIndex state)
    return new RegExp(expr, 'ig').test(value);
}
/**
 * Returns the mock field definition for the given field name,
 * or an empty ScriptMockField when the field has no mock configuration.
 */
function ___getMockPatternByFieldName(fieldName: string): ScriptMockField {
    const matched = self.scriptObject.mockFields.find(field => field.name == fieldName);
    return matched || new ScriptMockField();
}
/**
 * @returns {boolean} true => the records are NOT equal (differ in at least one compared field)
 */
/**
 * Compares a target record with a cloned source record over the given field list.
 * An empty fieldsToCompareRecords list compares all keys of the cloned record.
 * "Id" and the internal ___Id helper field are always excluded from the comparison.
 *
 * @returns {boolean} true => the records differ in at least one compared field
 */
function ___compareRecords(target: any, cloned: any, fieldsToCompareRecords: Array<string>): boolean {
    // Exactly one of the two records missing => not equal
    if (target && !cloned || cloned && !target) {
        return true;
    }
    return Object.keys(cloned)
        .filter(key => fieldsToCompareRecords.length == 0 || fieldsToCompareRecords.indexOf(key) >= 0)
        .some(key => {
            if (key != "Id" && key != CONSTANTS.__ID_FIELD_NAME) {
                // FIXME: && target.hasOwnProperty(key) solves issue
                // Auto-number fields ignored when used as sourceField in fieldMapping #89
                // But it causes error when copying self-referencing fields with field mapping with complex external id
                // (note: loose != is deliberate here, e.g. so 1 == "1")
                return target[key] != cloned[key]; // && target.hasOwnProperty(key);
            }
            return false;
        });
}
}
/**
 * Creates new api engine for the given org and operation
 *
 * @param {ScriptOrg} org The org to connect the api engine
 * @param {OPERATION} operation The operation to perform
 * @param {number} amountOfRecordsToProcess The total amount of records that should
 *                                          be processed using this engine instance
 * @param {boolean} updateRecordId Allow update Id property
 *                                 of the processed (the source) records
 *                                 with the target record ids
 * @param {string} [targetFilenameSuffix] Optional suffix appended to the target CSV filename
 * @returns {IApiEngine}
 * @memberof MigrationJobTask
 */
createApiEngine(org: ScriptOrg, operation: OPERATION, amountOfRecordsToProcess: number, updateRecordId: boolean, targetFilenameSuffix?: string): IApiEngine {
    // Options shared by every engine flavour
    const commonOptions = {
        logger: this.logger,
        connectionData: org.connectionData,
        sObjectName: this.sObjectName,
        operation,
        pollingIntervalMs: this.script.pollingIntervalMs,
        concurrencyMode: this.script.concurrencyMode,
        updateRecordId,
        targetCSVFullFilename: this.data.getTargetCSVFilename(operation, targetFilenameSuffix),
        createTargetCSVFiles: this.script.createTargetCSVFiles,
        targetFieldMapping: this._targetFieldMapping,
        simulationMode: this.script.simulationMode,
        binaryDataCache: this.script.binaryDataCache
    };
    // Bulk api is used only above the configured threshold, when it is not
    // explicitly disabled and when the sObject supports it
    const useBulkApi = amountOfRecordsToProcess > this.script.bulkThreshold
        && !this.script.alwaysUseRestApiToUpdateRecords
        && CONSTANTS.NOT_SUPPORTED_OBJECTS_IN_BULK_API.indexOf(this.sObjectName) < 0;
    let engine: IApiEngine;
    if (!useBulkApi) {
        // Use rest api
        engine = new RestApiEngine({
            ...commonOptions,
            restApiBatchSize: this.script.restApiBatchSize,
            allOrNone: this.script.allOrNone,
            binaryCacheDirectory: this.script.binaryCacheDirectory
        });
    } else if (this.script.bulkApiVersionNumber == 2) {
        // Bulk Api V2.0
        engine = new BulkApiV2_0Engine({ ...commonOptions });
    } else {
        // Bulk Api V1.0 (the default bulk version)
        engine = new BulkApiV1_0Engine({
            ...commonOptions,
            bulkApiV1BatchSize: this.script.bulkApiV1BatchSize
        });
    }
    this.setApiEngine(engine);
    return this.apiEngine;
}
/**
 * Executes addon event related to the current executed object
 *
 * @param {ADDON_EVENTS} event The addon event to execute
 * @returns {Promise<boolean>} The result reported by the addon manager
 * @memberof MigrationJobTask
 */
async runAddonEvent(event: ADDON_EVENTS): Promise<boolean> {
    // Delegate to the addon manager, scoped to the current sObject
    const result = await this.script.addonManager.triggerAddonModuleMethodAsync(event, this.sObjectName);
    return result;
}
/**
* Set the API engine instance for the current task
*
* @param {IApiEngine} engine The engine instance
* @memberof MigrationJobTask
*/
setApiEngine(engine: IApiEngine) {
    this.apiEngine = engine;
    // Create the bound progress callback only once and reuse it afterwards
    if (!this.apiProgressCallback) {
        this.apiProgressCallback = this._apiProgressCallback.bind(this);
    }
}
// ----------------------- Private members -------------------------------------------
/**
 * Default API progress callback.
 * Translates an ApiInfo progress notification coming from the API engine into
 * a log message with the appropriate verbosity and message type.
 *
 * @param {ApiInfo} apiResult The progress data reported by the API engine
 */
private _apiProgressCallback(apiResult: ApiInfo): void {
    // Map the reported message importance onto log verbosity / message type.
    let verbosity = LOG_MESSAGE_VERBOSITY.MINIMAL;
    let logMessageType = LOG_MESSAGE_TYPE.STRING;
    switch (apiResult.messageImportance) {
        case MESSAGE_IMPORTANCE.Low:
            verbosity = LOG_MESSAGE_VERBOSITY.VERBOSE;
            break;
        case MESSAGE_IMPORTANCE.Normal:
            verbosity = LOG_MESSAGE_VERBOSITY.NORMAL;
            break;
        case MESSAGE_IMPORTANCE.Warn:
            logMessageType = LOG_MESSAGE_TYPE.WARN;
            break;
        case MESSAGE_IMPORTANCE.Error:
            logMessageType = LOG_MESSAGE_TYPE.ERROR;
            break;
    }
    // Emit the message matching the reported result status.
    switch (apiResult.resultStatus) {
        case RESULT_STATUSES.Information:
            if (apiResult.informationMessageData.length > 0) {
                // [0] - always is the RESOURCE message
                // [1...] - the rest of the RESOURCE message tokens
                let resourceString = this.logger.getResourceString.apply(this.logger, [apiResult.informationMessageData[0], ...apiResult.informationMessageData.slice(1)]);
                this.logger.log.apply(this.logger, [resourceString, logMessageType, verbosity]);
            }
            break;
        case RESULT_STATUSES.ApiOperationStarted: {
            // Braces added: a lexical declaration directly inside a case clause is
            // scoped to the entire switch and leaks into the other cases.
            let simulationModeResourceString = this.script.simulationMode ? this.logger.getResourceString(RESOURCES.simulationMode) : "";
            this.logger.log(RESOURCES.apiOperationStarted, logMessageType, verbosity, this.sObjectName, this.apiEngine.getStrOperation(), this.apiEngine.getEngineName(), simulationModeResourceString);
            break;
        }
        case RESULT_STATUSES.ApiOperationFinished:
            this.logger.log(RESOURCES.apiOperationFinished, logMessageType, verbosity, this.sObjectName, this.apiEngine.getStrOperation());
            break;
        case RESULT_STATUSES.JobCreated:
            this.logger.log(RESOURCES.apiOperationJobCreated, logMessageType, verbosity, apiResult.jobId, this.apiEngine.getStrOperation(), this.sObjectName);
            break;
        case RESULT_STATUSES.BatchCreated:
            this.logger.log(RESOURCES.apiOperationBatchCreated, logMessageType, verbosity, apiResult.batchId, this.apiEngine.getStrOperation(), this.sObjectName);
            break;
        case RESULT_STATUSES.DataUploaded:
            this.logger.log(RESOURCES.apiOperationDataUploaded, logMessageType, verbosity, apiResult.batchId, this.apiEngine.getStrOperation(), this.sObjectName);
            break;
        case RESULT_STATUSES.InProgress:
            this.logger.log(RESOURCES.apiOperationInProgress, logMessageType, verbosity, apiResult.batchId, this.apiEngine.getStrOperation(), this.sObjectName, String(apiResult.numberRecordsProcessed), String(apiResult.numberRecordsFailed));
            break;
        case RESULT_STATUSES.Completed:
            // A completed run that produced warnings uses the "warn completed" resource.
            this.logger.log(logMessageType != LOG_MESSAGE_TYPE.WARN ? RESOURCES.apiOperationCompleted : RESOURCES.apiOperationWarnCompleted, logMessageType, verbosity, apiResult.batchId, this.apiEngine.getStrOperation(), this.sObjectName, String(apiResult.numberRecordsProcessed), String(apiResult.numberRecordsFailed));
            break;
        case RESULT_STATUSES.ProcessError:
        case RESULT_STATUSES.FailedOrAborted:
            if (apiResult.errorMessage)
                this.logger.log(RESOURCES.apiOperationProcessError, logMessageType, verbosity, this.sObjectName, this.apiEngine.getStrOperation(), apiResult.errorMessage);
            else
                this.logger.log(RESOURCES.apiOperationFailed, logMessageType, verbosity, this.sObjectName, this.apiEngine.getStrOperation());
            break;
    }
}
/**
 * Throws a CommandExecutionError reporting that the API operation has failed
 * for the current sObject.
 *
 * @param {OPERATION} operation The failed operation
 *        (NOTE: currently unused — the message is built from the engine's
 *        own operation name)
 */
private _apiOperationError(operation: OPERATION) {
    const message = this.logger.getResourceString(
        RESOURCES.apiOperationFailed,
        this.sObjectName,
        this.apiEngine.getStrOperation());
    throw new CommandExecutionError(message);
}
/**
 * Builds the list of filtered ("field IN (...)") SOQL queries used to retrieve
 * only the subset of records related to the already-known parent/source records.
 *
 * @param queryMode "forwards" | "backwards" | "target" — which direction/org to build queries for
 * @param reversed  When true, builds a single Id-IN query from child lookup values
 *                  (used for source + forwards only)
 * @param fieldNames Optional explicit list of fields to select instead of the default field set
 * @returns {Array<string>} The generated SOQL query strings
 */
private _createFilteredQueries(queryMode: "forwards" | "backwards" | "target", reversed: boolean, fieldNames?: string[]): Array<string> {
    let queries = new Array<string>();
    let fieldsToQueryMap: Map<SFieldDescribe, Array<string>> = new Map<SFieldDescribe, Array<string>>();
    let isSource = queryMode != "target";
    if (reversed) {
        if (CONSTANTS.OBJECTS_NOT_TO_USE_IN_FILTERED_QUERYIN_CLAUSE.indexOf(this.sObjectName) < 0) {
            // ONLY SOURCE + FORWARDS FOR reversed == true !
            // Collect the Id-reference fields of all child relationships of this object.
            let fields: SFieldDescribe[] = Common.flatMap(this.data.sFieldsInQuery
                .filter(field => field.child__rSFields.length > 0), (field: SFieldDescribe) => {
                    return field.child__rSFields.map(f => f.idSField);
                });
            // Gather the non-empty Id values those fields hold in the already-fetched source records.
            let values = new Array<string>();
            fields.forEach((field: SFieldDescribe) => {
                values = values.concat(field.scriptObject.task.sourceData.records
                    .map((value: any) => value[field.nameId])
                    .filter(value => !!value));
            });
            values = Common.distinctStringArray(values);
            // One synthetic "Id IN (...)" query for all collected values.
            fieldsToQueryMap.set(new SFieldDescribe({
                name: "Id"
            }), values);
        }
    } else {
        this.data.sFieldsInQuery.forEach(field => {
            if (isSource) {
                // SOURCE
                // For source => |SOURCE Case|Account__c IN (|SOURCE Account|Id....)
                if (field.isSimpleReference
                    && field.parentLookupObject.isInitialized
                    && CONSTANTS.OBJECTS_NOT_TO_USE_IN_FILTERED_QUERYIN_CLAUSE.indexOf(field.referencedObjectType) < 0) {
                    // Only for simple reference lookup fields (f.ex.: Account__c)
                    // *** The previous logic:
                    // -----------------------
                    if (!field.parentLookupObject.task.sourceData.allRecords
                        || field.parentLookupObject.isLimitedQuery
                        // *** The new (fixed) logic:
                        //-------------------
                        // Fixed the issue of incorrect fetching of related records when
                        // the parent object is master and the child object is slave:
                        // - field.parentLookupObject.task.sourceData.allRecords = true -> Fetch all records mode for the parent sObject
                        // - this.scriptObject.allRecords = false -> The current sObject is NOT master
                        // === > also need to create the filtered queries.
                        // (TODO: need to check if it's working properly)
                        || field.parentLookupObject.task.sourceData.allRecords && !this.scriptObject.allRecords
                    ) {
                        // NOTE(review): the condition below is `queryMode != "forwards"`,
                        // yet the branch comments label them FORWARDS / BACKWARDS in the
                        // opposite sense — confirm the intended direction before changing
                        // either the condition or the comments.
                        if (queryMode != "forwards") {
                            // FORWARDS
                            // For forwards => build the query using all the PREVIOUS related tasks by the tasks order
                            if (this.data.prevTasks.indexOf(field.parentLookupObject.task) >= 0) {
                                // The parent task is before => create child lookup query for all Id values of the parent lookup object
                                fieldsToQueryMap.set(field, [...field.parentLookupObject.task.sourceData.idRecordsMap.keys()]);
                            }
                        } else {
                            // BACKWARDS
                            // For backwards => build the query using all the NEXT related tasks by the tasks order
                            if (this.data.nextTasks.indexOf(field.parentLookupObject.task) >= 0) {
                                // The parent task is after => create child lookup query for all Id values of the parent lookup object
                                fieldsToQueryMap.set(field, [...field.parentLookupObject.task.sourceData.idRecordsMap.keys()]);
                            }
                        }
                    }
                }
            } else {
                // TARGET
                // For target => |TARGET Account|Name IN (|SOURCE Account|Name....)
                if (field.isSimple && field.isExternalIdField) {
                    // Only for current object's external id (f.ex.: Name) - not complex and not Id - only simple
                    fieldsToQueryMap.set(field, [...this.sourceData.extIdRecordsMap.keys()]);
                }
            }
        });
    }
    // Limited-query source objects additionally get their plain (non-IN) query.
    if (isSource && this.scriptObject.isLimitedQuery && !reversed) {
        queries.push(this.createQuery(fieldNames));
    }
    fieldsToQueryMap.forEach((inValues, field) => {
        // Filter by cached values => drop all duplicated IN values that
        // were previously queried
        let valueCache = this.tempData.filteredQueryValueCache.get(field.name);
        if (!valueCache) {
            valueCache = new Set<string>();
            this.tempData.filteredQueryValueCache.set(field.name, valueCache);
        }
        inValues = inValues.filter(inValue => !valueCache.has(inValue));
        if (inValues.length > 0) {
            // Remember the values so repeated calls never re-query them.
            inValues.forEach(inValue => {
                valueCache.add(inValue);
            });
            // Create and add query (may be split into several when the IN list is long)
            Common.createFieldInQueries(fieldNames || this.data.fieldsInQuery, field.name, this.sObjectName, inValues).forEach(query => {
                queries.push(query);
            });
        }
    });
    return queries;
}
/**
 * Registers the given records in the external-id and Id lookup maps and, for
 * target records, links each one to its matching source record.
 * Records without an Id get a generated internal id instead.
 *
 * NOTE(review): sourceIdRecordsMap is declared Map<string, string> but the
 * whole record object is stored as the value below — the declared value type
 * looks wrong; confirm against the callers before tightening it.
 *
 * @returns {number} New records count
 */
private _setExternalIdMap(records: Array<any>,
    sourceExtIdRecordsMap: Map<string, string>,
    sourceIdRecordsMap: Map<string, string>,
    isTarget: boolean = false): number {
    let newRecordsCount = 0;
    records.forEach(targetRecord => {
        if (targetRecord["Id"]) {
            // external id value -> record Id
            let value = this.getRecordValue(targetRecord, this.complexExternalId);
            if (value) {
                sourceExtIdRecordsMap.set(value, targetRecord["Id"]);
            }
            if (!sourceIdRecordsMap.has(targetRecord["Id"])) {
                sourceIdRecordsMap.set(targetRecord["Id"], targetRecord);
                targetRecord[CONSTANTS.__ID_FIELD_NAME] = targetRecord["Id"];
                if (isTarget) {
                    // Link target record to its source record through the shared external id value.
                    let extIdValue = this.getRecordValue(targetRecord, this.complexExternalId);
                    if (extIdValue) {
                        let sourceId = this.sourceData.extIdRecordsMap.get(extIdValue);
                        if (sourceId) {
                            let sourceRecord = this.sourceData.idRecordsMap.get(sourceId);
                            this.data.sourceToTargetRecordMap.set(sourceRecord, targetRecord);
                        }
                    }
                }
                newRecordsCount++;
            }
        } else {
            // No Id yet => assign a synthetic internal id so the record stays addressable.
            targetRecord[CONSTANTS.__ID_FIELD_NAME] = Common.makeId(18);
        }
    });
    return newRecordsCount;
}
/**
 * Applies the user-defined raw value mapping (ValueMapping.csv feature) to the
 * given records in place: per-field value tables, optional regex rules, literal
 * boolean/null normalization, and re-resolution of lookup Ids after mapping.
 *
 * @param records The records to transform in place (all share the same field set)
 */
private _mapRecords(records: Array<any>) {
    if (records.length == 0 || !this.scriptObject.useValuesMapping) {
        return;
    }
    this.logger.infoNormal(RESOURCES.mappingRawValues, this.sObjectName);
    let fields = Object.keys(records[0]);
    fields.forEach(field => {
        // Mapping tables are keyed by "<sObjectName><fieldName>".
        let key = this.sObjectName + field;
        let valuesMap = this.job.valueMapping.get(key);
        if (valuesMap && valuesMap.size > 0) {
            let sourceExtIdMap: Map<string, string>;
            let nameId: string;
            // NOTE(review): assumes every mapped field has a describe in
            // sFieldsInQuery — `describe` would be undefined otherwise; confirm.
            let describe = this.data.sFieldsInQuery.filter(f => {
                return f.name == field;
            })[0];
            if (describe.is__r) {
                // __r field => remember the parent's ext-id map so the lookup Id
                // can be re-resolved after the value is mapped.
                let parentTask = this.job.getTaskBySObjectName(describe.parentLookupObject.name);
                if (parentTask) {
                    sourceExtIdMap = parentTask.sourceData.extIdRecordsMap;
                    nameId = describe.nameId;
                }
            }
            // Regex
            // NOTE(review): only the LAST regex-shaped entry of valuesMap wins —
            // regexp/regexpReplaceValue are overwritten on every iteration; and the
            // 'g' flag makes RegExp.test stateful (lastIndex advances between calls),
            // which can skip matches on alternating records — confirm intended.
            let regexp: RegExp;
            let regexpReplaceValue: any;
            valuesMap.forEach((newValue, rawValue) => {
                try {
                    if (new RegExp(CONSTANTS.FIELDS_MAPPING_REGEX_PATTERN).test(rawValue)) {
                        let pattern = rawValue.replace(new RegExp(CONSTANTS.FIELDS_MAPPING_REGEX_PATTERN), '$1');
                        regexpReplaceValue = newValue;
                        regexp = regexpReplaceValue && new RegExp(pattern, 'gi');
                    }
                } catch (ex) { }
            });
            records.forEach((record: any) => {
                let newValue: any;
                // NOTE(review): String(null)/String(undefined) yield the non-empty
                // strings 'null'/'undefined', so the `|| ""` fallback only applies
                // to an actual empty string — confirm nullish values are intended
                // to be matched against the literal 'null'/'undefined' keys.
                let rawValue = (String(record[field]) || "").trim();
                if (regexp) {
                    // Use regex
                    try {
                        if (regexp.test(rawValue)) {
                            newValue = rawValue.replace(regexp, regexpReplaceValue);
                        }
                    } catch (ex) { }
                }
                // Use regular replace
                newValue = newValue ? valuesMap.get(String(newValue)) || newValue : valuesMap.get(rawValue);
                // Correct values: literal booleans/null markers become real values.
                newValue = newValue == 'TRUE' || newValue == 'true' ? true :
                    newValue == 'FALSE' || newValue == 'false' ? false :
                        newValue == 'null' || newValue == 'NULL' || newValue == 'undefined' || newValue == '#N/A' || newValue == undefined ? null : newValue;
                if (typeof newValue != 'undefined') {
                    record[field] = newValue;
                }
                // Replace lookups: re-resolve the companion Id field through the
                // parent's external-id map using the (mapped) value.
                if (nameId && record.hasOwnProperty(nameId)) {
                    let newValueId = sourceExtIdMap.get(newValue);
                    if (newValueId) {
                        record[nameId] = newValueId;
                    }
                }
            });
        }
    });
}
private async _retrieveFilteredRecords(queries: string[], orgData: TaskOrgData, targetFieldMapping?: IFieldMapping): Promise<Array<any>> {
let sfdx = new Sfdx(orgData.org, targetFieldMapping);
let records = new Array<any>();
for (let index = 0; index < queries.length; index++) {
const query = queries[index];
// Query message ------
this.logger.infoVerbose(RESOURCES.queryString, this.sObjectName, this.createShortQueryString(query));
// Fetch records
records = records.concat(await sfdx.retrieveRecordsAsync(query, false));
}
return records;
}
/**
 * Rewrites the given query so every selected field is expressed in its
 * composed form, expanding polymorphic __r lookup fields into their special
 * query representation.
 *
 * @param query             The SOQL query to transform
 * @param sourceSObjectName The source object api name (overridden by the parsed query)
 * @returns The (possibly rewritten) query together with the sObject name
 */
private _transformQuery(query: string, sourceSObjectName: string) {
    const parsed = parseQuery(query);
    sourceSObjectName = parsed.sObject;
    const scriptObject = this.script.objectsMap.get(sourceSObjectName);
    if (scriptObject) {
        const fields = [];
        parsed.fields.forEach((field: SOQLField) => {
            const rawValue = String(field["rawValue"] || field.field);
            // Look the field up by name first, then by any of its __r representations.
            let describe = scriptObject.fieldsInQueryMap.get(rawValue);
            if (!describe) {
                describe = [...scriptObject.fieldsInQueryMap.values()]
                    .find(f => f.__rNames.find(x => x == rawValue));
            }
            if (describe && describe.isPolymorphicField && describe.is__r) {
                // Polymorphic lookups need the dedicated polymorphic query field form.
                fields.push(getComposedField(describe.getPolymorphicQueryField(rawValue)));
            } else {
                fields.push(getComposedField(rawValue));
            }
        });
        parsed.fields = fields;
        query = composeQuery(parsed);
    }
    return {
        targetSObjectName: sourceSObjectName,
        query
    };
}
/**
 * Rewrites a source-org query into a target-org query using the configured
 * field mapping: renames the sObject, maps selected fields to their target
 * names (keeping Id when it maps to something else), deduplicates fields and
 * maps the left-hand field names of the WHERE clause chain.
 *
 * @param query             The source SOQL query
 * @param sourceSObjectName The source sObject api name
 * @returns {IFieldMappingResult} The mapped query and the target sObject name
 */
private _mapSourceQueryToTarget(query: string, sourceSObjectName: string): IFieldMappingResult {
    let mapping = this.script.sourceTargetFieldMapping.get(sourceSObjectName);
    if (mapping && mapping.hasChange) {
        let scriptObject = this.script.objectsMap.get(sourceSObjectName);
        if (scriptObject) {
            let targetParsedQuery = parseQuery(query);
            targetParsedQuery.sObject = mapping.targetSObjectName;
            let fields = [];
            targetParsedQuery.fields.forEach((field: SOQLField) => {
                let rawValue = String(field["rawValue"] || field.field);
                let describe = scriptObject.fieldsInQueryMap.get(rawValue);
                if (describe) {
                    // Mapped field => select its target name (alias preserved).
                    let targetField = describe.targetName + (field["alias"] ? " " + field["alias"] : "");
                    fields.push(getComposedField(targetField));
                    if (rawValue == "Id") {
                        // Id mapped to another field => still select Id itself, first.
                        fields.unshift(getComposedField(rawValue));
                    }
                } else {
                    // Unmapped field => keep as-is (alias preserved).
                    let targetField = rawValue + (field["alias"] ? " " + field["alias"] : "");
                    fields.push(getComposedField(targetField));
                }
            });
            fields = Common.distinctArray(fields, 'field');
            targetParsedQuery.fields = fields;
            if (targetParsedQuery.where) {
                // Walk the linked WHERE chain (left condition / right tail),
                // renaming each condition's field to its target name.
                // NOTE(review): nested/grouped conditions are not descended into —
                // confirm queries with parenthesized WHERE groups are not expected here.
                let left: Condition = targetParsedQuery.where.left;
                let right: WhereClause = targetParsedQuery.where.right;
                while (left) {
                    let describe = scriptObject.fieldsInQueryMap.get(left.field);
                    if (describe) {
                        left.field = describe.targetName;
                    }
                    left = right && right.left;
                    right = right && right.right;
                }
            }
            query = composeQuery(targetParsedQuery);
            this.logger.infoNormal(RESOURCES.mappingQuery, this.sObjectName, mapping.targetSObjectName, this.createShortQueryString(query));
            return {
                targetSObjectName: mapping.targetSObjectName,
                query
            };
        }
    }
    // No mapping (or no script object) => return the query unchanged.
    return {
        targetSObjectName: sourceSObjectName,
        query
    };
}
/**
 * Renames record properties from their source api names to the mapped target
 * api names (field mapping feature). Records are modified in place; the "Id"
 * source property is kept even when it maps to another field.
 *
 * @param records           The source records to transform in place
 * @param sourceSObjectName The source sObject api name
 * @returns {IFieldMappingResult} The records and the resolved sObject name
 */
private _mapSourceRecordsToTarget(records: Array<any>, sourceSObjectName: string): IFieldMappingResult {
    let targetSObjectName = sourceSObjectName;
    const mapping = this.script.sourceTargetFieldMapping.get(sourceSObjectName);
    if (mapping && mapping.hasChange && records) {
        const scriptObject = this.script.objectsMap.get(sourceSObjectName);
        if (scriptObject) {
            this.logger.infoNormal(RESOURCES.mappingSourceRecords, this.sObjectName, mapping.targetSObjectName);
            targetSObjectName = mapping.targetSObjectName;
            const fieldMapping = scriptObject.sourceToTargetFieldNameMap;
            for (const record of records) {
                fieldMapping.forEach((newProp, oldProp) => {
                    if (newProp == oldProp || !record.hasOwnProperty(oldProp)) {
                        return;
                    }
                    record[newProp] = record[oldProp];
                    if (oldProp != "Id") { // Id => ExternalId__c (Id -> ExternalId__c)
                        delete record[oldProp];
                    }
                });
            }
        }
    }
    return {
        targetSObjectName,
        records
    };
}
/**
 * Renames record properties back from their target api names to the original
 * source api names (reverse of _mapSourceRecordsToTarget). Records are
 * modified in place; when the target property maps back to "Id" the value is
 * discarded rather than written over the source Id.
 *
 * @param records           The target records to transform in place
 * @param sourceSObjectName The source sObject api name
 * @returns {IFieldMappingResult} The records and the resolved sObject name
 */
private _mapTargetRecordsToSource(records: Array<any>, sourceSObjectName: string): IFieldMappingResult {
    let targetSObjectName = sourceSObjectName;
    const mapping = this.script.sourceTargetFieldMapping.get(sourceSObjectName);
    if (mapping && mapping.hasChange && records) {
        const scriptObject = this.script.objectsMap.get(sourceSObjectName);
        if (scriptObject) {
            this.logger.infoNormal(RESOURCES.mappingTargetRecords, this.sObjectName, mapping.targetSObjectName);
            targetSObjectName = mapping.targetSObjectName;
            const fieldMapping = scriptObject.sourceToTargetFieldNameMap;
            for (const record of records) {
                fieldMapping.forEach((newProp, oldProp) => {
                    if (newProp == oldProp || !record.hasOwnProperty(newProp)) {
                        return;
                    }
                    if (oldProp != "Id") {
                        record[oldProp] = record[newProp]; // Id => Externalid__c (ExternalId -> Id)
                    }
                    delete record[newProp];
                });
            }
        }
    }
    return {
        targetSObjectName,
        records
    };
}
// Field-mapping hooks handed to API engines when talking to the TARGET org:
// full query + record translation in both directions.
private _targetFieldMapping: IFieldMapping = <IFieldMapping>{
    sourceQueryToTarget: this._mapSourceQueryToTarget.bind(this),
    sourceRecordsToTarget: this._mapSourceRecordsToTarget.bind(this),
    targetRecordsToSource: this._mapTargetRecordsToSource.bind(this),
    transformQuery: this._transformQuery.bind(this)
}
// Field-mapping hooks used when querying the SOURCE org:
// only the query transformation is needed.
private _sourceFieldMapping: IFieldMapping = <IFieldMapping>{
    transformQuery: this._transformQuery.bind(this)
}
} | the_stack |
import { Big, BigJS, Biglike, ONE, ZERO } from './types';
import { OrderbookState, PriceLevel } from './Orderbook';
import BigArray from './BigArray';
// Result of simulating a market order walk over one side of the book.
export interface MarketOrderStats {
    first_price: BigJS;          // price of the first (best) level touched
    last_price: BigJS;           // price of the last level touched
    ave_price: BigJS;            // total_cost / total_size (fees included)
    total_size: BigJS;           // base amount actually filled
    total_cost: BigJS;           // quote amount spent/received, including fees
    slippage: BigJS;             // |ave_price - first_price| / first_price
    fees: BigJS;                 // total fee amount
    unfilled: BigJS;             // remainder that could not be filled from the book
    first_price_index?: number;  // index of the first level touched (cached calculations only)
    last_price_index?: number;   // index of the last level touched (cached calculations only)
}
// Parallel column arrays for one side of the book.
interface OrdersCache {
    prices: BigArray;  // level prices
    sizes: BigArray;   // level total sizes
    value: BigArray;   // element-wise price * size
}
// Cached columns for both sides of the book.
interface OrderbookCache {
    bids: OrdersCache;
    asks: OrdersCache;
}
/**
 * Calculate stats for trades given an order book. The orderbook is immutable.
 * Cumulative price/size/value columns can be pre-cached (precache()) so that
 * repeated calculations over the same book are cheap.
 */
export default class OrderbookUtils {
    /**
     * Applies the fractional fee rate to a total cost.
     * @returns the fee amount and the cost including fees
     */
    static calcFees(fees: BigJS, totalCost: BigJS): { fees_total: BigJS; total_cost: BigJS } {
        const feesTotal = totalCost.times(fees);
        totalCost = totalCost.plus(feesTotal);
        return { fees_total: feesTotal, total_cost: totalCost };
    }
    // Converts a PriceLevel[] into parallel BigArray columns (price, size,
    // price*size) so cumulative sums can be cached and reused.
    private static extractOrders(orders: PriceLevel[]): OrdersCache {
        const len = orders.length;
        const prices = new Array(len);
        const sizes = new Array(len);
        for (let i = 0; i < len; i++) {
            prices[i] = orders[i].price;
            sizes[i] = orders[i].totalSize;
        }
        const priceArray = new BigArray(prices);
        const sizeArray = new BigArray(sizes);
        const value = sizeArray.mult(priceArray);
        return {
            prices: priceArray,
            sizes: sizeArray,
            value: value
        };
    }
    /**
     * Find the index of the order that will fill size items starting at start_index
     * @param cumSum {BigJS[]}
     * @param size {BigJS}
     * @param startIndex {number} Optional optimisation argument, if it is known that the answer is above a certain index
     * @returns {number} the first index in order_data s.t. sum_to_i >= size
     */
    private static getIndexOf(cumSum: BigJS[], size: BigJS, startIndex: number): number {
        let result = startIndex || 0;
        // Clamps at the last index: when the book is too shallow the returned
        // index may not actually satisfy cumSum[result] >= size.
        while (result < cumSum.length - 1 && cumSum[result].lt(size)) {
            result++;
        }
        return result;
    }
    private readonly book: OrderbookState;
    private precalc: OrderbookCache; // lazily built; null until precache() runs
    constructor(book: OrderbookState) {
        if (!book || typeof book !== 'object') {
            throw new Error('OrderbookUtils requires an order book object in the constructor');
        }
        const validBook = !!book.asks && !!book.bids;
        if (!validBook) {
            throw new Error('The order object must have both a bids and asks array');
        }
        this.book = book;
        this.precalc = null;
    }
    // True once precache() has been called (and the cache not yet busted).
    get isCached() {
        return this.precalc !== null;
    }
    // Lazily builds the cumulative-sum cache on first access.
    private get cache() {
        if (!this.precalc) {
            this.precache();
        }
        return this.precalc;
    }
    // Pre-computes the price/size/value columns for both book sides.
    precache() {
        const book = this.book;
        this.precalc = {
            asks: OrderbookUtils.extractOrders(book.asks),
            bids: OrderbookUtils.extractOrders(book.bids)
        };
    }
    // Discards the cache (call after the underlying book state is replaced).
    bustCache() {
        this.precalc = null;
    }
    // Returns the underlying (immutable) book state.
    state() {
        return this.book;
    }
    /**
     * Calculate stats for a market order. If a cached version is available, it will use that, which is much more
     * efficient if multiple calculations on the same book are required. Otherwise for small, once-off calculations
     * it's better to use the naive approach
     * @param side {string} Must be 'buy' or 'sell'
     * @param amount {Biglike} The size of the trade
     * @param fees {BigJS} Optional. The fee rate charged (as a fraction, NOT a percentage)
     * @returns {{ave_price: BigJS, total_size: BigJS, total_cost: BigJS, slippage: BigJS, fees: BigJS, unfilled: BigJS}}
     */
    calculateMarketOrderStats(side: string, amount: Biglike, fees: BigJS = ZERO): MarketOrderStats {
        if (+amount === 0) {
            // Zero-size order: everything is zero except the prices, which are
            // pinned to the best level on the relevant side.
            const orders: PriceLevel[] = side === 'buy' ? this.book.asks : this.book.bids;
            const firstOrder: PriceLevel = orders[0];
            return {
                first_price: firstOrder.price,
                last_price: firstOrder.price,
                ave_price: firstOrder.price,
                total_size: ZERO,
                total_cost: ZERO,
                slippage: ZERO,
                fees: ZERO,
                unfilled: ZERO
            };
        }
        return this.isCached ? this.calculateStatsFromCache(side, amount, fees) : this.calculateStatsNoCache(side, amount, fees);
    }
    /**
     * Return the index of the first order where the cumulative size is greater or equal to size,
     * or -1 when the whole side is smaller than size.
     * @param size {BigJS}
     * @param isBuy {boolean}
     * @returns {number}
     */
    getIndexOfTotalSize(size: BigJS, isBuy: boolean): number {
        const orderData = isBuy ? this.cache.asks : this.cache.bids;
        const sizes = orderData.sizes;
        if (size.gt(sizes.sum())) {
            return -1;
        }
        return OrderbookUtils.getIndexOf(sizes.cumsum().values, size, 0);
    }
    /**
     * Return the index of the first order where the cumulative value is greater or equal to value.
     * NOTE: unlike getIndexOfTotalSize there is no -1 "not enough depth" guard here;
     * the index clamps to the last level instead.
     * @param value {BigJS}
     * @param isBuy {boolean}
     * @returns {number}
     */
    getIndexOfTotalValue(value: BigJS, isBuy: boolean): number {
        const orderData = isBuy ? this.cache.asks : this.cache.bids;
        const cumsum = orderData.value.cumsum().values;
        return OrderbookUtils.getIndexOf(cumsum, value, 0);
    }
    /**
     * Calculate the marginal cost in buying from start_size to end_size, ie sum(price_i * size_i) i == start_size to end_size
     * @param startSize {BigJS} the lower bound of the order
     * @param endSize {BigJS} the upper bound of the order
     * @param isBuy
     * @param fees {BigJS}
     * @param useValue {boolean} integrate using the value (quote currency) rather than base
     */
    integrateBetween(startSize: BigJS, endSize: Biglike, isBuy: boolean, fees: BigJS, useValue: boolean = false): MarketOrderStats {
        endSize = Big(endSize);
        const cache = this.cache;
        const orderData = isBuy ? cache.asks : cache.bids;
        // Cumulative sums for these arrays are cached, so multiple calls to this method is very efficient after the first one
        // if calculating with values (quote currency) the 'size' vars actually refer to value. They'll be remapped later
        const cumSize = useValue ? orderData.value.cumsum().values : orderData.sizes.cumsum().values;
        const startIndex = OrderbookUtils.getIndexOf(cumSize, startSize, 0);
        // Portion of the start level that lies beyond startSize and therefore
        // still belongs to this order.
        const partialStartSize = cumSize[startIndex].minus(startSize);
        const firstPriceIndex = partialStartSize.eq(ZERO) ? startIndex + 1 : startIndex;
        const firstPrice = Big(orderData.prices.values[firstPriceIndex]);
        let endIndex = OrderbookUtils.getIndexOf(cumSize, endSize, startIndex);
        // Portion of the end level that lies beyond endSize and must be excluded.
        let sizeNotIncluded = cumSize[endIndex].minus(endSize);
        if (sizeNotIncluded.lt(ZERO)) {
            sizeNotIncluded = ZERO;
        }
        let lastPrice = Big(orderData.prices.values[endIndex]);
        let totalSize = cumSize[endIndex].minus(startSize).minus(sizeNotIncluded);
        const remaining = endSize.minus(startSize).minus(totalSize);
        let totalCost;
        if (!useValue) {
            const cumValues = orderData.value.cumsum().values;
            totalCost = cumValues[endIndex].minus(cumValues[startIndex])
                .plus(partialStartSize.times(firstPrice))
                .minus(sizeNotIncluded.times(lastPrice));
        } else {
            // We were summing over values, so 'cost' was actually size. Re-map that here
            totalCost = totalSize;
            const cumSizes = orderData.sizes.cumsum().values;
            totalSize = cumSizes[endIndex].minus(cumSizes[startIndex])
                .plus(partialStartSize.div(firstPrice))
                .minus(sizeNotIncluded.div(lastPrice));
        }
        const feeCalc = OrderbookUtils.calcFees(fees, totalCost);
        let avePrice;
        if (totalSize.eq(ZERO)) {
            // Nothing filled: pin the prices to the first touched level to avoid
            // dividing by zero below.
            avePrice = firstPrice;
            lastPrice = firstPrice;
            endIndex = firstPriceIndex;
        } else {
            avePrice = feeCalc.total_cost.div(totalSize);
        }
        const slippage = avePrice.minus(firstPrice).div(firstPrice).abs();
        return {
            first_price: firstPrice,
            last_price: lastPrice,
            ave_price: avePrice,
            total_size: totalSize,
            total_cost: feeCalc.total_cost,
            slippage: slippage,
            fees: feeCalc.fees_total,
            unfilled: remaining,
            first_price_index: firstPriceIndex,
            last_price_index: endIndex
        };
    }
    /**
     * Return the cumulative order size after filling until `index` orders
     * @param index {number}
     * @param isBuy {boolean}
     */
    getCumulativeSize(index: number, isBuy: boolean): BigJS {
        const orderData = isBuy ? this.cache.asks : this.cache.bids;
        return orderData.sizes.sumTo(index);
    }
    /**
     * Return the cumulative order cost after filling until `index` orders
     * @param index {number}
     * @param isBuy {boolean}
     */
    getCumulativeCost(index: number, isBuy: boolean): BigJS {
        const orderData = isBuy ? this.cache.asks : this.cache.bids;
        return orderData.value.sumTo(index);
    }
    /**
     * Calculate the base size that can be bought with total_cost, including fees
     * @param startValue {BigJS} The total value that has already been traded
     * @param totalFunds {BigJS} The quote amount to spend, including fees
     * @param isBuy {boolean}
     * @param fees {BigJS} fractional fee rate
     */
    getSizeFromCost(startValue: BigJS, totalFunds: BigJS, isBuy: boolean, fees: BigJS = ZERO) {
        const onePlusFees = ONE.plus(fees);
        // Strip the fee component so integration runs over the pre-fee value.
        const nonFeeValue = totalFunds.div(onePlusFees);
        const endValue = startValue.plus(nonFeeValue);
        const result = this.integrateBetween(startValue, endValue, isBuy, fees, true);
        // When using quote currencies, we expect the unfilled amount to be inclusive of expected fees
        result.unfilled = result.unfilled.times(onePlusFees);
        return result;
    }
    // Cached path: integrate the whole order from zero.
    private calculateStatsFromCache(side: string, amount: Biglike, fees: BigJS): MarketOrderStats {
        return this.integrateBetween(ZERO, amount, side === 'buy', fees);
    }
    // Naive path: walk the levels one by one without the cumulative-sum cache.
    private calculateStatsNoCache(side: string, amount: Biglike, fees: BigJS = ZERO): MarketOrderStats {
        amount = Big(amount);
        let remaining = Big(amount);
        let totalCost = ZERO;
        const orders = side === 'buy' ? this.book.asks : this.book.bids;
        // NOTE(review): this guard throws whenever orders[0] is NOT an array, yet
        // the loop below reads object properties (.price / .totalSize) that match
        // the PriceLevel typing of this file. The guard appears inverted or left
        // over from an older array-format book — confirm before relying on the
        // non-cached code path.
        if (!Array.isArray(orders[0])) {
            throw new Error('Use pre-caching to calculate stats on object-format orderbooks');
        }
        let i = 0;
        let size = null;
        const firstPrice = Big(orders[0].price);
        let lastPrice = Big(orders[0].price);
        do {
            lastPrice = orders[i].price;
            size = orders[i].totalSize;
            // We've filled the order
            if (remaining.lte(size)) {
                size = Big(remaining);
                remaining = ZERO;
            } else { /* There's (potentially) more to be filled*/
                remaining = remaining.minus(size);
            }
            totalCost = totalCost.plus(lastPrice.times(size));
            i++;
        } while (remaining.gt(0) && i < orders.length);
        const feeCalc = OrderbookUtils.calcFees(fees, totalCost);
        const fees_total = feeCalc.fees_total;
        totalCost = feeCalc.total_cost;
        const totalSize = amount.minus(remaining);
        const avePrice = totalCost.div(totalSize);
        const bestPrice = orders[0].price;
        const slippage = avePrice.minus(bestPrice).div(bestPrice).abs();
        return {
            first_price: firstPrice,
            last_price: lastPrice,
            ave_price: avePrice,
            total_size: totalSize,
            total_cost: totalCost,
            slippage: slippage,
            fees: fees_total,
            unfilled: remaining
        };
    }
}
import {Request} from '../lib/request';
import {Response} from '../lib/response';
import {AWSError} from '../lib/error';
import {Service} from '../lib/service';
import {ServiceConfigurationOptions} from '../lib/service';
import {ConfigBase as Config} from '../lib/config-base';
interface Blob {}
/**
 * Ambient client declaration for Amazon Managed Blockchain. Each API
 * operation is exposed as a pair of overloads: one taking a params object
 * plus an optional callback, and one taking only a callback; both return a
 * Request that can also be awaited via its promise() method.
 */
declare class ManagedBlockchain extends Service {
/**
* Constructs a service object. This object has one method for each API operation.
*/
constructor(options?: ManagedBlockchain.Types.ClientConfiguration)
config: Config & ManagedBlockchain.Types.ClientConfiguration;
/**
* Creates a member within a Managed Blockchain network.
*/
createMember(params: ManagedBlockchain.Types.CreateMemberInput, callback?: (err: AWSError, data: ManagedBlockchain.Types.CreateMemberOutput) => void): Request<ManagedBlockchain.Types.CreateMemberOutput, AWSError>;
/**
* Creates a member within a Managed Blockchain network.
*/
createMember(callback?: (err: AWSError, data: ManagedBlockchain.Types.CreateMemberOutput) => void): Request<ManagedBlockchain.Types.CreateMemberOutput, AWSError>;
/**
* Creates a new blockchain network using Amazon Managed Blockchain.
*/
createNetwork(params: ManagedBlockchain.Types.CreateNetworkInput, callback?: (err: AWSError, data: ManagedBlockchain.Types.CreateNetworkOutput) => void): Request<ManagedBlockchain.Types.CreateNetworkOutput, AWSError>;
/**
* Creates a new blockchain network using Amazon Managed Blockchain.
*/
createNetwork(callback?: (err: AWSError, data: ManagedBlockchain.Types.CreateNetworkOutput) => void): Request<ManagedBlockchain.Types.CreateNetworkOutput, AWSError>;
/**
* Creates a peer node in a member.
*/
createNode(params: ManagedBlockchain.Types.CreateNodeInput, callback?: (err: AWSError, data: ManagedBlockchain.Types.CreateNodeOutput) => void): Request<ManagedBlockchain.Types.CreateNodeOutput, AWSError>;
/**
* Creates a peer node in a member.
*/
createNode(callback?: (err: AWSError, data: ManagedBlockchain.Types.CreateNodeOutput) => void): Request<ManagedBlockchain.Types.CreateNodeOutput, AWSError>;
/**
* Creates a proposal for a change to the network that other members of the network can vote on, for example, a proposal to add a new member to the network. Any member can create a proposal.
*/
createProposal(params: ManagedBlockchain.Types.CreateProposalInput, callback?: (err: AWSError, data: ManagedBlockchain.Types.CreateProposalOutput) => void): Request<ManagedBlockchain.Types.CreateProposalOutput, AWSError>;
/**
* Creates a proposal for a change to the network that other members of the network can vote on, for example, a proposal to add a new member to the network. Any member can create a proposal.
*/
createProposal(callback?: (err: AWSError, data: ManagedBlockchain.Types.CreateProposalOutput) => void): Request<ManagedBlockchain.Types.CreateProposalOutput, AWSError>;
/**
* Deletes a member. Deleting a member removes the member and all associated resources from the network. DeleteMember can only be called for a specified MemberId if the principal performing the action is associated with the AWS account that owns the member. In all other cases, the DeleteMember action is carried out as the result of an approved proposal to remove a member. If MemberId is the last member in a network specified by the last AWS account, the network is deleted also.
*/
deleteMember(params: ManagedBlockchain.Types.DeleteMemberInput, callback?: (err: AWSError, data: ManagedBlockchain.Types.DeleteMemberOutput) => void): Request<ManagedBlockchain.Types.DeleteMemberOutput, AWSError>;
/**
* Deletes a member. Deleting a member removes the member and all associated resources from the network. DeleteMember can only be called for a specified MemberId if the principal performing the action is associated with the AWS account that owns the member. In all other cases, the DeleteMember action is carried out as the result of an approved proposal to remove a member. If MemberId is the last member in a network specified by the last AWS account, the network is deleted also.
*/
deleteMember(callback?: (err: AWSError, data: ManagedBlockchain.Types.DeleteMemberOutput) => void): Request<ManagedBlockchain.Types.DeleteMemberOutput, AWSError>;
/**
* Deletes a peer node from a member that your AWS account owns. All data on the node is lost and cannot be recovered.
*/
deleteNode(params: ManagedBlockchain.Types.DeleteNodeInput, callback?: (err: AWSError, data: ManagedBlockchain.Types.DeleteNodeOutput) => void): Request<ManagedBlockchain.Types.DeleteNodeOutput, AWSError>;
/**
* Deletes a peer node from a member that your AWS account owns. All data on the node is lost and cannot be recovered.
*/
deleteNode(callback?: (err: AWSError, data: ManagedBlockchain.Types.DeleteNodeOutput) => void): Request<ManagedBlockchain.Types.DeleteNodeOutput, AWSError>;
/**
* Returns detailed information about a member.
*/
getMember(params: ManagedBlockchain.Types.GetMemberInput, callback?: (err: AWSError, data: ManagedBlockchain.Types.GetMemberOutput) => void): Request<ManagedBlockchain.Types.GetMemberOutput, AWSError>;
/**
* Returns detailed information about a member.
*/
getMember(callback?: (err: AWSError, data: ManagedBlockchain.Types.GetMemberOutput) => void): Request<ManagedBlockchain.Types.GetMemberOutput, AWSError>;
/**
* Returns detailed information about a network.
*/
getNetwork(params: ManagedBlockchain.Types.GetNetworkInput, callback?: (err: AWSError, data: ManagedBlockchain.Types.GetNetworkOutput) => void): Request<ManagedBlockchain.Types.GetNetworkOutput, AWSError>;
/**
* Returns detailed information about a network.
*/
getNetwork(callback?: (err: AWSError, data: ManagedBlockchain.Types.GetNetworkOutput) => void): Request<ManagedBlockchain.Types.GetNetworkOutput, AWSError>;
/**
* Returns detailed information about a peer node.
*/
getNode(params: ManagedBlockchain.Types.GetNodeInput, callback?: (err: AWSError, data: ManagedBlockchain.Types.GetNodeOutput) => void): Request<ManagedBlockchain.Types.GetNodeOutput, AWSError>;
/**
* Returns detailed information about a peer node.
*/
getNode(callback?: (err: AWSError, data: ManagedBlockchain.Types.GetNodeOutput) => void): Request<ManagedBlockchain.Types.GetNodeOutput, AWSError>;
/**
* Returns detailed information about a proposal.
*/
getProposal(params: ManagedBlockchain.Types.GetProposalInput, callback?: (err: AWSError, data: ManagedBlockchain.Types.GetProposalOutput) => void): Request<ManagedBlockchain.Types.GetProposalOutput, AWSError>;
/**
* Returns detailed information about a proposal.
*/
getProposal(callback?: (err: AWSError, data: ManagedBlockchain.Types.GetProposalOutput) => void): Request<ManagedBlockchain.Types.GetProposalOutput, AWSError>;
/**
* Returns a listing of all invitations for the current AWS account.
*/
listInvitations(params: ManagedBlockchain.Types.ListInvitationsInput, callback?: (err: AWSError, data: ManagedBlockchain.Types.ListInvitationsOutput) => void): Request<ManagedBlockchain.Types.ListInvitationsOutput, AWSError>;
/**
* Returns a listing of all invitations for the current AWS account.
*/
listInvitations(callback?: (err: AWSError, data: ManagedBlockchain.Types.ListInvitationsOutput) => void): Request<ManagedBlockchain.Types.ListInvitationsOutput, AWSError>;
/**
* Returns a listing of the members in a network and properties of their configurations.
*/
listMembers(params: ManagedBlockchain.Types.ListMembersInput, callback?: (err: AWSError, data: ManagedBlockchain.Types.ListMembersOutput) => void): Request<ManagedBlockchain.Types.ListMembersOutput, AWSError>;
/**
* Returns a listing of the members in a network and properties of their configurations.
*/
listMembers(callback?: (err: AWSError, data: ManagedBlockchain.Types.ListMembersOutput) => void): Request<ManagedBlockchain.Types.ListMembersOutput, AWSError>;
/**
* Returns information about the networks in which the current AWS account has members.
*/
listNetworks(params: ManagedBlockchain.Types.ListNetworksInput, callback?: (err: AWSError, data: ManagedBlockchain.Types.ListNetworksOutput) => void): Request<ManagedBlockchain.Types.ListNetworksOutput, AWSError>;
/**
* Returns information about the networks in which the current AWS account has members.
*/
listNetworks(callback?: (err: AWSError, data: ManagedBlockchain.Types.ListNetworksOutput) => void): Request<ManagedBlockchain.Types.ListNetworksOutput, AWSError>;
/**
* Returns information about the nodes within a network.
*/
listNodes(params: ManagedBlockchain.Types.ListNodesInput, callback?: (err: AWSError, data: ManagedBlockchain.Types.ListNodesOutput) => void): Request<ManagedBlockchain.Types.ListNodesOutput, AWSError>;
/**
* Returns information about the nodes within a network.
*/
listNodes(callback?: (err: AWSError, data: ManagedBlockchain.Types.ListNodesOutput) => void): Request<ManagedBlockchain.Types.ListNodesOutput, AWSError>;
/**
* Returns the listing of votes for a specified proposal, including the value of each vote and the unique identifier of the member that cast the vote.
*/
listProposalVotes(params: ManagedBlockchain.Types.ListProposalVotesInput, callback?: (err: AWSError, data: ManagedBlockchain.Types.ListProposalVotesOutput) => void): Request<ManagedBlockchain.Types.ListProposalVotesOutput, AWSError>;
/**
* Returns the listing of votes for a specified proposal, including the value of each vote and the unique identifier of the member that cast the vote.
*/
listProposalVotes(callback?: (err: AWSError, data: ManagedBlockchain.Types.ListProposalVotesOutput) => void): Request<ManagedBlockchain.Types.ListProposalVotesOutput, AWSError>;
/**
* Returns a listing of proposals for the network.
*/
listProposals(params: ManagedBlockchain.Types.ListProposalsInput, callback?: (err: AWSError, data: ManagedBlockchain.Types.ListProposalsOutput) => void): Request<ManagedBlockchain.Types.ListProposalsOutput, AWSError>;
/**
* Returns a listing of proposals for the network.
*/
listProposals(callback?: (err: AWSError, data: ManagedBlockchain.Types.ListProposalsOutput) => void): Request<ManagedBlockchain.Types.ListProposalsOutput, AWSError>;
/**
* Rejects an invitation to join a network. This action can be called by a principal in an AWS account that has received an invitation to create a member and join a network.
*/
rejectInvitation(params: ManagedBlockchain.Types.RejectInvitationInput, callback?: (err: AWSError, data: ManagedBlockchain.Types.RejectInvitationOutput) => void): Request<ManagedBlockchain.Types.RejectInvitationOutput, AWSError>;
/**
* Rejects an invitation to join a network. This action can be called by a principal in an AWS account that has received an invitation to create a member and join a network.
*/
rejectInvitation(callback?: (err: AWSError, data: ManagedBlockchain.Types.RejectInvitationOutput) => void): Request<ManagedBlockchain.Types.RejectInvitationOutput, AWSError>;
/**
* Updates a member configuration with new parameters.
*/
updateMember(params: ManagedBlockchain.Types.UpdateMemberInput, callback?: (err: AWSError, data: ManagedBlockchain.Types.UpdateMemberOutput) => void): Request<ManagedBlockchain.Types.UpdateMemberOutput, AWSError>;
/**
* Updates a member configuration with new parameters.
*/
updateMember(callback?: (err: AWSError, data: ManagedBlockchain.Types.UpdateMemberOutput) => void): Request<ManagedBlockchain.Types.UpdateMemberOutput, AWSError>;
/**
* Updates a node configuration with new parameters.
*/
updateNode(params: ManagedBlockchain.Types.UpdateNodeInput, callback?: (err: AWSError, data: ManagedBlockchain.Types.UpdateNodeOutput) => void): Request<ManagedBlockchain.Types.UpdateNodeOutput, AWSError>;
/**
* Updates a node configuration with new parameters.
*/
updateNode(callback?: (err: AWSError, data: ManagedBlockchain.Types.UpdateNodeOutput) => void): Request<ManagedBlockchain.Types.UpdateNodeOutput, AWSError>;
/**
* Casts a vote for a specified ProposalId on behalf of a member. The member to vote as, specified by VoterMemberId, must be in the same AWS account as the principal that calls the action.
*/
voteOnProposal(params: ManagedBlockchain.Types.VoteOnProposalInput, callback?: (err: AWSError, data: ManagedBlockchain.Types.VoteOnProposalOutput) => void): Request<ManagedBlockchain.Types.VoteOnProposalOutput, AWSError>;
/**
* Casts a vote for a specified ProposalId on behalf of a member. The member to vote as, specified by VoterMemberId, must be in the same AWS account as the principal that calls the action.
*/
voteOnProposal(callback?: (err: AWSError, data: ManagedBlockchain.Types.VoteOnProposalOutput) => void): Request<ManagedBlockchain.Types.VoteOnProposalOutput, AWSError>;
}
declare namespace ManagedBlockchain {
// --- Voting-policy and Create* operation shapes ---
export interface ApprovalThresholdPolicy {
/**
* The percentage of votes among all members that must be YES for a proposal to be approved. For example, a ThresholdPercentage value of 50 indicates 50%. The ThresholdComparator determines the precise comparison. If a ThresholdPercentage value of 50 is specified on a network with 10 members, along with a ThresholdComparator value of GREATER_THAN, this indicates that 6 YES votes are required for the proposal to be approved.
*/
ThresholdPercentage?: ThresholdPercentageInt;
/**
* The duration from the time that a proposal is created until it expires. If members cast neither the required number of YES votes to approve the proposal nor the number of NO votes required to reject it before the duration expires, the proposal is EXPIRED and ProposalActions are not carried out.
*/
ProposalDurationInHours?: ProposalDurationInt;
/**
* Determines whether the vote percentage must be greater than the ThresholdPercentage or must be greater than or equal to the ThresholdPercentage to be approved.
*/
ThresholdComparator?: ThresholdComparator;
}
export type AvailabilityZoneString = string;
export type ClientRequestTokenString = string;
export interface CreateMemberInput {
/**
* A unique, case-sensitive identifier that you provide to ensure the idempotency of the operation. An idempotent operation completes no more than one time. This identifier is required only if you make a service request directly using an HTTP client. It is generated automatically if you use an AWS SDK or the AWS CLI.
*/
ClientRequestToken: ClientRequestTokenString;
/**
* The unique identifier of the invitation that is sent to the member to join the network.
*/
InvitationId: ResourceIdString;
/**
* The unique identifier of the network in which the member is created.
*/
NetworkId: ResourceIdString;
/**
* Member configuration parameters.
*/
MemberConfiguration: MemberConfiguration;
}
export interface CreateMemberOutput {
/**
* The unique identifier of the member.
*/
MemberId?: ResourceIdString;
}
export interface CreateNetworkInput {
/**
* A unique, case-sensitive identifier that you provide to ensure the idempotency of the operation. An idempotent operation completes no more than one time. This identifier is required only if you make a service request directly using an HTTP client. It is generated automatically if you use an AWS SDK or the AWS CLI.
*/
ClientRequestToken: ClientRequestTokenString;
/**
* The name of the network.
*/
Name: NameString;
/**
* An optional description for the network.
*/
Description?: DescriptionString;
/**
* The blockchain framework that the network uses.
*/
Framework: Framework;
/**
* The version of the blockchain framework that the network uses.
*/
FrameworkVersion: FrameworkVersionString;
/**
* Configuration properties of the blockchain framework relevant to the network configuration.
*/
FrameworkConfiguration?: NetworkFrameworkConfiguration;
/**
* The voting rules used by the network to determine if a proposal is approved.
*/
VotingPolicy: VotingPolicy;
/**
* Configuration properties for the first member within the network.
*/
MemberConfiguration: MemberConfiguration;
}
export interface CreateNetworkOutput {
/**
* The unique identifier for the network.
*/
NetworkId?: ResourceIdString;
/**
* The unique identifier for the first member within the network.
*/
MemberId?: ResourceIdString;
}
export interface CreateNodeInput {
/**
* A unique, case-sensitive identifier that you provide to ensure the idempotency of the operation. An idempotent operation completes no more than one time. This identifier is required only if you make a service request directly using an HTTP client. It is generated automatically if you use an AWS SDK or the AWS CLI.
*/
ClientRequestToken: ClientRequestTokenString;
/**
* The unique identifier of the network in which this node runs.
*/
NetworkId: ResourceIdString;
/**
* The unique identifier of the member that owns this node.
*/
MemberId: ResourceIdString;
/**
* The properties of a node configuration.
*/
NodeConfiguration: NodeConfiguration;
}
export interface CreateNodeOutput {
/**
* The unique identifier of the node.
*/
NodeId?: ResourceIdString;
}
export interface CreateProposalInput {
/**
* A unique, case-sensitive identifier that you provide to ensure the idempotency of the operation. An idempotent operation completes no more than one time. This identifier is required only if you make a service request directly using an HTTP client. It is generated automatically if you use an AWS SDK or the AWS CLI.
*/
ClientRequestToken: ClientRequestTokenString;
/**
* The unique identifier of the network for which the proposal is made.
*/
NetworkId: ResourceIdString;
/**
* The unique identifier of the member that is creating the proposal. This identifier is especially useful for identifying the member making the proposal when multiple members exist in a single AWS account.
*/
MemberId: ResourceIdString;
/**
* The type of actions proposed, such as inviting a member or removing a member. The types of Actions in a proposal are mutually exclusive. For example, a proposal with Invitations actions cannot also contain Removals actions.
*/
Actions: ProposalActions;
/**
* A description for the proposal that is visible to voting members, for example, "Proposal to add Example Corp. as member."
*/
Description?: DescriptionString;
}
export interface CreateProposalOutput {
/**
* The unique identifier of the proposal.
*/
ProposalId?: ResourceIdString;
}
// --- Delete*/Get* operation shapes, invitations, and primitive aliases ---
export interface DeleteMemberInput {
/**
* The unique identifier of the network from which the member is removed.
*/
NetworkId: ResourceIdString;
/**
* The unique identifier of the member to remove.
*/
MemberId: ResourceIdString;
}
export interface DeleteMemberOutput {
}
export interface DeleteNodeInput {
/**
* The unique identifier of the network that the node belongs to.
*/
NetworkId: ResourceIdString;
/**
* The unique identifier of the member that owns this node.
*/
MemberId: ResourceIdString;
/**
* The unique identifier of the node.
*/
NodeId: ResourceIdString;
}
export interface DeleteNodeOutput {
}
export type DescriptionString = string;
export type Edition = "STARTER"|"STANDARD"|string;
export type Enabled = boolean;
export type Framework = "HYPERLEDGER_FABRIC"|string;
export type FrameworkVersionString = string;
export interface GetMemberInput {
/**
* The unique identifier of the network to which the member belongs.
*/
NetworkId: ResourceIdString;
/**
* The unique identifier of the member.
*/
MemberId: ResourceIdString;
}
export interface GetMemberOutput {
/**
* The properties of a member.
*/
Member?: Member;
}
export interface GetNetworkInput {
/**
* The unique identifier of the network to get information about.
*/
NetworkId: ResourceIdString;
}
export interface GetNetworkOutput {
/**
* An object containing network configuration parameters.
*/
Network?: Network;
}
export interface GetNodeInput {
/**
* The unique identifier of the network to which the node belongs.
*/
NetworkId: ResourceIdString;
/**
* The unique identifier of the member that owns the node.
*/
MemberId: ResourceIdString;
/**
* The unique identifier of the node.
*/
NodeId: ResourceIdString;
}
export interface GetNodeOutput {
/**
* Properties of the node configuration.
*/
Node?: Node;
}
export interface GetProposalInput {
/**
* The unique identifier of the network for which the proposal is made.
*/
NetworkId: ResourceIdString;
/**
* The unique identifier of the proposal.
*/
ProposalId: ResourceIdString;
}
export interface GetProposalOutput {
/**
* Information about a proposal.
*/
Proposal?: Proposal;
}
export type InstanceTypeString = string;
export interface Invitation {
/**
* The unique identifier for the invitation.
*/
InvitationId?: ResourceIdString;
/**
* The date and time that the invitation was created.
*/
CreationDate?: Timestamp;
/**
* The date and time that the invitation expires. This is the CreationDate plus the ProposalDurationInHours that is specified in the ApprovalThresholdPolicy. After this date and time, the invitee can no longer create a member and join the network using this InvitationId.
*/
ExpirationDate?: Timestamp;
/**
* The status of the invitation: PENDING - The invitee has not created a member to join the network, and the invitation has not yet expired. ACCEPTING - The invitee has begun creating a member, and creation has not yet completed. ACCEPTED - The invitee created a member and joined the network using the InvitationID. REJECTED - The invitee rejected the invitation. EXPIRED - The invitee neither created a member nor rejected the invitation before the ExpirationDate.
*/
Status?: InvitationStatus;
NetworkSummary?: NetworkSummary;
}
export type InvitationList = Invitation[];
export type InvitationStatus = "PENDING"|"ACCEPTED"|"ACCEPTING"|"REJECTED"|"EXPIRED"|string;
export interface InviteAction {
/**
* The AWS account ID to invite.
*/
Principal: PrincipalString;
}
export type InviteActionList = InviteAction[];
export type IsOwned = boolean;
// --- Paginated List* operation shapes (all use MaxResults + NextToken) ---
export interface ListInvitationsInput {
/**
* The maximum number of invitations to return.
*/
MaxResults?: ProposalListMaxResults;
/**
* The pagination token that indicates the next set of results to retrieve.
*/
NextToken?: PaginationToken;
}
export interface ListInvitationsOutput {
/**
* The invitations for the network.
*/
Invitations?: InvitationList;
/**
* The pagination token that indicates the next set of results to retrieve.
*/
NextToken?: PaginationToken;
}
export interface ListMembersInput {
/**
* The unique identifier of the network for which to list members.
*/
NetworkId: ResourceIdString;
/**
* The optional name of the member to list.
*/
Name?: String;
/**
* An optional status specifier. If provided, only members currently in this status are listed.
*/
Status?: MemberStatus;
/**
* An optional Boolean value. If provided, the request is limited either to members that the current AWS account owns (true) or that other AWS accounts own (false). If omitted, all members are listed.
*/
IsOwned?: IsOwned;
/**
* The maximum number of members to return in the request.
*/
MaxResults?: MemberListMaxResults;
/**
* The pagination token that indicates the next set of results to retrieve.
*/
NextToken?: PaginationToken;
}
export interface ListMembersOutput {
/**
* An array of MemberSummary objects. Each object contains details about a network member.
*/
Members?: MemberSummaryList;
/**
* The pagination token that indicates the next set of results to retrieve.
*/
NextToken?: PaginationToken;
}
export interface ListNetworksInput {
/**
* The name of the network.
*/
Name?: String;
/**
* An optional framework specifier. If provided, only networks of this framework type are listed.
*/
Framework?: Framework;
/**
* An optional status specifier. If provided, only networks currently in this status are listed.
*/
Status?: NetworkStatus;
/**
* The maximum number of networks to list.
*/
MaxResults?: NetworkListMaxResults;
/**
* The pagination token that indicates the next set of results to retrieve.
*/
NextToken?: PaginationToken;
}
export interface ListNetworksOutput {
/**
* An array of NetworkSummary objects that contain configuration properties for each network.
*/
Networks?: NetworkSummaryList;
/**
* The pagination token that indicates the next set of results to retrieve.
*/
NextToken?: PaginationToken;
}
export interface ListNodesInput {
/**
* The unique identifier of the network for which to list nodes.
*/
NetworkId: ResourceIdString;
/**
* The unique identifier of the member who owns the nodes to list.
*/
MemberId: ResourceIdString;
/**
* An optional status specifier. If provided, only nodes currently in this status are listed.
*/
Status?: NodeStatus;
/**
* The maximum number of nodes to list.
*/
MaxResults?: NodeListMaxResults;
/**
* The pagination token that indicates the next set of results to retrieve.
*/
NextToken?: PaginationToken;
}
export interface ListNodesOutput {
/**
* An array of NodeSummary objects that contain configuration properties for each node.
*/
Nodes?: NodeSummaryList;
/**
* The pagination token that indicates the next set of results to retrieve.
*/
NextToken?: PaginationToken;
}
export interface ListProposalVotesInput {
/**
* The unique identifier of the network.
*/
NetworkId: ResourceIdString;
/**
* The unique identifier of the proposal.
*/
ProposalId: ResourceIdString;
/**
* The maximum number of votes to return.
*/
MaxResults?: ProposalListMaxResults;
/**
* The pagination token that indicates the next set of results to retrieve.
*/
NextToken?: PaginationToken;
}
export interface ListProposalVotesOutput {
/**
* The listing of votes.
*/
ProposalVotes?: ProposalVoteList;
/**
* The pagination token that indicates the next set of results to retrieve.
*/
NextToken?: PaginationToken;
}
export interface ListProposalsInput {
/**
* The unique identifier of the network.
*/
NetworkId: ResourceIdString;
/**
* The maximum number of proposals to return.
*/
MaxResults?: ProposalListMaxResults;
/**
* The pagination token that indicates the next set of results to retrieve.
*/
NextToken?: PaginationToken;
}
export interface ListProposalsOutput {
/**
* The summary of each proposal made on the network.
*/
Proposals?: ProposalSummaryList;
/**
* The pagination token that indicates the next set of results to retrieve.
*/
NextToken?: PaginationToken;
}
// --- Logging and member-related shapes ---
export interface LogConfiguration {
/**
* Indicates whether logging is enabled.
*/
Enabled?: Enabled;
}
export interface LogConfigurations {
/**
* Parameters for publishing logs to Amazon CloudWatch Logs.
*/
Cloudwatch?: LogConfiguration;
}
export interface Member {
/**
* The unique identifier of the network to which the member belongs.
*/
NetworkId?: ResourceIdString;
/**
* The unique identifier of the member.
*/
Id?: ResourceIdString;
/**
* The name of the member.
*/
Name?: NetworkMemberNameString;
/**
* An optional description for the member.
*/
Description?: DescriptionString;
/**
* Attributes relevant to a member for the blockchain framework that the Managed Blockchain network uses.
*/
FrameworkAttributes?: MemberFrameworkAttributes;
/**
* Configuration properties for logging events associated with a member.
*/
LogPublishingConfiguration?: MemberLogPublishingConfiguration;
/**
* The status of a member. CREATING - The AWS account is in the process of creating a member. AVAILABLE - The member has been created and can participate in the network. CREATE_FAILED - The AWS account attempted to create a member and creation failed. DELETING - The member and all associated resources are in the process of being deleted. Either the AWS account that owns the member deleted it, or the member is being deleted as the result of an APPROVED PROPOSAL to remove the member. DELETED - The member can no longer participate on the network and all associated resources are deleted. Either the AWS account that owns the member deleted it, or the member is being deleted as the result of an APPROVED PROPOSAL to remove the member.
*/
Status?: MemberStatus;
/**
* The date and time that the member was created.
*/
CreationDate?: Timestamp;
}
export interface MemberConfiguration {
/**
* The name of the member.
*/
Name: NetworkMemberNameString;
/**
* An optional description of the member.
*/
Description?: DescriptionString;
/**
* Configuration properties of the blockchain framework relevant to the member.
*/
FrameworkConfiguration: MemberFrameworkConfiguration;
/**
* Configuration properties for logging events associated with a member of a Managed Blockchain network.
*/
LogPublishingConfiguration?: MemberLogPublishingConfiguration;
}
export interface MemberFabricAttributes {
/**
* The user name for the initial administrator user for the member.
*/
AdminUsername?: UsernameString;
/**
* The endpoint used to access the member's certificate authority.
*/
CaEndpoint?: String;
}
export interface MemberFabricConfiguration {
/**
* The user name for the member's initial administrative user.
*/
AdminUsername: UsernameString;
/**
* The password for the member's initial administrative user. The AdminPassword must be at least eight characters long and no more than 32 characters. It must contain at least one uppercase letter, one lowercase letter, and one digit. It cannot have a single quote(‘), double quote(“), forward slash(/), backward slash(\), @, or a space.
*/
AdminPassword: PasswordString;
}
export interface MemberFabricLogPublishingConfiguration {
/**
* Configuration properties for logging events associated with a member's Certificate Authority (CA). CA logs help you determine when a member in your account joins the network, or when new peers register with a member CA.
*/
CaLogs?: LogConfigurations;
}
export interface MemberFrameworkAttributes {
/**
* Attributes of Hyperledger Fabric relevant to a member on a Managed Blockchain network that uses Hyperledger Fabric.
*/
Fabric?: MemberFabricAttributes;
}
export interface MemberFrameworkConfiguration {
/**
* Attributes of Hyperledger Fabric for a member on a Managed Blockchain network that uses Hyperledger Fabric.
*/
Fabric?: MemberFabricConfiguration;
}
export type MemberListMaxResults = number;
export interface MemberLogPublishingConfiguration {
/**
* Configuration properties for logging events associated with a member of a Managed Blockchain network using the Hyperledger Fabric framework.
*/
Fabric?: MemberFabricLogPublishingConfiguration;
}
export type MemberStatus = "CREATING"|"AVAILABLE"|"CREATE_FAILED"|"UPDATING"|"DELETING"|"DELETED"|string;
export interface MemberSummary {
/**
* The unique identifier of the member.
*/
Id?: ResourceIdString;
/**
* The name of the member.
*/
Name?: NetworkMemberNameString;
/**
* An optional description of the member.
*/
Description?: DescriptionString;
/**
* The status of the member. CREATING - The AWS account is in the process of creating a member. AVAILABLE - The member has been created and can participate in the network. CREATE_FAILED - The AWS account attempted to create a member and creation failed. DELETING - The member and all associated resources are in the process of being deleted. Either the AWS account that owns the member deleted it, or the member is being deleted as the result of an APPROVED PROPOSAL to remove the member. DELETED - The member can no longer participate on the network and all associated resources are deleted. Either the AWS account that owns the member deleted it, or the member is being deleted as the result of an APPROVED PROPOSAL to remove the member.
*/
Status?: MemberStatus;
/**
* The date and time that the member was created.
*/
CreationDate?: Timestamp;
/**
* An indicator of whether the member is owned by your AWS account or a different AWS account.
*/
IsOwned?: IsOwned;
}
export type MemberSummaryList = MemberSummary[];
export type NameString = string;
// --- Network-related shapes ---
export interface Network {
/**
* The unique identifier of the network.
*/
Id?: ResourceIdString;
/**
* The name of the network.
*/
Name?: NameString;
/**
* An optional description of the network.
*/
Description?: DescriptionString;
/**
* The blockchain framework that the network uses.
*/
Framework?: Framework;
/**
* The version of the blockchain framework that the network uses.
*/
FrameworkVersion?: FrameworkVersionString;
/**
* Attributes of the blockchain framework that the network uses.
*/
FrameworkAttributes?: NetworkFrameworkAttributes;
/**
* The VPC endpoint service name of the VPC endpoint service of the network. Members use the VPC endpoint service name to create a VPC endpoint to access network resources.
*/
VpcEndpointServiceName?: String;
/**
* The voting rules for the network to decide if a proposal is accepted.
*/
VotingPolicy?: VotingPolicy;
/**
* The current status of the network.
*/
Status?: NetworkStatus;
/**
* The date and time that the network was created.
*/
CreationDate?: Timestamp;
}
export interface NetworkFabricAttributes {
/**
* The endpoint of the ordering service for the network.
*/
OrderingServiceEndpoint?: String;
/**
* The edition of Amazon Managed Blockchain that Hyperledger Fabric uses. For more information, see Amazon Managed Blockchain Pricing.
*/
Edition?: Edition;
}
export interface NetworkFabricConfiguration {
/**
* The edition of Amazon Managed Blockchain that the network uses. For more information, see Amazon Managed Blockchain Pricing.
*/
Edition: Edition;
}
export interface NetworkFrameworkAttributes {
/**
* Attributes of Hyperledger Fabric for a Managed Blockchain network that uses Hyperledger Fabric.
*/
Fabric?: NetworkFabricAttributes;
}
export interface NetworkFrameworkConfiguration {
/**
* Hyperledger Fabric configuration properties for a Managed Blockchain network that uses Hyperledger Fabric.
*/
Fabric?: NetworkFabricConfiguration;
}
export type NetworkListMaxResults = number;
export type NetworkMemberNameString = string;
export type NetworkStatus = "CREATING"|"AVAILABLE"|"CREATE_FAILED"|"DELETING"|"DELETED"|string;
export interface NetworkSummary {
/**
* The unique identifier of the network.
*/
Id?: ResourceIdString;
/**
* The name of the network.
*/
Name?: NameString;
/**
* An optional description of the network.
*/
Description?: DescriptionString;
/**
* The blockchain framework that the network uses.
*/
Framework?: Framework;
/**
* The version of the blockchain framework that the network uses.
*/
FrameworkVersion?: FrameworkVersionString;
/**
* The current status of the network.
*/
Status?: NetworkStatus;
/**
* The date and time that the network was created.
*/
CreationDate?: Timestamp;
}
export type NetworkSummaryList = NetworkSummary[];
/** Properties of a peer node in a network, as returned by GetNode. */
export interface Node {
/**
 * The unique identifier of the network that the node is in.
 */
NetworkId?: ResourceIdString;
/**
 * The unique identifier of the member to which the node belongs.
 */
MemberId?: ResourceIdString;
/**
 * The unique identifier of the node.
 */
Id?: ResourceIdString;
/**
 * The instance type of the node.
 */
InstanceType?: InstanceTypeString;
/**
 * The Availability Zone in which the node exists.
 */
AvailabilityZone?: AvailabilityZoneString;
/**
 * Attributes of the blockchain framework being used.
 */
FrameworkAttributes?: NodeFrameworkAttributes;
/**
 * Configuration properties for logging events associated with a peer node owned by a member in a Managed Blockchain network.
 */
LogPublishingConfiguration?: NodeLogPublishingConfiguration;
/**
 * The state database that the node uses. Values are LevelDB or CouchDB.
 */
StateDB?: StateDBType;
/**
 * The status of the node.
 */
Status?: NodeStatus;
/**
 * The date and time that the node was created.
 */
CreationDate?: Timestamp;
}
/** Configuration supplied when creating a node. Unlike Node, InstanceType and AvailabilityZone are required here. */
export interface NodeConfiguration {
/**
 * The Amazon Managed Blockchain instance type for the node.
 */
InstanceType: InstanceTypeString;
/**
 * The Availability Zone in which the node exists.
 */
AvailabilityZone: AvailabilityZoneString;
/**
 * Configuration properties for logging events associated with a peer node owned by a member in a Managed Blockchain network.
 */
LogPublishingConfiguration?: NodeLogPublishingConfiguration;
/**
 * The state database that the node uses. Values are LevelDB or CouchDB. When using an Amazon Managed Blockchain network with Hyperledger Fabric version 1.4 or later, the default is CouchDB.
 */
StateDB?: StateDBType;
}
/** Hyperledger Fabric attributes of a peer node (returned on read). */
export interface NodeFabricAttributes {
/**
 * The endpoint that identifies the peer node for all services except peer channel-based event services.
 */
PeerEndpoint?: String;
/**
 * The endpoint that identifies the peer node for peer channel-based event services.
 */
PeerEventEndpoint?: String;
}
/** Log-publishing settings specific to Hyperledger Fabric peer nodes. */
export interface NodeFabricLogPublishingConfiguration {
/**
 * Configuration properties for logging events associated with chaincode execution on a peer node. Chaincode logs contain the results of instantiating, invoking, and querying the chaincode. A peer can run multiple instances of chaincode. When enabled, a log stream is created for all chaincodes, with an individual log stream for each chaincode.
 */
ChaincodeLogs?: LogConfigurations;
/**
 * Configuration properties for a peer node log. Peer node logs contain messages generated when your client submits transaction proposals to peer nodes, requests to join channels, enrolls an admin peer, and lists the chaincode instances on a peer node.
 */
PeerLogs?: LogConfigurations;
}
/** Framework-specific attributes of a node; one member per supported framework. */
export interface NodeFrameworkAttributes {
/**
 * Attributes of Hyperledger Fabric for a peer node on a Managed Blockchain network that uses Hyperledger Fabric.
 */
Fabric?: NodeFabricAttributes;
}
/** The maximum number of nodes to return in a single list call. */
export type NodeListMaxResults = number;
/** Framework-specific log-publishing configuration for a node. */
export interface NodeLogPublishingConfiguration {
/**
 * Configuration properties for logging events associated with a node that is owned by a member of a Managed Blockchain network using the Hyperledger Fabric framework.
 */
Fabric?: NodeFabricLogPublishingConfiguration;
}
/** The lifecycle status of a node. The `|string` keeps the union open for values added by the service after this SDK was generated. */
export type NodeStatus = "CREATING"|"AVAILABLE"|"CREATE_FAILED"|"UPDATING"|"DELETING"|"DELETED"|"FAILED"|string;
/** A condensed view of a node, as returned by list operations. */
export interface NodeSummary {
/**
 * The unique identifier of the node.
 */
Id?: ResourceIdString;
/**
 * The status of the node.
 */
Status?: NodeStatus;
/**
 * The date and time that the node was created.
 */
CreationDate?: Timestamp;
/**
 * The Availability Zone in which the node exists.
 */
AvailabilityZone?: AvailabilityZoneString;
/**
 * The EC2 instance type for the node.
 */
InstanceType?: InstanceTypeString;
}
/** A list of node summaries. */
export type NodeSummaryList = NodeSummary[];
/** An opaque token used to page through list results. */
export type PaginationToken = string;
/** A password value (e.g. for a member admin user). */
export type PasswordString = string;
/** An AWS principal identifier (e.g. an account ID). */
export type PrincipalString = string;
/** Properties of a proposal on a network, as returned by GetProposal. */
export interface Proposal {
/**
 * The unique identifier of the proposal.
 */
ProposalId?: ResourceIdString;
/**
 * The unique identifier of the network for which the proposal is made.
 */
NetworkId?: ResourceIdString;
/**
 * The description of the proposal.
 */
Description?: DescriptionString;
/**
 * The actions to perform on the network if the proposal is APPROVED.
 */
Actions?: ProposalActions;
/**
 * The unique identifier of the member that created the proposal.
 */
ProposedByMemberId?: ResourceIdString;
/**
 * The name of the member that created the proposal.
 */
ProposedByMemberName?: NetworkMemberNameString;
/**
 * The status of the proposal. Values are as follows: IN_PROGRESS - The proposal is active and open for member voting. APPROVED - The proposal was approved with sufficient YES votes among members according to the VotingPolicy specified for the Network. The specified proposal actions are carried out. REJECTED - The proposal was rejected with insufficient YES votes among members according to the VotingPolicy specified for the Network. The specified ProposalActions are not carried out. EXPIRED - Members did not cast the number of votes required to determine the proposal outcome before the proposal expired. The specified ProposalActions are not carried out. ACTION_FAILED - One or more of the specified ProposalActions in a proposal that was approved could not be completed because of an error. The ACTION_FAILED status occurs even if only one ProposalAction fails and other actions are successful.
 */
Status?: ProposalStatus;
/**
 * The date and time that the proposal was created.
 */
CreationDate?: Timestamp;
/**
 * The date and time that the proposal expires. This is the CreationDate plus the ProposalDurationInHours that is specified in the ProposalThresholdPolicy. After this date and time, if members have not cast enough votes to determine the outcome according to the voting policy, the proposal is EXPIRED and Actions are not carried out.
 */
ExpirationDate?: Timestamp;
/**
 * The current total of YES votes cast on the proposal by members.
 */
YesVoteCount?: VoteCount;
/**
 * The current total of NO votes cast on the proposal by members.
 */
NoVoteCount?: VoteCount;
/**
 * The number of votes remaining to be cast on the proposal by members. In other words, the number of members minus the sum of YES votes and NO votes.
 */
OutstandingVoteCount?: VoteCount;
}
/** The set of actions carried out when a proposal is APPROVED. */
export interface ProposalActions {
/**
 * The actions to perform for an APPROVED proposal to invite an AWS account to create a member and join the network.
 */
Invitations?: InviteActionList;
/**
 * The actions to perform for an APPROVED proposal to remove a member from the network, which deletes the member and all associated member resources from the network.
 */
Removals?: RemoveActionList;
}
/** A proposal duration, expressed in hours. */
export type ProposalDurationInt = number;
/** The maximum number of proposals to return in a single list call. */
export type ProposalListMaxResults = number;
/** The lifecycle status of a proposal. The `|string` keeps the union open for values added by the service after this SDK was generated. */
export type ProposalStatus = "IN_PROGRESS"|"APPROVED"|"REJECTED"|"EXPIRED"|"ACTION_FAILED"|string;
/** A condensed view of a proposal, as returned by list operations. */
export interface ProposalSummary {
/**
 * The unique identifier of the proposal.
 */
ProposalId?: ResourceIdString;
/**
 * The description of the proposal.
 */
Description?: DescriptionString;
/**
 * The unique identifier of the member that created the proposal.
 */
ProposedByMemberId?: ResourceIdString;
/**
 * The name of the member that created the proposal.
 */
ProposedByMemberName?: NetworkMemberNameString;
/**
 * The status of the proposal. Values are as follows: IN_PROGRESS - The proposal is active and open for member voting. APPROVED - The proposal was approved with sufficient YES votes among members according to the VotingPolicy specified for the Network. The specified proposal actions are carried out. REJECTED - The proposal was rejected with insufficient YES votes among members according to the VotingPolicy specified for the Network. The specified ProposalActions are not carried out. EXPIRED - Members did not cast the number of votes required to determine the proposal outcome before the proposal expired. The specified ProposalActions are not carried out. ACTION_FAILED - One or more of the specified ProposalActions in a proposal that was approved could not be completed because of an error.
 */
Status?: ProposalStatus;
/**
 * The date and time that the proposal was created.
 */
CreationDate?: Timestamp;
/**
 * The date and time that the proposal expires. This is the CreationDate plus the ProposalDurationInHours that is specified in the ProposalThresholdPolicy. After this date and time, if members have not cast enough votes to determine the outcome according to the voting policy, the proposal is EXPIRED and Actions are not carried out.
 */
ExpirationDate?: Timestamp;
}
/** A list of proposal summaries. */
export type ProposalSummaryList = ProposalSummary[];
/** A list of vote summaries cast on a proposal. */
export type ProposalVoteList = VoteSummary[];
/** Request shape for RejectInvitation. */
export interface RejectInvitationInput {
/**
 * The unique identifier of the invitation to reject.
 */
InvitationId: ResourceIdString;
}
/** Response shape for RejectInvitation (empty on success). */
export interface RejectInvitationOutput {
}
/** An action in a proposal that removes a member from the network. */
export interface RemoveAction {
/**
 * The unique identifier of the member to remove.
 */
MemberId: ResourceIdString;
}
/** A list of member-removal actions. */
export type RemoveActionList = RemoveAction[];
/** A Managed Blockchain resource identifier. */
export type ResourceIdString = string;
/** The state database a node uses. The `|string` keeps the union open for values added by the service after this SDK was generated. */
export type StateDBType = "LevelDB"|"CouchDB"|string;
/** A generic string value. Named `String` by the AWS service model; shadows the global boxed String type within this namespace. */
export type String = string;
/** The comparator applied to an approval-threshold percentage. */
export type ThresholdComparator = "GREATER_THAN"|"GREATER_THAN_OR_EQUAL_TO"|string;
/** An approval-threshold percentage value. */
export type ThresholdPercentageInt = number;
/** A point in time, surfaced to JavaScript callers as a Date. */
export type Timestamp = Date;
/** Request shape for UpdateMember. */
export interface UpdateMemberInput {
/**
 * The unique ID of the Managed Blockchain network to which the member belongs.
 */
NetworkId: ResourceIdString;
/**
 * The unique ID of the member.
 */
MemberId: ResourceIdString;
/**
 * Configuration properties for publishing to Amazon CloudWatch Logs.
 */
LogPublishingConfiguration?: MemberLogPublishingConfiguration;
}
/** Response shape for UpdateMember (empty on success). */
export interface UpdateMemberOutput {
}
/** Request shape for UpdateNode. */
export interface UpdateNodeInput {
/**
 * The unique ID of the Managed Blockchain network to which the node belongs.
 */
NetworkId: ResourceIdString;
/**
 * The unique ID of the member that owns the node.
 */
MemberId: ResourceIdString;
/**
 * The unique ID of the node.
 */
NodeId: ResourceIdString;
/**
 * Configuration properties for publishing to Amazon CloudWatch Logs.
 */
LogPublishingConfiguration?: NodeLogPublishingConfiguration;
}
/** Response shape for UpdateNode (empty on success). */
export interface UpdateNodeOutput {
}
/** A user name value (e.g. for a member admin user). */
export type UsernameString = string;
/** A count of votes. */
export type VoteCount = number;
/** Request shape for VoteOnProposal. */
export interface VoteOnProposalInput {
/**
 * The unique identifier of the network.
 */
NetworkId: ResourceIdString;
/**
 * The unique identifier of the proposal.
 */
ProposalId: ResourceIdString;
/**
 * The unique identifier of the member casting the vote.
 */
VoterMemberId: ResourceIdString;
/**
 * The value of the vote.
 */
Vote: VoteValue;
}
/** Response shape for VoteOnProposal (empty on success). */
export interface VoteOnProposalOutput {
}
/** A single member's vote on a proposal. */
export interface VoteSummary {
/**
 * The vote value, either YES or NO.
 */
Vote?: VoteValue;
/**
 * The name of the member that cast the vote.
 */
MemberName?: NetworkMemberNameString;
/**
 * The unique identifier of the member that cast the vote.
 */
MemberId?: ResourceIdString;
}
/** A vote value. The `|string` keeps the union open for values added by the service after this SDK was generated. */
export type VoteValue = "YES"|"NO"|string;
/** The voting rules applied to all proposals on a network. */
export interface VotingPolicy {
/**
 * Defines the rules for the network for voting on proposals, such as the percentage of YES votes required for the proposal to be approved and the duration of the proposal. The policy applies to all proposals and is specified when the network is created.
 */
ApprovalThresholdPolicy?: ApprovalThresholdPolicy;
}
/**
 * A string in YYYY-MM-DD format that represents the latest possible API version that can be used in this service. Specify 'latest' to use the latest possible version.
 */
export type apiVersion = "2018-09-24"|"latest"|string;
/** Client options that pin the service API version. */
export interface ClientApiVersions {
/**
 * A string in YYYY-MM-DD format that represents the latest possible API version that can be used in this service. Specify 'latest' to use the latest possible version.
 */
apiVersion?: apiVersion;
}
/** Full client configuration: generic service options plus the API-version pin. */
export type ClientConfiguration = ServiceConfigurationOptions & ClientApiVersions;
/**
 * Contains interfaces for use with the ManagedBlockchain client.
 */
export import Types = ManagedBlockchain;
}
export = ManagedBlockchain; | the_stack |
import * as OpaqueTypes from '@da/ui-core/lib/api/OpaqueTypes'
/* tslint:disable */
/* eslint-disable */
// @generated
// This file was automatically generated and should not be edited.
// ====================================================
// GraphQL query operation: ContractDetailsById
// ====================================================
// Catch-all branch for "node" results that are not a Contract. Codegen collapses
// every non-selected concrete type into one interface whose __typename union
// lists all of them; no other fields were selected for these types.
export interface ContractDetailsById_node_CreateCommand {
__typename: "CreateCommand" | "CreatedEvent" | "DamlLfDefDataType" | "ExerciseCommand" | "ExercisedEvent" | "Template" | "Transaction";
}
// The archiving event of a contract, if it has been archived.
export interface ContractDetailsById_node_Contract_archiveEvent {
__typename: "ExercisedEvent";
id: string;
}
// A choice offered by the contract's template; `parameter` is the choice's
// argument type (opaque DAML-LF type).
export interface ContractDetailsById_node_Contract_template_choices {
__typename: "Choice";
name: string;
parameter: OpaqueTypes.DamlLfType;
}
// The template a contract was instantiated from, with its exercisable choices.
export interface ContractDetailsById_node_Contract_template {
__typename: "Template";
id: string;
topLevelDecl: string;
choices: ContractDetailsById_node_Contract_template_choices[];
}
// Full contract details: argument payload, optional archive event, parties,
// optional contract key, and the owning template.
export interface ContractDetailsById_node_Contract {
__typename: "Contract";
id: string;
argument: OpaqueTypes.DamlLfValueRecord;
archiveEvent: ContractDetailsById_node_Contract_archiveEvent | null;
agreementText: string | null;
signatories: string[];
observers: string[];
key: OpaqueTypes.DamlLfValue | null;
template: ContractDetailsById_node_Contract_template;
}
// Discriminate on __typename === "Contract" to reach the contract fields.
export type ContractDetailsById_node = ContractDetailsById_node_CreateCommand | ContractDetailsById_node_Contract;
// Top-level query result; `node` is null when the id does not resolve.
export interface ContractDetailsById {
node: ContractDetailsById_node | null;
}
// Variables for the ContractDetailsById query.
export interface ContractDetailsByIdVariables {
id: string;
}
/* tslint:disable */
/* eslint-disable */
// @generated
// This file was automatically generated and should not be edited.
// ====================================================
// GraphQL mutation operation: ContractExercise
// ====================================================
// Result of exercising a choice; the server returns the command id it assigned.
export interface ContractExercise {
exercise: OpaqueTypes.CommandId;
}
// Variables for the ContractExercise mutation. `argument` may be omitted or
// null for argument-less choices.
export interface ContractExerciseVariables {
contractId: string;
choiceId: string;
argument?: OpaqueTypes.DamlLfValue | null;
}
/* tslint:disable */
/* eslint-disable */
// @generated
// This file was automatically generated and should not be edited.
// ====================================================
// GraphQL query operation: ContractsQuery
// ====================================================
// Transaction metadata of a contract's create event (ledger-effective time).
export interface ContractsQuery_contracts_edges_node_createEvent_transaction {
__typename: "Transaction";
effectiveAt: OpaqueTypes.Time;
}
// The event that created a contract.
export interface ContractsQuery_contracts_edges_node_createEvent {
__typename: "CreatedEvent";
id: string;
transaction: ContractsQuery_contracts_edges_node_createEvent_transaction;
}
// The event that archived a contract, if any.
export interface ContractsQuery_contracts_edges_node_archiveEvent {
__typename: "ExercisedEvent";
id: string;
}
// A choice name on the contract's template.
export interface ContractsQuery_contracts_edges_node_template_choices {
__typename: "Choice";
name: string;
}
// The template of a listed contract, with choice names only.
export interface ContractsQuery_contracts_edges_node_template {
__typename: "Template";
id: string;
choices: ContractsQuery_contracts_edges_node_template_choices[];
}
// One contract row in the paginated contract list. `archiveEvent` is null for
// active contracts.
export interface ContractsQuery_contracts_edges_node {
__typename: "Contract";
id: string;
createEvent: ContractsQuery_contracts_edges_node_createEvent;
archiveEvent: ContractsQuery_contracts_edges_node_archiveEvent | null;
argument: OpaqueTypes.DamlLfValueRecord;
template: ContractsQuery_contracts_edges_node_template;
}
// Relay-style edge wrapper around a contract node.
export interface ContractsQuery_contracts_edges {
__typename: "ContractEdge";
node: ContractsQuery_contracts_edges_node;
}
// Paginated contract result: total match count plus the requested page.
export interface ContractsQuery_contracts {
__typename: "ContractPagination";
totalCount: number;
edges: ContractsQuery_contracts_edges[];
}
// Top-level query result.
export interface ContractsQuery {
contracts: ContractsQuery_contracts;
}
// Variables for the ContractsQuery query; `filter` and `sort` are optional.
export interface ContractsQueryVariables {
filter?: FilterCriterion[] | null;
search: string;
includeArchived: boolean;
count: number;
sort?: SortCriterion[] | null;
}
/* tslint:disable */
/* eslint-disable */
// @generated
// This file was automatically generated and should not be edited.
// ====================================================
// GraphQL query operation: TemplateInstance
// ====================================================
// Catch-all branch for "node" results that are not a Template (codegen collapses
// all non-selected concrete types into one __typename-only interface).
export interface TemplateInstance_node_Contract {
__typename: "Contract" | "CreateCommand" | "CreatedEvent" | "DamlLfDefDataType" | "ExerciseCommand" | "ExercisedEvent" | "Transaction";
}
// The requested template, with its parameter type and top-level declaration name.
export interface TemplateInstance_node_Template {
__typename: "Template";
id: string;
parameter: OpaqueTypes.DamlLfType;
topLevelDecl: string;
}
// Discriminate on __typename === "Template" to reach the template fields.
export type TemplateInstance_node = TemplateInstance_node_Contract | TemplateInstance_node_Template;
// Top-level query result; `node` is null when the id does not resolve.
export interface TemplateInstance {
node: TemplateInstance_node | null;
}
// Variables for the TemplateInstance query.
export interface TemplateInstanceVariables {
templateId: string;
}
/* tslint:disable */
/* eslint-disable */
// @generated
// This file was automatically generated and should not be edited.
// ====================================================
// GraphQL mutation operation: CreateContract
// ====================================================
// Result of creating a contract; the server returns the command id it assigned.
export interface CreateContract {
create: OpaqueTypes.CommandId;
}
// Variables for the CreateContract mutation.
export interface CreateContractVariables {
templateId: string;
argument?: OpaqueTypes.DamlLfValue | null;
}
/* tslint:disable */
/* eslint-disable */
// @generated
// This file was automatically generated and should not be edited.
// ====================================================
// GraphQL query operation: ContractsByTemplateParamQuery
// ====================================================
// Catch-all branch for "node" results that are not a Template.
export interface ContractsByTemplateParamQuery_node_Contract {
__typename: "Contract" | "CreateCommand" | "CreatedEvent" | "DamlLfDefDataType" | "ExerciseCommand" | "ExercisedEvent" | "Transaction";
}
// The data-type definition backing a template's parameter.
export interface ContractsByTemplateParamQuery_node_Template_parameterDef {
__typename: "DamlLfDefDataType";
dataType: OpaqueTypes.DamlLfDataType;
}
// The requested template with its parameter definition.
export interface ContractsByTemplateParamQuery_node_Template {
__typename: "Template";
id: string;
parameterDef: ContractsByTemplateParamQuery_node_Template_parameterDef;
}
// Discriminate on __typename === "Template" to reach the template fields.
export type ContractsByTemplateParamQuery_node = ContractsByTemplateParamQuery_node_Contract | ContractsByTemplateParamQuery_node_Template;
// Top-level query result; `node` is null when the id does not resolve.
export interface ContractsByTemplateParamQuery {
node: ContractsByTemplateParamQuery_node | null;
}
// Variables for the ContractsByTemplateParamQuery query.
export interface ContractsByTemplateParamQueryVariables {
templateId: string;
}
/* tslint:disable */
/* eslint-disable */
// @generated
// This file was automatically generated and should not be edited.
// ====================================================
// GraphQL query operation: ContractsByTemplateQuery
// ====================================================
// Catch-all branch for "node" results that are not a Template (codegen collapses
// all non-selected concrete types into one __typename-only interface).
export interface ContractsByTemplateQuery_node_Contract {
__typename: "Contract" | "CreateCommand" | "CreatedEvent" | "DamlLfDefDataType" | "ExerciseCommand" | "ExercisedEvent" | "Transaction";
}
// A choice name on the template itself.
export interface ContractsByTemplateQuery_node_Template_choices {
__typename: "Choice";
name: string;
}
// Transaction metadata of a contract's create event (ledger-effective time).
export interface ContractsByTemplateQuery_node_Template_contracts_edges_node_createEvent_transaction {
__typename: "Transaction";
effectiveAt: OpaqueTypes.Time;
}
// The event that created a contract.
export interface ContractsByTemplateQuery_node_Template_contracts_edges_node_createEvent {
__typename: "CreatedEvent";
id: string;
transaction: ContractsByTemplateQuery_node_Template_contracts_edges_node_createEvent_transaction;
}
// The event that archived a contract, if any.
export interface ContractsByTemplateQuery_node_Template_contracts_edges_node_archiveEvent {
__typename: "ExercisedEvent";
id: string;
}
// A choice name on a listed contract's template.
export interface ContractsByTemplateQuery_node_Template_contracts_edges_node_template_choices {
__typename: "Choice";
name: string;
}
// The template of a listed contract, with choice names only.
export interface ContractsByTemplateQuery_node_Template_contracts_edges_node_template {
__typename: "Template";
id: string;
choices: ContractsByTemplateQuery_node_Template_contracts_edges_node_template_choices[];
}
// One contract row under the template. `archiveEvent` is null for active contracts.
export interface ContractsByTemplateQuery_node_Template_contracts_edges_node {
__typename: "Contract";
id: string;
createEvent: ContractsByTemplateQuery_node_Template_contracts_edges_node_createEvent;
archiveEvent: ContractsByTemplateQuery_node_Template_contracts_edges_node_archiveEvent | null;
argument: OpaqueTypes.DamlLfValueRecord;
template: ContractsByTemplateQuery_node_Template_contracts_edges_node_template;
}
// Relay-style edge wrapper around a contract node.
export interface ContractsByTemplateQuery_node_Template_contracts_edges {
__typename: "ContractEdge";
node: ContractsByTemplateQuery_node_Template_contracts_edges_node;
}
// Paginated contracts of the template: total match count plus the requested page.
export interface ContractsByTemplateQuery_node_Template_contracts {
__typename: "ContractPagination";
totalCount: number;
edges: ContractsByTemplateQuery_node_Template_contracts_edges[];
}
// The requested template with its choices and (paginated) contracts.
export interface ContractsByTemplateQuery_node_Template {
__typename: "Template";
id: string;
choices: ContractsByTemplateQuery_node_Template_choices[];
contracts: ContractsByTemplateQuery_node_Template_contracts;
}
// Discriminate on __typename === "Template" to reach the template fields.
export type ContractsByTemplateQuery_node = ContractsByTemplateQuery_node_Contract | ContractsByTemplateQuery_node_Template;
// Top-level query result; `node` is null when the id does not resolve.
export interface ContractsByTemplateQuery {
node: ContractsByTemplateQuery_node | null;
}
// Variables for the ContractsByTemplateQuery query.
export interface ContractsByTemplateQueryVariables {
templateId: string;
filter?: FilterCriterion[] | null;
search: string;
count: number;
sort?: SortCriterion[] | null;
includeArchived: boolean;
}
/* tslint:disable */
/* eslint-disable */
// @generated
// This file was automatically generated and should not be edited.
// ====================================================
// GraphQL query operation: TemplatesQuery
// ====================================================
// Only the total contract count is selected per template in this listing.
export interface TemplatesQuery_templates_edges_node_contracts {
__typename: "ContractPagination";
totalCount: number;
}
// One template row in the paginated template list.
export interface TemplatesQuery_templates_edges_node {
__typename: "Template";
id: string;
topLevelDecl: string;
contracts: TemplatesQuery_templates_edges_node_contracts;
}
// Relay-style edge wrapper around a template node.
export interface TemplatesQuery_templates_edges {
__typename: "TemplateEdge";
node: TemplatesQuery_templates_edges_node;
}
// Paginated template result: total match count plus the requested page.
export interface TemplatesQuery_templates {
__typename: "TemplatePagination";
totalCount: number;
edges: TemplatesQuery_templates_edges[];
}
// Top-level query result.
export interface TemplatesQuery {
templates: TemplatesQuery_templates;
}
// Variables for the TemplatesQuery query; `filter` and `sort` are optional.
export interface TemplatesQueryVariables {
filter?: FilterCriterion[] | null;
search: string;
count: number;
sort?: SortCriterion[] | null;
}
/* tslint:disable */
/* eslint-disable */
// @generated
// This file was automatically generated and should not be edited.
//==============================================================
// START Enums and Input Objects
//==============================================================
// Sort direction for SortCriterion.
export enum Direction {
ASCENDING = "ASCENDING",
DESCENDING = "DESCENDING",
}
// A single field/value filter predicate passed to list queries.
export interface FilterCriterion {
field: string;
value: string;
}
// A single sort key (field plus direction) passed to list queries.
export interface SortCriterion {
field: string;
direction: Direction;
}
//==============================================================
// END Enums and Input Objects
//==============================================================
import * as pulumi from "@pulumi/pulumi";
import { input as inputs, output as outputs } from "../types";
import * as utilities from "../utilities";
/**
* Budget configuration for a billing account.
*
* To get more information about Budget, see:
*
* * [API documentation](https://cloud.google.com/billing/docs/reference/budget/rest/v1/billingAccounts.budgets)
* * How-to Guides
* * [Creating a budget](https://cloud.google.com/billing/docs/how-to/budgets)
*
* > **Warning:** If you are using User ADCs (Application Default Credentials) with this resource,
* you must specify a `billingProject` and set `userProjectOverride` to true
* in the provider configuration. Otherwise the Billing Budgets API will return a 403 error.
* Your account must have the `serviceusage.services.use` permission on the
* `billingProject` you defined.
*
* ## Example Usage
* ### Billing Budget Basic
*
* ```typescript
* import * as pulumi from "@pulumi/pulumi";
* import * as gcp from "@pulumi/gcp";
*
* const account = gcp.organizations.getBillingAccount({
* billingAccount: "000000-0000000-0000000-000000",
* });
* const budget = new gcp.billing.Budget("budget", {
* billingAccount: account.then(account => account.id),
* displayName: "Example Billing Budget",
* amount: {
* specifiedAmount: {
* currencyCode: "USD",
* units: "100000",
* },
* },
* thresholdRules: [{
* thresholdPercent: 0.5,
* }],
* });
* ```
* ### Billing Budget Lastperiod
*
* ```typescript
* import * as pulumi from "@pulumi/pulumi";
* import * as gcp from "@pulumi/gcp";
*
* const account = gcp.organizations.getBillingAccount({
* billingAccount: "000000-0000000-0000000-000000",
* });
* const project = gcp.organizations.getProject({});
* const budget = new gcp.billing.Budget("budget", {
* billingAccount: account.then(account => account.id),
* displayName: "Example Billing Budget",
* budgetFilter: {
* projects: [project.then(project => `projects/${project.number}`)],
* },
* amount: {
* lastPeriodAmount: true,
* },
* thresholdRules: [{
* thresholdPercent: 10,
* }],
* });
* ```
* ### Billing Budget Filter
*
* ```typescript
* import * as pulumi from "@pulumi/pulumi";
* import * as gcp from "@pulumi/gcp";
*
* const account = gcp.organizations.getBillingAccount({
* billingAccount: "000000-0000000-0000000-000000",
* });
* const project = gcp.organizations.getProject({});
* const budget = new gcp.billing.Budget("budget", {
* billingAccount: account.then(account => account.id),
* displayName: "Example Billing Budget",
* budgetFilter: {
* projects: [project.then(project => `projects/${project.number}`)],
* creditTypesTreatment: "EXCLUDE_ALL_CREDITS",
* services: ["services/24E6-581D-38E5"],
* },
* amount: {
* specifiedAmount: {
* currencyCode: "USD",
* units: "100000",
* },
* },
* thresholdRules: [
* {
* thresholdPercent: 0.5,
* },
* {
* thresholdPercent: 0.9,
* spendBasis: "FORECASTED_SPEND",
* },
* ],
* });
* ```
* ### Billing Budget Notify
*
* ```typescript
* import * as pulumi from "@pulumi/pulumi";
* import * as gcp from "@pulumi/gcp";
*
* const account = gcp.organizations.getBillingAccount({
* billingAccount: "000000-0000000-0000000-000000",
* });
* const project = gcp.organizations.getProject({});
* const notificationChannel = new gcp.monitoring.NotificationChannel("notificationChannel", {
* displayName: "Example Notification Channel",
* type: "email",
* labels: {
* email_address: "address@example.com",
* },
* });
* const budget = new gcp.billing.Budget("budget", {
* billingAccount: account.then(account => account.id),
* displayName: "Example Billing Budget",
* budgetFilter: {
* projects: [project.then(project => `projects/${project.number}`)],
* },
* amount: {
* specifiedAmount: {
* currencyCode: "USD",
* units: "100000",
* },
* },
* thresholdRules: [
* {
* thresholdPercent: 1,
* },
* {
* thresholdPercent: 1,
* spendBasis: "FORECASTED_SPEND",
* },
* ],
* allUpdatesRule: {
* monitoringNotificationChannels: [notificationChannel.id],
* disableDefaultIamRecipients: true,
* },
* });
* ```
*
* ## Import
*
* Budget can be imported using any of these accepted formats
*
* ```sh
* $ pulumi import gcp:billing/budget:Budget default billingAccounts/{{billing_account}}/budgets/{{name}}
* ```
*
* ```sh
* $ pulumi import gcp:billing/budget:Budget default {{billing_account}}/{{name}}
* ```
*
* ```sh
* $ pulumi import gcp:billing/budget:Budget default {{name}}
* ```
*/
export class Budget extends pulumi.CustomResource {
    /**
     * Get an existing Budget resource's state with the given name, ID, and optional extra
     * properties used to qualify the lookup.
     *
     * @param name The _unique_ name of the resulting resource.
     * @param id The _unique_ provider ID of the resource to lookup.
     * @param state Any extra arguments used during the lookup.
     * @param opts Optional settings to control the behavior of the CustomResource.
     */
    public static get(name: string, id: pulumi.Input<pulumi.ID>, state?: BudgetState, opts?: pulumi.CustomResourceOptions): Budget {
        // Passing an explicit id switches the constructor into lookup mode.
        return new Budget(name, <any>state, { ...opts, id });
    }

    /** @internal */
    public static readonly __pulumiType = 'gcp:billing/budget:Budget';

    /**
     * Returns true if the given object is an instance of Budget. This is designed to work even
     * when multiple copies of the Pulumi SDK have been loaded into the same process.
     */
    public static isInstance(obj: any): obj is Budget {
        // String-keyed brand check instead of `instanceof`, so instances created
        // by a different copy of the SDK are still recognized.
        return obj != null && obj['__pulumiType'] === Budget.__pulumiType;
    }

    /**
     * Defines notifications that are sent on every update to the
     * billing account's spend, regardless of the thresholds defined
     * using threshold rules.
     * Structure is documented below.
     */
    public readonly allUpdatesRule!: pulumi.Output<outputs.billing.BudgetAllUpdatesRule | undefined>;
    /**
     * The budgeted amount for each usage period.
     * Structure is documented below.
     */
    public readonly amount!: pulumi.Output<outputs.billing.BudgetAmount>;
    /**
     * ID of the billing account to set a budget on.
     */
    public readonly billingAccount!: pulumi.Output<string>;
    /**
     * Filters that define which resources are used to compute the actual
     * spend against the budget.
     * Structure is documented below.
     */
    public readonly budgetFilter!: pulumi.Output<outputs.billing.BudgetBudgetFilter>;
    /**
     * User data for display name in UI. Must be <= 60 chars.
     */
    public readonly displayName!: pulumi.Output<string | undefined>;
    /**
     * Resource name of the budget. The resource name implies the scope of a budget. Values are of the form
     * billingAccounts/{billingAccountId}/budgets/{budgetId}.
     */
    public /*out*/ readonly name!: pulumi.Output<string>;
    /**
     * Rules that trigger alerts (notifications of thresholds being
     * crossed) when spend exceeds the specified percentages of the
     * budget.
     * Structure is documented below.
     */
    public readonly thresholdRules!: pulumi.Output<outputs.billing.BudgetThresholdRule[]>;

    /**
     * Create a Budget resource with the given unique name, arguments, and options.
     *
     * @param name The _unique_ name of the resource.
     * @param args The arguments to use to populate this resource's properties.
     * @param opts A bag of options that control this resource's behavior.
     */
    constructor(name: string, args: BudgetArgs, opts?: pulumi.CustomResourceOptions)
    constructor(name: string, argsOrState?: BudgetArgs | BudgetState, opts?: pulumi.CustomResourceOptions) {
        // Named `resourceInputs` (not `inputs`) to avoid shadowing the `inputs`
        // type-import alias at the top of this module.
        const resourceInputs: pulumi.Inputs = {};
        opts = opts || {};
        if (opts.id) {
            // Lookup mode: hydrate from previously recorded state, if any.
            const state = argsOrState as BudgetState | undefined;
            resourceInputs["allUpdatesRule"] = state?.allUpdatesRule;
            resourceInputs["amount"] = state?.amount;
            resourceInputs["billingAccount"] = state?.billingAccount;
            resourceInputs["budgetFilter"] = state?.budgetFilter;
            resourceInputs["displayName"] = state?.displayName;
            resourceInputs["name"] = state?.name;
            resourceInputs["thresholdRules"] = state?.thresholdRules;
        } else {
            // Creation mode: validate required args unless rehydrating from a URN.
            const args = argsOrState as BudgetArgs | undefined;
            if (!opts.urn) {
                if (args?.amount === undefined) {
                    throw new Error("Missing required property 'amount'");
                }
                if (args?.billingAccount === undefined) {
                    throw new Error("Missing required property 'billingAccount'");
                }
                if (args?.thresholdRules === undefined) {
                    throw new Error("Missing required property 'thresholdRules'");
                }
            }
            resourceInputs["allUpdatesRule"] = args?.allUpdatesRule;
            resourceInputs["amount"] = args?.amount;
            resourceInputs["billingAccount"] = args?.billingAccount;
            resourceInputs["budgetFilter"] = args?.budgetFilter;
            resourceInputs["displayName"] = args?.displayName;
            resourceInputs["thresholdRules"] = args?.thresholdRules;
            resourceInputs["name"] = undefined /*out*/;
        }
        if (!opts.version) {
            opts = pulumi.mergeOptions(opts, { version: utilities.getVersion() });
        }
        super(Budget.__pulumiType, name, resourceInputs, opts);
    }
}
/**
 * Input properties used for looking up and filtering Budget resources.
 * Every property is optional: when looking up an existing budget only the
 * known state fields need to be supplied.
 */
export interface BudgetState {
    /**
     * Defines notifications that are sent on every update to the
     * billing account's spend, regardless of the thresholds defined
     * using threshold rules.
     * Structure is documented below.
     */
    allUpdatesRule?: pulumi.Input<inputs.billing.BudgetAllUpdatesRule>;
    /**
     * The budgeted amount for each usage period.
     * Structure is documented below.
     */
    amount?: pulumi.Input<inputs.billing.BudgetAmount>;
    /**
     * ID of the billing account to set a budget on.
     */
    billingAccount?: pulumi.Input<string>;
    /**
     * Filters that define which resources are used to compute the actual
     * spend against the budget.
     * Structure is documented below.
     */
    budgetFilter?: pulumi.Input<inputs.billing.BudgetBudgetFilter>;
    /**
     * User data for display name in UI. Must be <= 60 chars.
     */
    displayName?: pulumi.Input<string>;
    /**
     * Resource name of the budget. The resource name implies the scope of a budget. Values are of the form
     * billingAccounts/{billingAccountId}/budgets/{budgetId}.
     */
    name?: pulumi.Input<string>;
    /**
     * Rules that trigger alerts (notifications of thresholds being
     * crossed) when spend exceeds the specified percentages of the
     * budget.
     * Structure is documented below.
     */
    thresholdRules?: pulumi.Input<pulumi.Input<inputs.billing.BudgetThresholdRule>[]>;
}
/**
 * The set of arguments for constructing a Budget resource.
 * `amount`, `billingAccount` and `thresholdRules` are required; the
 * constructor rejects a create call that omits any of them.
 */
export interface BudgetArgs {
    /**
     * Defines notifications that are sent on every update to the
     * billing account's spend, regardless of the thresholds defined
     * using threshold rules.
     * Structure is documented below.
     */
    allUpdatesRule?: pulumi.Input<inputs.billing.BudgetAllUpdatesRule>;
    /**
     * The budgeted amount for each usage period.
     * Structure is documented below.
     */
    amount: pulumi.Input<inputs.billing.BudgetAmount>;
    /**
     * ID of the billing account to set a budget on.
     */
    billingAccount: pulumi.Input<string>;
    /**
     * Filters that define which resources are used to compute the actual
     * spend against the budget.
     * Structure is documented below.
     */
    budgetFilter?: pulumi.Input<inputs.billing.BudgetBudgetFilter>;
    /**
     * User data for display name in UI. Must be <= 60 chars.
     */
    displayName?: pulumi.Input<string>;
    /**
     * Rules that trigger alerts (notifications of thresholds being
     * crossed) when spend exceeds the specified percentages of the
     * budget.
     * Structure is documented below.
     */
    thresholdRules: pulumi.Input<pulumi.Input<inputs.billing.BudgetThresholdRule>[]>;
}
import * as React from 'react';
import * as _ from 'lodash-es';
import { useTranslation } from 'react-i18next';
import DashboardCard from '@console/shared/src/components/dashboard/dashboard-card/DashboardCard';
import DashboardCardHeader from '@console/shared/src/components/dashboard/dashboard-card/DashboardCardHeader';
import DashboardCardTitle from '@console/shared/src/components/dashboard/dashboard-card/DashboardCardTitle';
import UtilizationItem, {
TopConsumerPopoverProp,
MultilineUtilizationItem,
QueryWithDescription,
LimitRequested,
trimSecondsXMutator,
} from '@console/shared/src/components/dashboard/utilization-card/UtilizationItem';
import UtilizationBody from '@console/shared/src/components/dashboard/utilization-card/UtilizationBody';
import ConsumerPopover from '@console/shared/src/components/dashboard/utilization-card/TopConsumerPopover';
import { ByteDataTypes } from '@console/shared/src/graph-helper/data-utils';
import {
Flex,
FlexItem,
PopoverPosition,
Select,
SelectOption,
SelectVariant,
} from '@patternfly/react-core';
import { DashboardItemProps, withDashboardResources } from '../../with-dashboard-resources';
import {
humanizeBinaryBytes,
humanizeCpuCores,
humanizeNumber,
humanizeDecimalBytesPerSec,
} from '../../../utils/units';
import { getRangeVectorStats, getInstantVectorStats } from '../../../graphs/utils';
import {
getMultilineQueries,
getTop25ConsumerQueries,
getUtilizationQueries,
OverviewQuery,
} from '@console/shared/src/promql/cluster-dashboard';
import { MachineConfigPoolModel, NodeModel, PodModel, ProjectModel } from '../../../../models';
import { getPrometheusQueryResponse } from '../../../../actions/dashboards';
import { Humanize } from '../../../utils/types';
import { DataPoint, PrometheusResponse } from '../../../graphs';
import { useK8sWatchResource } from '@console/internal/components/utils/k8s-watch-hook';
import { MachineConfigPoolKind, referenceForModel } from '@console/internal/module/k8s';
import { UtilizationDurationDropdown } from '@console/shared/src/components/dashboard/utilization-card/UtilizationDurationDropdown';
import { useUtilizationDuration } from '@console/shared/src/hooks/useUtilizationDuration';
// Renders one utilization row (e.g. CPU, Memory) backed by a Prometheus range
// query, with optional total/limit/request companion queries.
export const PrometheusUtilizationItem = withDashboardResources<PrometheusUtilizationItemProps>(
  ({
    watchPrometheus,
    stopWatchPrometheusQuery,
    prometheusResults,
    utilizationQuery,
    totalQuery,
    title,
    TopConsumerPopover,
    humanizeValue,
    byteDataType,
    namespace,
    isDisabled = false,
    limitQuery,
    requestQuery,
    setLimitReqState,
  }) => {
    // Per-query responses/errors; they stay undefined while disabled or loading.
    let utilization: PrometheusResponse, utilizationError: any;
    let total: PrometheusResponse, totalError: any;
    let max: DataPoint<number>[];
    let limit: PrometheusResponse, limitError: any;
    let request: PrometheusResponse, requestError: any;
    let isLoading = false;
    const { duration } = useUtilizationDuration();
    // Subscribe to all configured queries; the cleanup unsubscribes the same set.
    // NOTE(review): totalQuery is watched/stopped without `duration` — presumably
    // it is an instant query (used only for the max value below); confirm.
    React.useEffect(() => {
      if (!isDisabled) {
        watchPrometheus(utilizationQuery, namespace, duration);
        totalQuery && watchPrometheus(totalQuery, namespace);
        limitQuery && watchPrometheus(limitQuery, namespace, duration);
        requestQuery && watchPrometheus(requestQuery, namespace, duration);
        return () => {
          stopWatchPrometheusQuery(utilizationQuery, duration);
          totalQuery && stopWatchPrometheusQuery(totalQuery);
          limitQuery && stopWatchPrometheusQuery(limitQuery, duration);
          requestQuery && stopWatchPrometheusQuery(requestQuery, duration);
        };
      }
    }, [
      watchPrometheus,
      stopWatchPrometheusQuery,
      duration,
      utilizationQuery,
      totalQuery,
      namespace,
      isDisabled,
      limitQuery,
      requestQuery,
    ]);
    if (!isDisabled) {
      [utilization, utilizationError] = getPrometheusQueryResponse(
        prometheusResults,
        utilizationQuery,
        duration,
      );
      [total, totalError] = getPrometheusQueryResponse(prometheusResults, totalQuery);
      [limit, limitError] = getPrometheusQueryResponse(prometheusResults, limitQuery, duration);
      [request, requestError] = getPrometheusQueryResponse(
        prometheusResults,
        requestQuery,
        duration,
      );
      // The total query's instant vector supplies the chart's maximum (capacity).
      max = getInstantVectorStats(total);
      // Loading until every configured query has produced a response.
      isLoading = !utilization || (totalQuery && !total) || (limitQuery && !limit);
    }
    return (
      <UtilizationItem
        title={title}
        utilization={utilization}
        limit={limit}
        requested={request}
        error={utilizationError || totalError || limitError || requestError}
        isLoading={isLoading}
        humanizeValue={humanizeValue}
        byteDataType={byteDataType}
        query={[utilizationQuery, limitQuery, requestQuery]}
        max={max && max.length ? max[0].y : null}
        TopConsumerPopover={TopConsumerPopover}
        setLimitReqState={setLimitReqState}
      />
    );
  },
);
// Renders a multi-line utilization graph (e.g. network in/out) driven by a set
// of Prometheus range queries, one plotted line per query.
export const PrometheusMultilineUtilizationItem = withDashboardResources<
  PrometheusMultilineUtilizationItemProps
>(
  ({
    watchPrometheus,
    stopWatchPrometheusQuery,
    prometheusResults,
    queries,
    title,
    TopConsumerPopovers,
    humanizeValue,
    byteDataType,
    namespace,
    isDisabled = false,
  }) => {
    const { duration } = useUtilizationDuration();
    // Subscribe to every query; cleanup unsubscribes the same set.
    React.useEffect(() => {
      if (!isDisabled) {
        queries.forEach((q) => watchPrometheus(q.query, namespace, duration));
        return () => {
          queries.forEach((q) => stopWatchPrometheusQuery(q.query, duration));
        };
      }
    }, [watchPrometheus, stopWatchPrometheusQuery, duration, queries, namespace, isDisabled]);
    const stats = [];
    let hasError = false;
    let isLoading = false;
    if (!isDisabled) {
      // Collect one stats series per query. `return false` only exits the
      // forEach callback early for that query (the return value is ignored).
      queries.forEach((query) => {
        const [response, responseError] = getPrometheusQueryResponse(
          prometheusResults,
          query.query,
          duration,
        );
        if (responseError) {
          hasError = true;
          return false;
        }
        if (!response) {
          isLoading = true;
          return false;
        }
        stats.push(getRangeVectorStats(response, query.desc, null, trimSecondsXMutator)?.[0] || []);
      });
    }
    return (
      <MultilineUtilizationItem
        title={title}
        data={stats}
        error={hasError}
        isLoading={isLoading}
        humanizeValue={humanizeValue}
        byteDataType={byteDataType}
        queries={queries}
        TopConsumerPopovers={TopConsumerPopovers}
      />
    );
  },
);
// Checkbox dropdown that filters the utilization card by node type
// (machine config pool name).
const UtilizationCardNodeFilter: React.FC<UtilizationCardNodeFilterProps> = ({
  machineConfigPools,
  onNodeSelect,
  selectedNodes,
}) => {
  const { t } = useTranslation();
  const [isOpen, setIsOpen] = React.useState<boolean>(false);
  // Sort a copy: Array.prototype.sort mutates in place, and `machineConfigPools`
  // is a prop (the array owned by the caller's watch hook) that must not be
  // mutated. 'worker' and 'master' pools are pinned first (in that order);
  // all remaining pools follow alphabetically.
  const sortedMCPs = [...machineConfigPools].sort((a, b) => {
    const order = ['worker', 'master'];
    const indexA = order.indexOf(a.metadata.name);
    const indexB = order.indexOf(b.metadata.name);
    if (indexA === -1 && indexB === -1) {
      return a.metadata.name.localeCompare(b.metadata.name);
    }
    if (indexA === -1) {
      return 1;
    }
    if (indexB === -1) {
      return -1;
    }
    return indexA - indexB;
  });
  const onToggle = (open: boolean): void => setIsOpen(open);
  return (
    <Select
      variant={SelectVariant.checkbox}
      aria-label={t('public~Filter by Node type')}
      onToggle={onToggle}
      onSelect={onNodeSelect}
      selections={selectedNodes}
      isOpen={isOpen}
      placeholderText={t('public~Filter by Node type')}
    >
      {sortedMCPs.map((mcp) => (
        <SelectOption key={mcp.metadata.name} value={mcp.metadata.name} />
      ))}
    </Select>
  );
};
// Cluster dashboard "Cluster utilization" card: CPU / memory / filesystem /
// network / pod-count rows, filterable by machine config pool (node type).
export const UtilizationCard = () => {
  const { t } = useTranslation();
  const [machineConfigPools, machineConfigPoolsLoaded] = useK8sWatchResource<
    MachineConfigPoolKind[]
  >({
    isList: true,
    kind: referenceForModel(MachineConfigPoolModel),
  });
  // TODO: add `useUserSettings` to get default selected
  const [selectedNodes, setSelectedNodes] = React.useState<string[]>([]);
  // TODO: add `useUserSettingsCompatibility` to store selectedNodes
  // Toggle a node type in/out of the selection.
  const onNodeSelect = (event: React.MouseEvent, selection: string) => {
    if (selectedNodes.includes(selection)) {
      setSelectedNodes(selectedNodes.filter((item) => item !== selection));
    } else {
      setSelectedNodes([...selectedNodes, selection]);
    }
  };
  // if no filter is applied, show all nodes using regex
  const nodeType = _.isEmpty(selectedNodes) ? '.+' : selectedNodes.join('|');
  const consumerQueries = React.useMemo(() => getTop25ConsumerQueries(nodeType), [nodeType]);
  const utilizationQueries = React.useMemo(() => getUtilizationQueries(nodeType), [nodeType]);
  const multilineQueries = React.useMemo(() => getMultilineQueries(nodeType), [nodeType]);
  // NOTE(review): each popover below wraps React.memo in useCallback with an
  // empty deps array, so `t` and `consumerQueries` are captured from the first
  // render only — confirm the stale-closure behavior is intended here.
  const cpuPopover = React.useCallback(
    React.memo<TopConsumerPopoverProp>(({ current }) => (
      <ConsumerPopover
        title={t('public~CPU')}
        current={current}
        consumers={[
          {
            query: consumerQueries[OverviewQuery.PROJECTS_BY_CPU],
            model: ProjectModel,
            metric: 'namespace',
          },
          {
            query: consumerQueries[OverviewQuery.PODS_BY_CPU],
            model: PodModel,
            metric: 'pod',
          },
          {
            query: consumerQueries[OverviewQuery.NODES_BY_CPU],
            model: NodeModel,
            metric: 'instance',
          },
        ]}
        humanize={humanizeCpuCores}
        position={PopoverPosition.top}
      />
    )),
    [],
  );
  // Top-consumers popover for the memory row.
  const memPopover = React.useCallback(
    React.memo<TopConsumerPopoverProp>(({ current }) => (
      <ConsumerPopover
        title={t('public~Memory')}
        current={current}
        consumers={[
          {
            query: consumerQueries[OverviewQuery.PROJECTS_BY_MEMORY],
            model: ProjectModel,
            metric: 'namespace',
          },
          {
            query: consumerQueries[OverviewQuery.PODS_BY_MEMORY],
            model: PodModel,
            metric: 'pod',
          },
          {
            query: consumerQueries[OverviewQuery.NODES_BY_MEMORY],
            model: NodeModel,
            metric: 'instance',
          },
        ]}
        humanize={humanizeBinaryBytes}
        position={PopoverPosition.top}
      />
    )),
    [],
  );
  // Top-consumers popover for the filesystem row.
  const storagePopover = React.useCallback(
    React.memo<TopConsumerPopoverProp>(({ current }) => (
      <ConsumerPopover
        title={t('public~Filesystem')}
        current={current}
        consumers={[
          {
            query: consumerQueries[OverviewQuery.PROJECTS_BY_STORAGE],
            model: ProjectModel,
            metric: 'namespace',
          },
          {
            query: consumerQueries[OverviewQuery.PODS_BY_STORAGE],
            model: PodModel,
            metric: 'pod',
          },
          {
            query: consumerQueries[OverviewQuery.NODES_BY_STORAGE],
            model: NodeModel,
            metric: 'instance',
          },
        ]}
        humanize={humanizeBinaryBytes}
        position={PopoverPosition.top}
      />
    )),
    [],
  );
  // Top-consumers popover for the pod-count row.
  const podPopover = React.useCallback(
    React.memo<TopConsumerPopoverProp>(({ current }) => (
      <ConsumerPopover
        title={t('public~Pod count')}
        current={current}
        consumers={[
          {
            query: consumerQueries[OverviewQuery.PROJECTS_BY_PODS],
            model: ProjectModel,
            metric: 'namespace',
          },
          {
            query: consumerQueries[OverviewQuery.NODES_BY_PODS],
            model: NodeModel,
            metric: 'node',
          },
        ]}
        humanize={humanizeNumber}
        position={PopoverPosition.top}
      />
    )),
    [],
  );
  // Top-consumers popover for the network-in line.
  const networkInPopover = React.useCallback(
    React.memo<TopConsumerPopoverProp>(({ current }) => (
      <ConsumerPopover
        title={t('public~Network in')}
        current={current}
        consumers={[
          {
            query: consumerQueries[OverviewQuery.PROJECTS_BY_NETWORK_IN],
            model: ProjectModel,
            metric: 'namespace',
          },
          {
            query: consumerQueries[OverviewQuery.PODS_BY_NETWORK_IN],
            model: PodModel,
            metric: 'pod',
          },
          {
            query: consumerQueries[OverviewQuery.NODES_BY_NETWORK_IN],
            model: NodeModel,
            metric: 'instance',
          },
        ]}
        humanize={humanizeDecimalBytesPerSec}
        position={PopoverPosition.top}
      />
    )),
    [],
  );
  // Top-consumers popover for the network-out line.
  const networkOutPopover = React.useCallback(
    React.memo<TopConsumerPopoverProp>(({ current }) => (
      <ConsumerPopover
        title={t('public~Network out')}
        current={current}
        consumers={[
          {
            query: consumerQueries[OverviewQuery.PROJECTS_BY_NETWORK_OUT],
            model: ProjectModel,
            metric: 'namespace',
          },
          {
            query: consumerQueries[OverviewQuery.PODS_BY_NETWORK_OUT],
            model: PodModel,
            metric: 'pod',
          },
          {
            query: consumerQueries[OverviewQuery.NODES_BY_NETWORK_OUT],
            model: NodeModel,
            metric: 'instance',
          },
        ]}
        humanize={humanizeDecimalBytesPerSec}
        position={PopoverPosition.top}
      />
    )),
    [],
  );
  // Nothing is rendered until the machine config pools are loaded, since the
  // node filter needs them.
  return (
    machineConfigPoolsLoaded && (
      <DashboardCard data-test-id="utilization-card">
        <DashboardCardHeader>
          <DashboardCardTitle>{t('public~Cluster utilization')}</DashboardCardTitle>
          <Flex>
            <FlexItem>
              <UtilizationCardNodeFilter
                machineConfigPools={machineConfigPools}
                onNodeSelect={onNodeSelect}
                selectedNodes={selectedNodes}
              />
            </FlexItem>
            <UtilizationDurationDropdown />
          </Flex>
        </DashboardCardHeader>
        <UtilizationBody>
          <PrometheusUtilizationItem
            title={t('public~CPU')}
            utilizationQuery={utilizationQueries[OverviewQuery.CPU_UTILIZATION].utilization}
            totalQuery={utilizationQueries[OverviewQuery.CPU_UTILIZATION].total}
            requestQuery={utilizationQueries[OverviewQuery.CPU_UTILIZATION].requests}
            TopConsumerPopover={cpuPopover}
            humanizeValue={humanizeCpuCores}
          />
          <PrometheusUtilizationItem
            title={t('public~Memory')}
            utilizationQuery={utilizationQueries[OverviewQuery.MEMORY_UTILIZATION].utilization}
            totalQuery={utilizationQueries[OverviewQuery.MEMORY_UTILIZATION].total}
            requestQuery={utilizationQueries[OverviewQuery.MEMORY_UTILIZATION].requests}
            TopConsumerPopover={memPopover}
            humanizeValue={humanizeBinaryBytes}
            byteDataType={ByteDataTypes.BinaryBytes}
          />
          <PrometheusUtilizationItem
            title={t('public~Filesystem')}
            utilizationQuery={utilizationQueries[OverviewQuery.STORAGE_UTILIZATION].utilization}
            totalQuery={utilizationQueries[OverviewQuery.STORAGE_UTILIZATION].total}
            TopConsumerPopover={storagePopover}
            humanizeValue={humanizeBinaryBytes}
            byteDataType={ByteDataTypes.BinaryBytes}
          />
          <PrometheusMultilineUtilizationItem
            title={t('public~Network transfer')}
            queries={multilineQueries[OverviewQuery.NETWORK_UTILIZATION]}
            humanizeValue={humanizeDecimalBytesPerSec}
            TopConsumerPopovers={[networkInPopover, networkOutPopover]}
          />
          <PrometheusUtilizationItem
            title={t('public~Pod count')}
            utilizationQuery={utilizationQueries[OverviewQuery.POD_UTILIZATION].utilization}
            TopConsumerPopover={podPopover}
            humanizeValue={humanizeNumber}
          />
        </UtilizationBody>
      </DashboardCard>
    )
  );
};
// Props shared by the single- and multi-line Prometheus utilization items.
type PrometheusCommonProps = {
  title: string;
  humanizeValue: Humanize;
  byteDataType?: ByteDataTypes;
  namespace?: string;
  isDisabled?: boolean;
};
// Props for a single-query utilization row with optional companion queries.
type PrometheusUtilizationItemProps = DashboardItemProps &
  PrometheusCommonProps & {
    utilizationQuery: string;
    totalQuery?: string;
    limitQuery?: string;
    requestQuery?: string;
    TopConsumerPopover?: React.ComponentType<TopConsumerPopoverProp>;
    setLimitReqState?: (state: LimitRequested) => void;
  };
// Props for a multi-line row; one popover may be supplied per query.
type PrometheusMultilineUtilizationItemProps = DashboardItemProps &
  PrometheusCommonProps & {
    queries: QueryWithDescription[];
    TopConsumerPopovers?: React.ComponentType<TopConsumerPopoverProp>[];
  };
// Props for the node-type (machine config pool) filter dropdown.
type UtilizationCardNodeFilterProps = {
  machineConfigPools: MachineConfigPoolKind[];
  onNodeSelect: (event: React.MouseEvent, selection: string) => void;
  selectedNodes: string[];
};
import chalk from 'chalk';
import * as yargs from 'yargs';
import {readdirSync, lstatSync, mkdirSync, readFileSync, writeFileSync, existsSync} from 'fs';
import {join} from 'path';
import {execSync} from 'child_process';
import inquirer = require('inquirer');
import {paramCase, pascalCase, titleCase} from 'change-case';
import {DocItem, DocItemCategory} from 'src/app/core/document-items.service';
import * as prettier from 'prettier';
// Resolved prettier configuration; populated in promptForMissingArguments().
let prettierConfig: prettier.Options;
console.info(chalk.gray('Gathering information...'));
// All Cashmere library components: every top-level folder (except 'pipes'
// itself) plus each pipe folder nested under 'pipes'.
const cashmereComponentDir = join(__dirname, '../projects/cashmere/src/lib');
const cashmereComponents = readdirSync(cashmereComponentDir)
    .filter(i => lstatSync(join(cashmereComponentDir, i)).isDirectory() && i !== 'pipes')
    .concat(
        readdirSync(join(cashmereComponentDir, 'pipes')).filter(i => lstatSync(join(join(cashmereComponentDir, 'pipes'), i)).isDirectory())
    );
// Cashmere "bit" components live in a separate project.
const bitComponentDir = join(__dirname, '../projects/cashmere-bits/src/lib');
const bitComponents = readdirSync(bitComponentDir).filter(i => lstatSync(join(bitComponentDir, i)).isDirectory());
// Existing (non-empty) example folders — used to enforce unique example names.
const examplesDir = join(__dirname, '../projects/cashmere-examples/src/lib');
const existingExamples = readdirSync(examplesDir).filter(
    i => lstatSync(join(examplesDir, i)).isDirectory() && readdirSync(join(examplesDir, i)).length
);
// Packages the examples project already provides; user-supplied package lists
// are filtered against this set.
const currentExampleDependencies = Object.keys(
    JSON.parse(readFileSync(join(__dirname, '../projects/cashmere-examples/package.json')).toString()).peerDependencies
);
const categories = ['cashmere', 'bit'];
const exampleTypes = ['simple', 'module'];
// Command-line arguments; anything missing/invalid here is collected
// interactively in promptForMissingArguments().
let args = yargs
    .option('category', {
        alias: 'cat',
        describe: 'choose a component category ("cashmere" or "bit")',
        choices: categories,
        required: false
    })
    .option('component', {
        alias: 'c',
        describe: `choose which component's documentation this example will be attached to`,
        required: false,
        choices: cashmereComponents.concat(bitComponents)
    })
    .option('name', {
        alias: 'n',
        describe: 'choose a name for your example',
        type: 'string',
        required: false,
        default: ''
    })
    .option('type', {
        alias: 't',
        describe: 'choose either simple type example or complex requiring module',
        choices: exampleTypes,
        required: false
    })
    .option('requiredPackages', {
        alias: 'r',
        describe: 'are there any additional NPM packages (other than what Cashmere already depends on) required by this example?',
        type: 'array',
        required: false
    })
    .help('help').argv;
/**
 * Prompts the user (via inquirer) for any CLI arguments that were not supplied
 * or were invalid, then normalizes them:
 * - `name` is converted to param-case,
 * - `requiredPackages` entries are trimmed/lowercased; empty entries and
 *   packages the examples project already depends on are dropped.
 * Also resolves the prettier config used when writing generated files.
 */
async function promptForMissingArguments() {
    const input = await inquirer.prompt([
        {
            name: 'category',
            message: `Is this example for a component in the main Cashmere library or a Cashmere Bit?`,
            type: 'list',
            choices: categories,
            when: () => !args.category
        },
        {
            name: 'component',
            message: `Which component is this example for?`,
            type: 'list',
            choices: x => (x.category === 'bit' ? bitComponents : cashmereComponents),
            // when the component isn't specified in args or the component that is specified in args is invalid for the specified category
            when: x =>
                !args.component || !((x.category || args.category) === 'bit' ? bitComponents : cashmereComponents).includes(args.component)
        },
        {
            name: 'name',
            message: 'What is the name of this example?',
            type: 'input',
            validate: x => (!!paramCase(x) && !existingExamples.includes(paramCase(x))) || 'Example name required and must be unique.',
            when: () => !args.name || existingExamples.includes(paramCase(args.name))
        },
        {
            name: 'type',
            message:
                'Is this a simple example (self-contained within single component) or do you need an entire module file for this example?',
            type: 'list',
            choices: exampleTypes,
            when: () => !args.type
        },
        {
            name: 'requiredPackages',
            message: `Specify any new NPM packages (other than the following) required by this example as a comma-separated list:`,
            suffix: currentExampleDependencies.join(','),
            type: 'input',
            required: false,
            // Only prompt if other prompts have happened. Don't want to prompt if all other required params were given as args.
            when: x => Object.keys(x).length,
            // transforms the user input from comma-separated string to an array of strings
            filter: x => (x || '').split(',')
        }
    ]);
    args = {...args, ...input};
    args.name = paramCase(args.name);
    args.requiredPackages = (args.requiredPackages || [])
        .map((p: string) => p.trim().toLowerCase())
        // Drop empty entries (an empty answer splits to ['']) so we never run
        // `npm install --save` with no package, and drop packages the examples
        // project already depends on.
        .filter(p => p.length > 0 && !currentExampleDependencies.includes(p));
    prettierConfig = (await prettier.resolveConfig(__dirname))!;
}
// Entry point: collect arguments, scaffold the example files, register the
// example with the docs site, install any extra packages, and run a build.
(async function newExample() {
    await promptForMissingArguments();
    createFiles();
    await registerWithDocumentItemsService();
    // Only shell out to npm when the user actually listed extra packages.
    if (args.requiredPackages!.length) {
        installAdditionalPackages();
    }
    runBuild();
    console.log(chalk.green('All done! Check your pending changes for TODOs for completing your example.'));
})();
/**
 * Scaffolds the example's source files under the examples project:
 * a component (.ts), an empty template (.html), an empty stylesheet (.scss),
 * and — for 'module' type examples — an NgModule wrapping the component.
 * Generated TypeScript is formatted with the repo's prettier config.
 */
function createFiles() {
    const newExampleDir = join(examplesDir, args.name);
    if (!existsSync(newExampleDir)) {
        mkdirSync(newExampleDir);
    }
    const exampleComponentFileName = join(newExampleDir, `${args.name}-example.component.ts`);
    console.info(chalk.gray(`creating ${exampleComponentFileName}...`));
    const componentFileContents = `import {Component} from '@angular/core';
@Component({
    selector: 'hc-${args.name}-example',
    templateUrl: '${args.name}-example.component.html',
    // TODO: delete the SCSS file if you don't need it in the example
    styleUrls: ['${args.name}-example.component.scss']
})
export class ${pascalCase(args.name)}ExampleComponent {
    // TODO: implement your example here
}
`;
    writeFileSync(exampleComponentFileName, prettier.format(componentFileContents, {...prettierConfig, parser: 'typescript'}));
    const exampleHtmlFileName = join(newExampleDir, `${args.name}-example.component.html`);
    console.info(chalk.gray(`creating ${exampleHtmlFileName}...`));
    writeFileSync(exampleHtmlFileName, `\n`);
    const exampleScssFileName = join(newExampleDir, `${args.name}-example.component.scss`);
    console.info(chalk.gray(`creating ${exampleScssFileName}...`));
    writeFileSync(exampleScssFileName, `\n`);
    // 'module' type examples additionally get an NgModule declaring/exporting
    // the example component.
    if (args.type === 'module') {
        const exampleModuleFileName = join(newExampleDir, `${args.name}-example.module.ts`);
        const componentName = `${pascalCase(args.name)}ExampleComponent`;
        console.info(chalk.gray(`creating ${exampleModuleFileName}...`));
        const moduleFileContents =
            `import {NgModule} from '@angular/core';
import {CommonModule} from '@angular/common';
import {CashmereModule} from '../cashmere.module';
import {${componentName}} from './${args.name}-example.component';
@NgModule({
    imports: [CommonModule, CashmereModule],
    declarations: [${componentName}],
    exports: [${componentName}],
    entryComponents: [${componentName}]
})
export class ${pascalCase(args.name)}ExampleModule {
}
`.trim() + '\n';
        writeFileSync(exampleModuleFileName, prettier.format(moduleFileContents, {...prettierConfig, parser: 'typescript'}));
    }
}
/**
 * Registers the new example with the docs site's document items service by
 * appending the example name to its component's entry in the category-specific
 * JSON file. If the component itself is not yet registered, prompts for its
 * docs-site category, creates the entry, and reminds the developer to add the
 * component's module to the shared CashmereModule if it's missing there.
 */
async function registerWithDocumentItemsService() {
    console.info(chalk.gray(`registering example with document items service...`));
    const docItemsDirectory = join(__dirname, '../src/app/core');
    let docItemsFile: string;
    if (args.category === 'cashmere') {
        docItemsFile = join(docItemsDirectory, './cashmere-components-document-items.json');
    } else if (args.category === 'bit') {
        docItemsFile = join(docItemsDirectory, './cashmere-bits-document-items.json');
    } else {
        console.warn(
            chalk.bold.yellowBright('Warning: cannot register with document items service since an unrecognized category was provided.')
        );
        return;
    }
    const docItems = JSON.parse(readFileSync(docItemsFile).toString());
    if (!docItems[args.component!]) {
        console.warn(
            chalk.yellowBright(
                `It seems that the component for this example has not yet been registered with the document items service. Let's take care of that right now.`
            )
        );
        const answers = await inquirer.prompt([
            {
                name: 'category',
                message: 'What accordion would this component fit under on the Cashmere docs site?',
                choices: ['buttons', 'forms', 'layout', 'nav', 'pipes', 'popups', 'table'] as DocItemCategory[]
            }
        ]);
        docItems[args.component!] = {
            category: answers.category,
            name: titleCase(args.component!),
            examples: [],
            id: args.component,
            // BUGFIX: the component directory is determined by `category`
            // ('cashmere' | 'bit'); this previously compared `args.type`
            // (whose values are 'simple' | 'module') against 'bit', so bit
            // components never looked for their usage doc in the bit directory.
            usageDoc: existsSync(
                join(args.category === 'bit' ? bitComponentDir : cashmereComponentDir, args.component!, `${args.component}.md`)
            ),
            hideApi: false
        } as DocItem;
        const cashmereModulePath = join(__dirname, '../src/app/shared/cashmere.module.ts');
        const cashmereModule = readFileSync(cashmereModulePath).toString();
        if (!cashmereModule.includes(`${pascalCase(args.component!)}Module`)) {
            // Prepend a TODO so the missing module import is caught in review.
            writeFileSync(cashmereModulePath, `// TODO: add ${pascalCase(args.component!)}Module to this file\n${cashmereModule}`);
            console.warn(
                chalk.yellowBright(
                    `It seems that the component for this example has not yet been added to the CashmereModule in 'src/app/shared/cashmere.module.ts'. You will need to do this on your own.`
                )
            );
        }
    }
    docItems[args.component!].examples.push(args.name);
    const jsonString = JSON.stringify(docItems);
    writeFileSync(docItemsFile, prettier.format(jsonString, {...prettierConfig, parser: 'json'}));
}
/**
 * Installs the user-specified extra NPM packages into both the docs project
 * and the examples project, then promotes the examples project's dependencies
 * to peerDependencies (the examples project only ever declares peer deps).
 */
function installAdditionalPackages() {
    console.info(chalk.gray('installing additional NPM packages to the docs project...'));
    execSync(`npm install --save ${args.requiredPackages!.join(' ')}`, {cwd: join(__dirname, '../'), stdio: 'inherit'});
    console.info(chalk.gray('installing additional NPM packages to the examples project...'));
    const cashmereExamplesProjectDir = join(__dirname, '../projects/cashmere-examples');
    execSync(`npm install --save ${args.requiredPackages!.join(' ')}`, {cwd: cashmereExamplesProjectDir, stdio: 'inherit'});
    console.info(chalk.gray('moving dependencies to peer dependencies...'));
    const packageJsonPath = join(cashmereExamplesProjectDir, 'package.json');
    const packageJson = JSON.parse(readFileSync(packageJsonPath).toString());
    // Merge the freshly installed dependencies into peerDependencies and drop
    // the dependencies section entirely.
    packageJson.peerDependencies = {...packageJson.peerDependencies, ...packageJson.dependencies};
    delete packageJson.dependencies;
    const jsonString = JSON.stringify(packageJson);
    writeFileSync(packageJsonPath, prettier.format(jsonString, {...prettierConfig, parser: 'json'}));
}
function runBuild() {
console.info(chalk.gray('Performing initial build with the new example...'));
execSync('npm run build', {cwd: join(__dirname, '../'), stdio: 'inherit'});
} | the_stack |
import { Location, MarkdownString, TestItem, TestMessage } from 'vscode';
import { INVOCATION_PREFIX } from '../../constants';
import { dataCache, ITestItemData } from '../../controller/testItemDataCache';
import { createTestItem, updateOrCreateTestItem } from '../../controller/utils';
import { IJavaTestItem, IRunTestContext, TestKind, TestLevel } from '../../types';
import { RunnerResultAnalyzer } from '../baseRunner/RunnerResultAnalyzer';
import { findTestLocation, setTestState, TestResultState } from '../utils';
export class JUnitRunnerResultAnalyzer extends RunnerResultAnalyzer {
    // Maps the runner-assigned test id (parsed from TestTree messages) to its info.
    private testOutputMapping: Map<string, ITestInfo> = new Map();
    // Every test item (and, for method-level items, their parents) in this run, by id.
    private triggeredTestsMapping: Map<string, TestItem> = new Map();
    // Result state of the test currently being processed.
    private currentTestState: TestResultState;
    // The test item currently being processed, if any.
    private currentItem: TestItem | undefined;
    // Elapsed time accumulator; negative while a test is running (see processData).
    private currentDuration: number = 0;
    // Accumulated stack-trace markdown between TraceStart/TraceEnd messages.
    private traces: MarkdownString;
    // Diff-style failure message, built from expected/actual output when available.
    private assertionFailure: TestMessage | undefined;
    // Which multi-line message section (trace/expected/actual) is being recorded.
    private recordingType: RecordingType;
    // Expected value captured between ExpectStart/ExpectEnd messages.
    private expectString: string;
    // Actual value captured between ActualStart/ActualEnd messages.
    private actualString: string;
    private projectName: string;
    // Suites announced by the runner whose test items are not yet resolved.
    private incompleteTestSuite: ITestInfo[] = [];
    constructor(protected testContext: IRunTestContext) {
        super(testContext);
        this.projectName = testContext.projectName;
        // Breadth-first walk of the triggered test items, registering each one
        // (and, for method-level items, also their parent class) so incoming
        // runner messages can be matched back to test items.
        const queue: TestItem[] = [...testContext.testItems];
        while (queue.length) {
            const item: TestItem = queue.shift()!;
            const testLevel: TestLevel | undefined = dataCache.get(item)?.testLevel;
            if (testLevel === undefined || testLevel === TestLevel.Invocation) {
                // Invocation-level (dynamic) items and items without cached data
                // are not registered at all.
                continue;
            } else if (testLevel === TestLevel.Method && item.parent) {
                this.triggeredTestsMapping.set(item.parent.id, item.parent);
            } else {
                // Class/container level: descend into the children.
                item.children.forEach((child: TestItem) => {
                    queue.push(child);
                });
            }
            this.triggeredTestsMapping.set(item.id, item);
        }
    }
public analyzeData(data: string): void {
const lines: string[] = data.split(/\r?\n/);
for (const line of lines) {
if (!line) {
continue;
}
this.processData(line);
this.testContext.testRun.appendOutput(line + '\r\n');
}
}
    /**
     * Handles one line of the JUnit runner's wire protocol. The analyzer is a
     * state machine: message-prefixed lines switch state, and unprefixed lines
     * are appended to whichever section (stack trace / expected / actual) is
     * currently being recorded.
     */
    public processData(data: string): void {
        if (data.startsWith(MessageId.TestTree)) {
            // Test tree announcement: register the runner-assigned test id.
            this.enlistToTestMapping(data.substring(MessageId.TestTree.length).trim());
        } else if (data.startsWith(MessageId.TestStart)) {
            const item: TestItem | undefined = this.getTestItem(data.substr(MessageId.TestStart.length));
            if (!item) {
                return;
            }
            if (item.id !== this.currentItem?.id) {
                this.initializeCache(item);
            }
            this.testContext.testRun.started(item);
            // Duration bookkeeping: a negative value means "running" (it holds
            // -startTime); adding the end time later yields the elapsed time.
            const start: number = Date.now();
            if (this.currentDuration === 0) {
                this.currentDuration = -start;
            } else if (this.currentDuration > 0) {
                // Some test cases may executed multiple times (@RepeatedTest), we need to calculate the time for each execution
                this.currentDuration -= start;
            }
        } else if (data.startsWith(MessageId.TestEnd)) {
            if (!this.currentItem) {
                return;
            }
            if (this.currentDuration < 0) {
                const end: number = Date.now();
                this.currentDuration += end;
            }
            // Tests still in Running state at the end message have passed;
            // an explicit ignore marker wins over everything else.
            if (data.indexOf(MessageId.IGNORE_TEST_PREFIX) > -1) {
                this.currentTestState = TestResultState.Skipped;
            } else if (this.currentTestState === TestResultState.Running) {
                this.currentTestState = TestResultState.Passed;
            }
            setTestState(this.testContext.testRun, this.currentItem, this.currentTestState, undefined, this.currentDuration);
        } else if (data.startsWith(MessageId.TestFailed)) {
            // Assumption failures are reported as skipped, not failed.
            if (data.indexOf(MessageId.ASSUMPTION_FAILED_TEST_PREFIX) > -1) {
                this.currentTestState = TestResultState.Skipped;
            } else {
                this.currentTestState = TestResultState.Failed;
            }
        } else if (data.startsWith(MessageId.TestError)) {
            let item: TestItem | undefined = this.getTestItem(data.substr(MessageId.TestError.length));
            if (!item) {
                if (this.testContext.testItems.length === 1) {
                    // Single-item runs: attribute the error to that item.
                    item = this.testContext.testItems[0];
                } else {
                    // todo: Report error when we cannot find the target test item?
                    return;
                }
            }
            if (item.id !== this.currentItem?.id) {
                this.initializeCache(item);
            }
            this.currentTestState = TestResultState.Errored;
        } else if (data.startsWith(MessageId.TraceStart)) {
            // Begin recording a stack trace into trusted, HTML-capable markdown.
            this.traces = new MarkdownString();
            this.traces.isTrusted = true;
            this.traces.supportHtml = true;
            this.recordingType = RecordingType.StackTrace;
        } else if (data.startsWith(MessageId.TraceEnd)) {
            if (!this.currentItem) {
                return;
            }
            const testMessage: TestMessage = new TestMessage(this.traces);
            this.tryAppendMessage(this.currentItem, testMessage, this.currentTestState);
            this.recordingType = RecordingType.None;
            if (this.currentTestState === TestResultState.Errored) {
                setTestState(this.testContext.testRun, this.currentItem, this.currentTestState);
            }
        } else if (data.startsWith(MessageId.ExpectStart)) {
            this.recordingType = RecordingType.ExpectMessage;
        } else if (data.startsWith(MessageId.ExpectEnd)) {
            this.recordingType = RecordingType.None;
            // Strip the trailing newline added while accumulating lines.
            this.expectString = this.expectString.replace(/\n$/, '');
        } else if (data.startsWith(MessageId.ActualStart)) {
            this.recordingType = RecordingType.ActualMessage;
        } else if (data.startsWith(MessageId.ActualEnd)) {
            this.recordingType = RecordingType.None;
            this.actualString = this.actualString.replace(/\n$/, '');
            // Build a diff-style failure message once both sides are known,
            // unless one was already extracted from the stack trace.
            if (!this.assertionFailure && this.expectString && this.actualString) {
                this.assertionFailure = TestMessage.diff(`Expected [${this.expectString}] but was [${this.actualString}]`, this.expectString, this.actualString);
            }
        } else if (this.recordingType === RecordingType.ExpectMessage) {
            this.expectString += data + '\n';
        } else if (this.recordingType === RecordingType.ActualMessage) {
            this.actualString += data + '\n';
        } else if (this.recordingType === RecordingType.StackTrace) {
            // Fall back to parsing "expected: <x> but was: <y>" out of the
            // stack trace when no explicit expect/actual messages were sent.
            if (!this.assertionFailure) {
                const assertionRegExp: RegExp = /expected.*:.*<(.+?)>.*but.*:.*<(.+?)>/mi;
                const assertionResults: RegExpExecArray | null = assertionRegExp.exec(data);
                if (assertionResults && assertionResults.length === 3) {
                    this.assertionFailure = TestMessage.diff(`Expected [${assertionResults[1]}] but was [${assertionResults[2]}]`, assertionResults[1], assertionResults[2]);
                }
            }
            this.processStackTrace(data, this.traces, this.assertionFailure, this.currentItem, this.projectName);
        }
    }
protected getTestItem(message: string): TestItem | undefined {
    // The runner message begins with "<index>,<...>"; that index keys the
    // output mapping populated while parsing the test tree.
    const commaPos: number = message.indexOf(',');
    const index: string = message.substring(0, commaPos).trim();
    return this.testOutputMapping.get(index)?.testItem;
}
protected getTestId(message: string): string {
    /**
     * Parses the test runner's output; components of the expression:
     * '(?:@AssumptionFailure: |@Ignore: )?' - optional marker for a case skipped
     *     due to assumption failure or @Ignore
     * '(.*?)' - test method name
     * '\(([^)]*)\)[^(]*$' - class fully qualified name wrapped by the last
     *     paired brackets, see:
     *     https://github.com/microsoft/vscode-java-test/issues/1075
     */
    const regexp: RegExp = /(?:@AssumptionFailure: |@Ignore: )?(.*?)\(([^)]*)\)[^(]*$/;
    const matched: RegExpExecArray | null = regexp.exec(message);
    if (matched && matched.length === 3) {
        return `${this.projectName}@${matched[2]}#${matched[1]}`;
    }
    // Class-level output, i.e.: `%ERROR 2,a.class.FullyQualifiedName` —
    // everything after the last comma is the fully qualified class name.
    const lastComma: number = message.lastIndexOf(',');
    return lastComma > -1
        ? `${this.projectName}@${message.slice(lastComma + 1)}`
        : `${this.projectName}@${message}`;
}
protected initializeCache(item: TestItem): void {
    // Reset all per-test bookkeeping before `item` starts reporting output.
    this.currentItem = item;
    this.currentTestState = TestResultState.Running;
    this.currentDuration = 0;
    this.recordingType = RecordingType.None;
    this.assertionFailure = undefined;
    this.expectString = '';
    this.actualString = '';
}
protected getStacktraceFilter(): string[] {
    // Stack frames whose class names start with any of these prefixes are
    // test-framework internals and are filtered out of displayed traces.
    const frameworkFramePrefixes: string[] = [
        'org.eclipse.jdt.internal.junit.runner.',
        'org.eclipse.jdt.internal.junit4.runner.',
        'org.eclipse.jdt.internal.junit5.runner.',
        'org.eclipse.jdt.internal.junit.ui.',
        'junit.framework.TestCase',
        'junit.framework.TestResult',
        'junit.framework.TestResult$1',
        'junit.framework.TestSuite',
        'junit.framework.Assert',
        'org.junit.',
        'java.lang.reflect.Method.invoke',
        'sun.reflect.',
        'jdk.internal.reflect.',
    ];
    return frameworkFramePrefixes;
}
/**
 * Parses one MessageId.TestTree line and records the described test in
 * testOutputMapping, keyed by the runner-assigned index.
 * Dynamic tests get an invocation-level child item created under their parent
 * method; regular tests are looked up in triggeredTestsMapping, or created on
 * the fly under the innermost incomplete suite.
 */
private enlistToTestMapping(message: string): void {
    // Split on unescaped commas; "\," is an escaped comma inside a field.
    const regExp: RegExp = /([^\\,]|\\\,?)+/gm;
    // See MessageId.TestTree's comment for its format
    const result: RegExpMatchArray | null = message.match(regExp);
    if (result && result.length > 6) {
        const index: string = result[0];
        const testId: string = this.getTestId(result[1]);
        const isSuite: boolean = result[2] === 'true';
        const testCount: number = parseInt(result[3], 10);
        const isDynamic: boolean = result[4] === 'true';
        const parentIndex: string = result[5];
        const displayName: string = result[6].replace(/\\,/g, ',');
        // uniqueId is the 9th field and is only meaningful for JUnit 5 runs.
        const uniqueId: string | undefined = this.testContext.kind === TestKind.JUnit5 ?
            result[8]?.replace(/\\,/g, ',') : undefined;
        let testItem: TestItem | undefined;
        if (isDynamic) {
            // A dynamic test hangs off the method item recorded for its parent index.
            const parentInfo: ITestInfo | undefined = this.testOutputMapping.get(parentIndex);
            const parent: TestItem | undefined = parentInfo?.testItem;
            if (parent) {
                const parentData: ITestItemData | undefined = dataCache.get(parent);
                if (parentData?.testLevel === TestLevel.Method) {
                    testItem = updateOrCreateTestItem(parent, {
                        children: [],
                        uri: parent.uri?.toString(),
                        range: parent.range,
                        jdtHandler: parentData.jdtHandler,
                        fullName: parentData.fullName,
                        label: this.getTestMethodName(displayName),
                        // prefer uniqueId, as it does not change when re-running only a single invocation:
                        id: uniqueId ? `${INVOCATION_PREFIX}${uniqueId}`
                            : `${INVOCATION_PREFIX}${parent.id}[#${parent.children.size + 1}]`,
                        projectName: parentData.projectName,
                        testKind: parentData.testKind,
                        testLevel: TestLevel.Invocation,
                        uniqueId
                    });
                }
            }
        } else {
            testItem = this.triggeredTestsMapping.get(testId);
            if (this.incompleteTestSuite.length) {
                // Consume one expected child from the innermost open suite;
                // pop it once all of its children have been seen.
                const suiteIdx: number = this.incompleteTestSuite.length - 1;
                const parentSuite: ITestInfo = this.incompleteTestSuite[suiteIdx];
                parentSuite.testCount--;
                if (parentSuite.testCount <= 0) {
                    this.incompleteTestSuite.pop();
                }
                if (!testItem && parentSuite.testItem) {
                    // Not a triggered test we know about — create an invocation
                    // item under the suite so its results still surface.
                    const itemData: IJavaTestItem | undefined = {
                        children: [],
                        uri: undefined,
                        range: undefined,
                        jdtHandler: '',
                        fullName: testId.substr(testId.indexOf('@') + 1),
                        label: this.getTestMethodName(displayName),
                        id: `${INVOCATION_PREFIX}${testId}`,
                        projectName: this.projectName,
                        testKind: this.testContext.kind,
                        testLevel: TestLevel.Invocation,
                    };
                    testItem = createTestItem(itemData, parentSuite.testItem);
                }
            }
            if (isSuite && testCount > 0) {
                // This entry is itself a suite expecting `testCount` children.
                this.incompleteTestSuite.push({
                    testId,
                    testCount,
                    testItem,
                });
            }
            // JUnit 5 display names (e.g. from @DisplayName) are surfaced as the
            // item description when they differ from the label.
            if (testItem && dataCache.get(testItem)?.testKind === TestKind.JUnit5 && testItem.label !== displayName) {
                testItem.description = displayName;
            }
        }
        this.testOutputMapping.set(index, {
            testId,
            testCount,
            testItem,
        });
    }
}
/**
 * Attaches a source location to `testMessage` (explicit runner-reported
 * location, then the item's own range, then a lookup by test id) and reports
 * the message with `testState` to the current test run.
 */
private async tryAppendMessage(item: TestItem, testMessage: TestMessage, testState: TestResultState): Promise<void> {
    if (this.testMessageLocation) {
        // A location was reported explicitly for this message; consume it.
        testMessage.location = this.testMessageLocation;
        this.testMessageLocation = undefined;
    } else if (item.uri && item.range) {
        testMessage.location = new Location(item.uri, item.range);
    } else {
        // Fall back to resolving a location from the (possibly invocation-prefixed) id.
        let id: string = item.id;
        if (id.startsWith(INVOCATION_PREFIX)) {
            id = id.substring(INVOCATION_PREFIX.length);
            if (this.testContext.kind === TestKind.JUnit) {
                // change test[0] -> test
                // to fix: https://github.com/microsoft/vscode-java-test/issues/1296
                const bracketIndex: number = id.lastIndexOf('[');
                if (bracketIndex > -1) {
                    id = id.substring(0, bracketIndex);
                }
            }
        }
        testMessage.location = await findTestLocation(id);
    }
    setTestState(this.testContext.testRun, item, testState, testMessage);
}
// See: org.eclipse.jdt.internal.junit.model.TestCaseElement#getTestMethodName()
private getTestMethodName(testName: string): string {
    // Strip a trailing "(...)" parameter suffix, e.g. "test(int)" -> "test".
    const parenIndex: number = testName.lastIndexOf('(');
    return parenIndex > 0 ? testName.substring(0, parenIndex) : testName;
}
}
/** Sentinel prefixes of the Eclipse JDT test runner's line-based output protocol. */
enum MessageId {
    /**
     * Notification about a test inside the test suite.
     * TEST_TREE + testId + "," + testName + "," + isSuite + "," + testCount + "," + isDynamicTest +
     * "," + parentId + "," + displayName + "," + parameterTypes + "," + uniqueId
     * isSuite = "true" or "false"
     * isDynamicTest = "true" or "false"
     * parentId = the unique id of its parent if it is a dynamic test, otherwise can be "-1"
     * displayName = the display name of the test
     * parameterTypes = comma-separated list of method parameter types if applicable, otherwise an
     * empty string
     * uniqueId = the unique ID of the test provided by JUnit launcher, otherwise an empty string
     */
    TestTree = '%TSTTREE',
    /** A test (or one repetition of it) has started executing. */
    TestStart = '%TESTS',
    /** The current test has finished executing. */
    TestEnd = '%TESTE',
    /** The current test failed (assertion or assumption failure). */
    TestFailed = '%FAILED',
    /** An unexpected error occurred in a test or in the run itself. */
    TestError = '%ERROR',
    /** Start/end markers of the multi-line "expected" value payload. */
    ExpectStart = '%EXPECTS',
    ExpectEnd = '%EXPECTE',
    /** Start/end markers of the multi-line "actual" value payload. */
    ActualStart = '%ACTUALS',
    ActualEnd = '%ACTUALE',
    /** Start/end markers of a multi-line stack trace payload. */
    TraceStart = '%TRACES',
    TraceEnd = '%TRACEE',
    /** Inline marker: the test was skipped via @Ignore. */
    IGNORE_TEST_PREFIX = '@Ignore: ',
    /** Inline marker: the test was skipped due to a failed assumption. */
    ASSUMPTION_FAILED_TEST_PREFIX = '@AssumptionFailure: ',
}
/** Bookkeeping for one entry of the runner's test-tree output. */
interface ITestInfo {
    // Full test id of the form "<project>@<class>#<method>" (see getTestId).
    testId: string;
    // For suites: number of child tests still expected under this entry.
    testCount: number;
    // The resolved VS Code test item, when one could be found or created.
    testItem: TestItem | undefined;
}
/** Which multi-line payload the analyzer is currently accumulating. */
enum RecordingType {
    // Not inside any multi-line payload.
    None,
    // Between %TRACES and %TRACEE: collecting a stack trace.
    StackTrace,
    // Between %EXPECTS and %EXPECTE: collecting the expected value.
    ExpectMessage,
    // Between %ACTUALS and %ACTUALE: collecting the actual value.
    ActualMessage,
}
/* global mhaStrings */
/* global mhaDates */
/* exported Received */
const Received = (function () {
"use strict";
const receivedRows = [];
let sortColumn = "hop";
let sortOrder = 1;
// Builds array of values for each header in receivedHeaderNames.
// This algorithm should work regardless of the order of the headers, given:
// - The date, if present, is always at the end, separated by a ";".
// Values not attached to a header will not be reflected in output.
// Returns a map of field objects; each field stringifies to its value, so the
// result can be used both for display and for numeric coercion (dateNum).
const parseHeader = function (receivedHeader) {
    // Factory for one display field: a label, a value (default ""), and a
    // toString that yields the value so fields coerce naturally.
    const ReceivedField = function (_label: string, _value?) {
        return {
            label: _label,
            value: _value !== undefined ? _value : "",
            toString: function () { return this.value; }
        };
    };
    const receivedFields = {};
    // sourceHeader keeps the raw input; the rest are parsed-out components.
    receivedFields["sourceHeader"] = ReceivedField("", receivedHeader);
    receivedFields["hop"] = ReceivedField(mhaStrings.mhaReceivedHop);
    receivedFields["from"] = ReceivedField(mhaStrings.mhaReceivedFrom);
    receivedFields["by"] = ReceivedField(mhaStrings.mhaReceivedBy);
    receivedFields["with"] = ReceivedField(mhaStrings.mhaReceivedWith);
    receivedFields["id"] = ReceivedField(mhaStrings.mhaReceivedId);
    receivedFields["for"] = ReceivedField(mhaStrings.mhaReceivedFor);
    receivedFields["via"] = ReceivedField(mhaStrings.mhaReceivedVia);
    receivedFields["date"] = ReceivedField(mhaStrings.mhaReceivedDate);
    receivedFields["delay"] = ReceivedField(mhaStrings.mhaReceivedDelay);
    receivedFields["percent"] = ReceivedField(mhaStrings.mhaReceivedPercent, 0);
    // Unlabeled sort helpers, filled in later by computeDeltas.
    receivedFields["delaySort"] = ReceivedField("", -1);
    receivedFields["dateNum"] = ReceivedField("");
    // Human-readable dump: "label: value" per populated, labeled field.
    receivedFields.toString = function () {
        const str = [];
        for (const fieldName in receivedFields) {
            if (receivedFields[fieldName].label && receivedFields[fieldName].toString()) {
                str.push(receivedFields[fieldName].label + ": " + receivedFields[fieldName].toString());
            }
        }
        return str.join("\n");
    }
    // Appends (";"-joined) or sets a field's value; unknown/empty inputs are ignored.
    const setField = function (fieldName, fieldValue) {
        if (!fieldName || !fieldValue || !receivedFields[fieldName.toLowerCase()]) {
            return false;
        }
        if (receivedFields[fieldName.toLowerCase()].value) { receivedFields[fieldName.toLowerCase()].value += "; " + fieldValue; }
        else { receivedFields[fieldName.toLowerCase()].value = fieldValue; }
        return false;
    }
    if (receivedHeader) {
        // Strip linefeeds first
        receivedHeader = receivedHeader.replace(/\r|\n|\r\n/g, ' ')
        // Build array of header locations
        const headerMatches = [];
        // Some bad dates don't wrap UTC in paren - fix that first
        receivedHeader = receivedHeader.replace(/UTC|\(UTC\)/gi, "(UTC)");
        // Read out the date first, then clear it from the string
        let iDate = receivedHeader.lastIndexOf(";");
        // No semicolon means no date - or maybe there's one there?
        // Sendgrid is bad about this
        if (iDate === -1) {
            // First try to find a day of the week
            receivedHeader = receivedHeader.replace(/\s*(Mon|Tue|Wed|Thu|Fri|Sat|Sun)/g, "; $1");
            iDate = receivedHeader.lastIndexOf(";");
        }
        if (iDate === -1) {
            // Next we look for year-month-day, 4-1/2-1/2
            receivedHeader = receivedHeader.replace(/\s*(\d{4}-\d{1,2}-\d{1,2})/g, "; $1");
            iDate = receivedHeader.lastIndexOf(";");
        }
        if (iDate !== -1 && receivedHeader.length !== iDate + 1) {
            const dateField = receivedHeader.substring(iDate + 1);
            receivedHeader = receivedHeader.substring(0, iDate);
            const parsedDate = mhaDates.parseDate(dateField);
            if (parsedDate) {
                receivedFields["date"].value = parsedDate.date;
                receivedFields["dateNum"].value = parsedDate.dateNum;
            }
        }
        // Scan for malformed postFix headers
        // Received: by example.com (Postfix, from userid 1001)
        // id 1234ABCD; Thu, 21 Aug 2014 12:12:48 +0200 (CEST)
        const postFix = receivedHeader.match(/(.*)by (.*? \(Postfix, from userid .*?\))(.*)/mi);
        if (postFix) {
            setField("by", postFix[2]);
            receivedHeader = postFix[1] + postFix[3];
        }
        // Scan for malformed qmail headers
        // Received: (qmail 10876 invoked from network); 24 Aug 2014 16:13:38 -0000
        const qmail = receivedHeader.match(/(.*)\((qmail .*? invoked from .*?)\)(.*)/mi);
        if (qmail) {
            setField("by", qmail[2]);
            receivedHeader = qmail[1] + qmail[3];
        }
        // Split up the string now so we can look for our headers
        const tokens = receivedHeader.split(/\s+/);
        let fieldName;
        for (fieldName in receivedFields) {
            tokens.some(function (token, iToken) {
                if (fieldName.toLowerCase() === token.toLowerCase()) {
                    headerMatches.push({ fieldName: fieldName, iToken: iToken });
                    // We don't return true so we can match any duplicate headers
                    // In doing this, we risk failing to parse a string where a header
                    // keyword appears as the value for another header
                    // Both situations are invalid input
                    // We're just picking which one we'd prefer to handle
                }
            });
        }
        // Next bit assumes headerMatches[fieldName,iToken] is increasing on iToken.
        // Sort it so it is.
        headerMatches.sort(function (a, b) { return a.iToken - b.iToken; });
        // Each field's value is the run of tokens between its keyword and the
        // next matched keyword (or end of string).
        headerMatches.forEach(function (headerMatch, iMatch) {
            let iNextTokenHeader;
            if (iMatch + 1 < headerMatches.length) {
                iNextTokenHeader = headerMatches[iMatch + 1].iToken;
            } else {
                iNextTokenHeader = tokens.length;
            }
            setField(headerMatch.fieldName, tokens.slice(headerMatch.iToken + 1, iNextTokenHeader).join(" ").trim())
        });
    }
    return receivedFields;
};
function exists() { return receivedRows.length > 0; }
// Sorts receivedRows on the given column, toggling direction when the same
// column is chosen twice in a row.
// NOTE(review): the comparator reads this.sortOrder, so doSort only behaves
// when invoked as a method of the returned object (which exposes a sortOrder
// getter); called unbound under "use strict" `this` is undefined — confirm
// all callers use method-call syntax.
function doSort(col) {
    if (sortColumn === col) {
        sortOrder *= -1;
    } else {
        sortColumn = col;
        sortOrder = 1;
    }
    // Prefer a dedicated "<col>Sort" helper field when rows carry one
    // (e.g. delaySort for the delay column).
    if (sortColumn + "Sort" in receivedRows[0]) {
        col = col + "Sort";
    }
    receivedRows.sort((a, b) => {
        return this.sortOrder * (a[col] < b[col] ? -1 : 1);
    });
}
function add(receivedHeader) { receivedRows.push(parseHeader(receivedHeader)); }
// Computes min/sec from the diff of current and last.
// Returns nothing if last or current is NaN.
function computeTime(current, last) {
const time = [];
if (isNaN(current) || isNaN(last)) { return ""; }
let diff = current - last;
let iDelay;
let printedMinutes = false;
if (Math.abs(diff) < 1000) {
return "0 " + mhaStrings.mhaSeconds;
}
if (diff < 0) {
time.push(mhaStrings.mhaNegative);
diff = -diff;
}
if (diff >= 1000 * 60) {
iDelay = Math.floor(diff / 1000 / 60);
time.push(iDelay, " ");
if (iDelay === 1) {
time.push(mhaStrings.mhaMinute);
} else {
time.push(mhaStrings.mhaMinutes);
}
diff -= iDelay * 1000 * 60;
printedMinutes = true;
}
if (printedMinutes && diff) {
time.push(" ");
}
if (!printedMinutes || diff) {
iDelay = Math.floor(diff / 1000);
time.push(iDelay, " ");
if (iDelay === 1) {
time.push(mhaStrings.mhaSecond);
} else {
time.push(mhaStrings.mhaSeconds);
}
}
return time.join("");
}
// Fills in hop numbers, per-hop delay text, delaySort and percent values for
// every row, then returns the total transit time as text (or "" when start
// and end coincide).
// NOTE(review): row.dateNum is a ReceivedField object; isNaN()/subtraction
// coerce it through its toString, while some lines read .value directly —
// both paths yield the numeric timestamp, but confirm before refactoring.
function computeDeltas() {
    // Process received headers in reverse order
    receivedRows.reverse();
    // Parse rows and compute values needed for the "Delay" column
    let iStartTime = 0;
    let iEndTime = 0;
    let iLastTime = NaN;
    let iDelta = 0; // This will be the sum of our positive deltas
    receivedRows.forEach(function (row) {
        if (!isNaN(row.dateNum)) {
            if (!isNaN(iLastTime) && iLastTime < row.dateNum) {
                iDelta += row.dateNum - iLastTime;
            }
            iStartTime = iStartTime || row.dateNum;
            iEndTime = row.dateNum;
            iLastTime = row.dateNum;
        }
    });
    // Second pass: assign hop numbers and delay values per row.
    iLastTime = NaN;
    receivedRows.forEach(function (row, index) {
        row.hop.value = index + 1;
        row.delay.value = computeTime(row.dateNum, iLastTime);
        if (!isNaN(row.dateNum) && !isNaN(iLastTime) && iDelta !== 0) {
            row.delaySort.value = row.dateNum.value - iLastTime;
            // Only positive delays will get percentage bars. Negative delays will be color coded at render time.
            if (row.delaySort.value > 0) {
                row.percent.value = 100 * row.delaySort.value / iDelta;
            }
        }
        if (!isNaN(row.dateNum)) {
            iLastTime = row.dateNum.value;
        }
    });
    // Total time is still last minus first, even if negative.
    return iEndTime !== iStartTime ? computeTime(iEndTime, iStartTime) : "";
}
// Public surface of the Received module: mutation (add), sorting, delta
// computation, and read-only views of the sort state and parsed rows.
return {
    tableName: "receivedHeaders",
    add: add,
    exists: exists,
    doSort: doSort,
    computeDeltas: computeDeltas,
    get receivedRows() { return receivedRows; },
    get sortColumn() { return sortColumn; },
    get sortOrder() { return sortOrder; },
    parseHeader: parseHeader, // For testing only
    computeTime: computeTime, // For testing only
    // Text dump: each row stringifies via its own toString (label: value lines).
    toString: function () {
        if (!exists()) return "";
        const ret = ["Received"];
        const rows = [];
        receivedRows.forEach(function (row) {
            rows.push(row);
        });
        if (rows.length) ret.push(rows.join("\n\n"));
        return ret.join("\n");
    }
};
}); | the_stack |
// Resolve the surrounding global object: window in a browser, the worker
// global in a Web Worker, a plain object in Node.
// Fix: the original tested `self instanceof WorkerGlobalScope`, reading the
// `const self` being declared inside its own initializer — a temporal-dead-zone
// ReferenceError in worker contexts. `globalThis` refers to the same object in
// a worker without touching the uninitialized binding.
const self =
  typeof window !== 'undefined'
    ? window // if in browser
    : typeof WorkerGlobalScope !== 'undefined' &&
      globalThis instanceof WorkerGlobalScope
    ? globalThis // if in worker
    : {}; // if in node js
/**
* Prism: Lightweight, robust, elegant syntax highlighting
* MIT license http://www.opensource.org/licenses/mit-license.php/
* @author Lea Verou http://lea.verou.me
*/
export const Prism = (function () {
// Private helper vars
var lang = /\blang(?:uage)?-(?!\*)(\w+)\b/i;
var _ = (self.Prism = {
util: {
encode: function (tokens) {
if (tokens instanceof Token) {
return new Token(
tokens.type,
_.util.encode(tokens.content),
tokens.alias
);
} else if (_.util.type(tokens) === 'Array') {
return tokens.map(_.util.encode);
} else {
return tokens
.replace(/&/g, '&')
.replace(/</g, '<')
.replace(/\u00a0/g, ' ');
}
},
type: function (o) {
return Object.prototype.toString.call(o).match(/\[object (\w+)\]/)[1];
},
// Deep clone a language definition (e.g. to extend it)
clone: function (o) {
var type = _.util.type(o);
switch (type) {
case 'Object':
var clone = {};
for (var key in o) {
if (o.hasOwnProperty(key)) {
clone[key] = _.util.clone(o[key]);
}
}
return clone;
case 'Array':
return o.slice();
}
return o;
},
},
languages: {
  // Clones the grammar registered under `id` and overlays the keys of
  // `redef` onto the clone (the original grammar is left untouched).
  extend: function (id, redef) {
    var lang = _.util.clone(_.languages[id]);
    for (var key in redef) {
      lang[key] = redef[key];
    }
    return lang;
  },
  /**
   * Insert a token before another token in a language literal
   * As this needs to recreate the object (we cannot actually insert before keys in object literals),
   * we cannot just provide an object, we need an object and a key.
   * @param inside The key (or language id) of the parent
   * @param before The key to insert before. If not provided, the function appends instead.
   * @param insert Object with the key/value pairs to insert
   * @param root The object that contains `inside`. If equal to Prism.languages, it can be omitted.
   */
  insertBefore: function (inside, before, insert, root) {
    root = root || _.languages;
    var grammar = root[inside];
    // Two-argument form (inside, insert): append in place, no reordering.
    if (arguments.length == 2) {
      insert = arguments[1];
      for (var newToken in insert) {
        if (insert.hasOwnProperty(newToken)) {
          grammar[newToken] = insert[newToken];
        }
      }
      return grammar;
    }
    // Rebuild the grammar object, splicing `insert` in just before `before`
    // (key order in object literals determines token priority).
    var ret = {};
    for (var token in grammar) {
      if (grammar.hasOwnProperty(token)) {
        if (token == before) {
          for (var newToken in insert) {
            if (insert.hasOwnProperty(newToken)) {
              ret[newToken] = insert[newToken];
            }
          }
        }
        ret[token] = grammar[token];
      }
    }
    // Update references in other language definitions
    _.languages.DFS(_.languages, function (key, value) {
      if (value === root[inside] && key != inside) {
        this[key] = ret;
      }
    });
    return (root[inside] = ret);
  },
  // Traverse a language definition with Depth First Search
  DFS: function (o, callback, type) {
    for (var i in o) {
      if (o.hasOwnProperty(i)) {
        callback.call(o, i, o[i], type || i);
        if (_.util.type(o[i]) === 'Object') {
          _.languages.DFS(o[i], callback);
        } else if (_.util.type(o[i]) === 'Array') {
          _.languages.DFS(o[i], callback, i);
        }
      }
    }
  },
},
// Highlights every code element on the page whose class (or whose
// ancestor's class) names a language.
highlightAll: function (async, callback) {
  var elements = document.querySelectorAll(
    'code[class*="language-"], [class*="language-"] code, code[class*="lang-"], [class*="lang-"] code'
  );
  for (var i = 0, element; (element = elements[i++]); ) {
    _.highlightElement(element, async === true, callback);
  }
},
// Highlights one element in place: detects its language from class names,
// tokenizes its textContent, and replaces its innerHTML with the markup.
// `async` offloads tokenizing to a Web Worker (this same script, re-loaded
// via _.filename); `callback` runs on the element after insertion.
highlightElement: function (element, async, callback) {
  // Find language
  var language,
    grammar,
    parent = element;
  // Walk up until an ancestor carries a language-xxxx / lang-xxxx class.
  while (parent && !lang.test(parent.className)) {
    parent = parent.parentNode;
  }
  if (parent) {
    language = (parent.className.match(lang) || [, ''])[1];
    grammar = _.languages[language];
  }
  // No registered grammar for this language: nothing to do.
  if (!grammar) {
    return;
  }
  // Set language on the element, if not present
  element.className =
    element.className.replace(lang, '').replace(/\s+/g, ' ') +
    ' language-' +
    language;
  // Set language on the parent, for styling
  parent = element.parentNode;
  if (/pre/i.test(parent.nodeName)) {
    parent.className =
      parent.className.replace(lang, '').replace(/\s+/g, ' ') +
      ' language-' +
      language;
  }
  var code = element.textContent;
  if (!code) {
    return;
  }
  // `env` is shared with before-highlight / before-insert / after-highlight
  // hooks, which may mutate it.
  var env = {
    element: element,
    language: language,
    grammar: grammar,
    code: code,
  };
  _.hooks.run('before-highlight', env);
  if (async && self.Worker) {
    var worker = new Worker(_.filename);
    worker.onmessage = function (evt) {
      env.highlightedCode = Token.stringify(JSON.parse(evt.data), language);
      _.hooks.run('before-insert', env);
      env.element.innerHTML = env.highlightedCode;
      callback && callback.call(env.element);
      _.hooks.run('after-highlight', env);
    };
    worker.postMessage(
      JSON.stringify({
        language: env.language,
        code: env.code,
      })
    );
  } else {
    // Synchronous path: tokenize, stringify, and insert immediately.
    env.highlightedCode = _.highlight(env.code, env.grammar, env.language);
    _.hooks.run('before-insert', env);
    env.element.innerHTML = env.highlightedCode;
    callback && callback.call(element);
    _.hooks.run('after-highlight', env);
  }
},
// Tokenizes `text` with `grammar`, HTML-escapes the tokens, and renders
// them to an HTML string (no DOM involved).
highlight: function (text, grammar, language) {
  var tokens = _.tokenize(text, grammar);
  return Token.stringify(_.util.encode(tokens), language);
},
// Splits `text` into a flat array of plain strings and Token objects by
// applying each grammar pattern in declaration order. The `language`
// parameter is accepted but unused in this version.
tokenize: function (text, grammar, language) {
  var Token = _.Token;
  var strarr = [text];
  // A grammar's `rest` key splices another grammar's patterns in at the
  // lowest priority, then is removed (mutates the grammar object).
  var rest = grammar.rest;
  if (rest) {
    for (var token in rest) {
      grammar[token] = rest[token];
    }
    delete grammar.rest;
  }
  tokenloop: for (var token in grammar) {
    if (!grammar.hasOwnProperty(token) || !grammar[token]) {
      continue;
    }
    // A token may map to one pattern or an array of alternatives.
    var patterns = grammar[token];
    patterns = _.util.type(patterns) === 'Array' ? patterns : [patterns];
    for (var j = 0; j < patterns.length; ++j) {
      var pattern = patterns[j],
        inside = pattern.inside,
        lookbehind = !!pattern.lookbehind,
        lookbehindLength = 0,
        alias = pattern.alias;
      pattern = pattern.pattern || pattern;
      for (var i = 0; i < strarr.length; i++) {
        // Don’t cache length as it changes during the loop
        var str = strarr[i];
        if (strarr.length > text.length) {
          // Something went terribly wrong, ABORT, ABORT!
          break tokenloop;
        }
        // Already-tokenized segments are never re-matched.
        if (str instanceof Token) {
          continue;
        }
        pattern.lastIndex = 0;
        var match = pattern.exec(str);
        if (match) {
          // "lookbehind" emulation: group 1 is context, not token content.
          if (lookbehind) {
            lookbehindLength = match[1].length;
          }
          // NOTE(review): `from` is match.index - 1 and the slice bounds
          // compensate with +1; preserved verbatim — verify against upstream
          // Prism before changing this arithmetic.
          var from = match.index - 1 + lookbehindLength,
            match = match[0].slice(lookbehindLength),
            len = match.length,
            to = from + len,
            before = str.slice(0, from + 1),
            after = str.slice(to + 1);
          // Replace strarr[i] with [before?, token, after?] in place.
          var args = [i, 1];
          if (before) {
            args.push(before);
          }
          var wrapped = new Token(
            token,
            inside ? _.tokenize(match, inside) : match,
            alias
          );
          args.push(wrapped);
          if (after) {
            args.push(after);
          }
          Array.prototype.splice.apply(strarr, args);
        }
      }
    }
  }
  return strarr;
},
hooks: {
  // Registered callbacks, keyed by hook name.
  all: {},
  // Registers `callback` to run whenever hook `name` fires.
  add: function (name, callback) {
    var hooks = _.hooks.all;
    hooks[name] = hooks[name] || [];
    hooks[name].push(callback);
  },
  // Invokes every callback registered for `name` with the mutable `env`.
  run: function (name, env) {
    var callbacks = _.hooks.all[name];
    if (!callbacks || !callbacks.length) {
      return;
    }
    for (var i = 0, callback; (callback = callbacks[i++]); ) {
      callback(env);
    }
  },
},
});
// One lexed token: `type` is the grammar key that matched, `content` a
// string or nested array of strings/Tokens, `alias` extra CSS class(es).
var Token = (_.Token = function (type, content, alias) {
  this.type = type;
  this.content = content;
  this.alias = alias;
});
// Renders a token / string / token array to an HTML string; runs the 'wrap'
// hook so plugins can alter tag, classes, and attributes before emission.
Token.stringify = function (o, language, parent) {
  // Plain strings (already encoded by util.encode) pass straight through.
  if (typeof o == 'string') {
    return o;
  }
  if (Object.prototype.toString.call(o) == '[object Array]') {
    return o
      .map(function (element) {
        return Token.stringify(element, language, o);
      })
      .join('');
  }
  var env = {
    type: o.type,
    content: Token.stringify(o.content, language, parent),
    tag: 'span',
    classes: ['token', o.type],
    attributes: {},
    language: language,
    parent: parent,
  };
  if (env.type == 'comment') {
    env.attributes['spellcheck'] = 'true';
  }
  if (o.alias) {
    var aliases = _.util.type(o.alias) === 'Array' ? o.alias : [o.alias];
    Array.prototype.push.apply(env.classes, aliases);
  }
  _.hooks.run('wrap', env);
  var attributes = '';
  for (var name in env.attributes) {
    // NOTE(review): consecutive attributes concatenate with no separating
    // space — confirm against upstream before relying on multi-attribute output.
    attributes += name + '="' + (env.attributes[name] || '') + '"';
  }
  return (
    '<' +
    env.tag +
    ' class="' +
    env.classes.join(' ') +
    '" ' +
    attributes +
    '>' +
    env.content +
    '</' +
    env.tag +
    '>'
  );
};
// Non-browser environments: plain Node gets the bare API returned as-is; a
// Web Worker context instead registers a message-based highlighting service
// (receives {language, code}, posts back the encoded token JSON, then closes).
if (!self.document) {
  if (!self.addEventListener) {
    // in Node.js
    return self.Prism;
  }
  // In worker
  self.addEventListener(
    'message',
    function (evt) {
      var message = JSON.parse(evt.data),
        lang = message.language,
        code = message.code;
      self.postMessage(
        JSON.stringify(_.util.encode(_.tokenize(code, _.languages[lang])))
      );
      self.close();
    },
    false
  );
  return self.Prism;
}
// Get current script and highlight
var script = document.getElementsByTagName('script');
script = script[script.length - 1];
if (script) {
  // Remember our own URL so highlightElement can spawn a worker from it.
  _.filename = script.src;
  // Auto-highlight on DOM ready unless the embedder opted out via data-manual.
  if (document.addEventListener && !script.hasAttribute('data-manual')) {
    document.addEventListener('DOMContentLoaded', _.highlightAll);
  }
}
return self.Prism;
})();
// if (typeof module !== 'undefined' && module.exports) {
// module.exports = Prism;
// }
/* **********************************************
Begin prism-markup.js
********************************************** */
// Grammar for HTML/XML ("markup"): comments, prolog/doctype/cdata, tags
// (with nested attribute tokens), and character entities.
Prism.languages.markup = {
  comment: /<!--[\w\W]*?-->/g,
  prolog: /<\?.+?\?>/,
  doctype: /<!DOCTYPE.+?>/,
  cdata: /<!\[CDATA\[[\w\W]*?]]>/i,
  tag: {
    pattern:
      /<\/?[\w:-]+\s*(?:\s+[\w:-]+(?:=(?:("|')(\\?[\w\W])*?\1|[^\s'">=]+))?\s*)*\/?>/gi,
    inside: {
      tag: {
        pattern: /^<\/?[\w:-]+/i,
        inside: {
          punctuation: /^<\/?/,
          namespace: /^[\w-]+?:/,
        },
      },
      'attr-value': {
        pattern: /=(?:('|")[\w\W]*?(\1)|[^\s>]+)/gi,
        inside: {
          punctuation: /=|>|"/g,
        },
      },
      punctuation: /\/?>/g,
      'attr-name': {
        pattern: /[\w:-]+/g,
        inside: {
          namespace: /^[\w-]+?:/,
        },
      },
    },
  },
  entity: /\&#?[\da-z]{1,8};/gi,
};
// Plugin to make entity title show the real entity, idea by Roman Komarov
// Fix: the extracted copy had the replacement collapsed to a no-op
// ('&' -> '&'); restored to the upstream decode of the leading '&amp;'
// back to '&' so the tooltip shows the raw entity text.
Prism.hooks.add('wrap', function (env) {
  if (env.type === 'entity') {
    env.attributes['title'] = env.content.replace(/&amp;/, '&');
  }
});
/* **********************************************
Begin prism-css.js
********************************************** */
// Grammar for CSS: comments, at-rules, url()s, selectors, declarations.
Prism.languages.css = {
  comment: /\/\*[\w\W]*?\*\//g,
  atrule: {
    pattern: /@[\w-]+?.*?(;|(?=\s*{))/gi,
    inside: {
      punctuation: /[;:]/g,
    },
  },
  url: /url\((["']?).*?\1\)/gi,
  selector: /[^\{\}\s][^\{\};]*(?=\s*\{)/g,
  property: /(\b|\B)[\w-]+(?=\s*:)/gi,
  string: /("|')(\\?.)*?\1/g,
  important: /\B!important\b/gi,
  punctuation: /[\{\};:]/g,
  function: /[-a-z0-9]+(?=\()/gi,
};
// When the markup grammar is loaded, teach it to highlight embedded
// <style> blocks and inline style="" attributes as CSS.
if (Prism.languages.markup) {
  Prism.languages.insertBefore('markup', 'tag', {
    style: {
      pattern: /<style[\w\W]*?>[\w\W]*?<\/style>/gi,
      inside: {
        tag: {
          pattern: /<style[\w\W]*?>|<\/style>/gi,
          inside: Prism.languages.markup.tag.inside,
        },
        rest: Prism.languages.css,
      },
      alias: 'language-css',
    },
  });
  // Four-argument form: insert into markup's tag.inside grammar.
  Prism.languages.insertBefore(
    'inside',
    'attr-value',
    {
      'style-attr': {
        pattern: /\s*style=("|').+?\1/gi,
        inside: {
          'attr-name': {
            pattern: /^\s*style/gi,
            inside: Prism.languages.markup.tag.inside,
          },
          punctuation: /^\s*=\s*['"]|['"]\s*$/,
          'attr-value': {
            pattern: /.+/gi,
            inside: Prism.languages.css,
          },
        },
        alias: 'language-css',
      },
    },
    Prism.languages.markup.tag
  );
}
/* **********************************************
Begin prism-clike.js
********************************************** */
// Base grammar shared by C-like languages (JavaScript extends this below).
Prism.languages.clike = {
  comment: [
    {
      pattern: /(^|[^\\])\/\*[\w\W]*?\*\//g,
      lookbehind: true,
    },
    {
      pattern: /(^|[^\\:])\/\/.*?(\r?\n|$)/g,
      lookbehind: true,
    },
  ],
  string: /("|')(\\?.)*?\1/g,
  'class-name': {
    pattern:
      /((?:(?:class|interface|extends|implements|trait|instanceof|new)\s+)|(?:catch\s+\())[a-z0-9_\.\\]+/gi,
    lookbehind: true,
    inside: {
      punctuation: /(\.|\\)/,
    },
  },
  keyword:
    /\b(if|else|while|do|for|return|in|instanceof|function|new|try|throw|catch|finally|null|break|continue)\b/g,
  boolean: /\b(true|false)\b/g,
  function: {
    pattern: /[a-z0-9_]+\(/gi,
    inside: {
      punctuation: /\(/,
    },
  },
  number: /\b-?(0x[\dA-Fa-f]+|\d*\.?\d+([Ee]-?\d+)?)\b/g,
  operator: /[-+]{1,2}|!|<=?|>=?|={1,3}|&{1,2}|\|?\||\?|\*|\/|\~|\^|\%/g,
  ignore: /&(lt|gt|amp);/gi,
  punctuation: /[{}[\];(),.:]/g,
};
/* **********************************************
Begin prism-javascript.js
********************************************** */
// JavaScript grammar: clike extended with the full keyword list and
// NaN/Infinity number forms.
Prism.languages.javascript = Prism.languages.extend('clike', {
  keyword:
    /\b(break|case|catch|class|const|continue|debugger|default|delete|do|else|enum|export|extends|false|finally|for|function|get|if|implements|import|in|instanceof|interface|let|new|null|package|private|protected|public|return|set|static|super|switch|this|throw|true|try|typeof|var|void|while|with|yield)\b/g,
  number: /\b-?(0x[\dA-Fa-f]+|\d*\.?\d+([Ee]-?\d+)?|NaN|-?Infinity)\b/g,
});
// Regex literals must match before keywords so their bodies are not tokenized.
Prism.languages.insertBefore('javascript', 'keyword', {
  regex: {
    pattern:
      /(^|[^/])\/(?!\/)(\[.+?]|\\.|[^/\r\n])+\/[gim]{0,3}(?=\s*($|[\r\n,.;})]))/g,
    lookbehind: true,
  },
});
// When the markup grammar is loaded, teach it to highlight embedded
// <script> blocks as JavaScript.
if (Prism.languages.markup) {
  Prism.languages.insertBefore('markup', 'tag', {
    script: {
      pattern: /<script[\w\W]*?>[\w\W]*?<\/script>/gi,
      inside: {
        tag: {
          pattern: /<script[\w\W]*?>|<\/script>/gi,
          inside: Prism.languages.markup.tag.inside,
        },
        rest: Prism.languages.javascript,
      },
      alias: 'language-javascript',
    },
  });
}
/* **********************************************
Begin prism-file-highlight.js
********************************************** */
// File-highlight plugin: for every <pre data-src="...">, fetch the file via
// XHR, infer its language from the file extension, and highlight the result
// in a generated <code> child.
(function () {
  if (!self.Prism || !self.document || !document.querySelector) {
    return;
  }
  // Extension -> language-id overrides; unlisted extensions are used as-is.
  var Extensions = {
    js: 'javascript',
    html: 'markup',
    svg: 'markup',
    xml: 'markup',
    py: 'python',
    rb: 'ruby',
  };
  Array.prototype.slice
    .call(document.querySelectorAll('pre[data-src]'))
    .forEach(function (pre) {
      var src = pre.getAttribute('data-src');
      var extension = (src.match(/\.(\w+)$/) || [, ''])[1];
      var language = Extensions[extension] || extension;
      // Replace the <pre>'s content with a placeholder <code> element.
      var code = document.createElement('code');
      code.className = 'language-' + language;
      pre.textContent = '';
      code.textContent = 'Loading…';
      pre.appendChild(code);
      var xhr = new XMLHttpRequest();
      xhr.open('GET', src, true);
      xhr.onreadystatechange = function () {
        if (xhr.readyState == 4) {
          if (xhr.status < 400 && xhr.responseText) {
            code.textContent = xhr.responseText;
            Prism.highlightElement(code);
          } else if (xhr.status >= 400) {
            code.textContent =
              '✖ Error ' +
              xhr.status +
              ' while fetching file: ' +
              xhr.statusText;
          } else {
            code.textContent = '✖ Error: File does not exist or is empty';
          }
        }
      };
      xhr.send(null);
    });
})();
import BScroll, { Boundary } from '@better-scroll/core'
import Wheel from '../index'
jest.mock('@better-scroll/core')
const createWheel = (wheelOptions: Object) => {
const wrapper = document.createElement('div')
const content = document.createElement('div')
wrapper.appendChild(content)
const scroll = new BScroll(wrapper, { wheel: wheelOptions })
const wheel = new Wheel(scroll)
return { scroll, wheel }
}
// Copies every enumerable property of `obj` onto the wheel instance (casting
// away visibility so tests can stub private plugin state); returns the same
// wheel for chaining.
// Fix: constrain the generic with `object` instead of the boxed `Object`
// wrapper type (TS best practice); for..in assignment is kept deliberately
// so inherited enumerable keys behave exactly as before.
const addPropertiesToWheel = <T extends object>(wheel: Wheel, obj: T) => {
  for (const key in obj) {
    ;(wheel as any)[key] = obj[key]
  }
  return wheel
}
// Unit tests for the Wheel plugin. BScroll is jest-mocked (see jest.mock at
// the top of the file), so hooks, behaviors and scroller internals here are
// mock implementations; assertions run against the mock's recorded calls.
describe('wheel plugin tests', () => {
  let scroll: BScroll
  let wheel: Wheel
  beforeEach(() => {
    // Fresh mocked scroll + plugin instance for every test.
    const created = createWheel({})
    // create DOM
    wheel = created.wheel
    scroll = created.scroll
  })
  afterEach(() => {
    jest.clearAllMocks()
  })
  it('should proxy properties to BScroll instance', () => {
    // The plugin must expose these three methods on the BScroll facade.
    expect(scroll.proxy).toBeCalled()
    expect(scroll.proxy).toHaveBeenLastCalledWith([
      {
        key: 'wheelTo',
        sourceKey: 'plugins.wheel.wheelTo'
      },
      {
        key: 'getSelectedIndex',
        sourceKey: 'plugins.wheel.getSelectedIndex'
      },
      {
        key: 'restorePosition',
        sourceKey: 'plugins.wheel.restorePosition'
      }
    ])
  })
  it('should handle options', () => {
    // Defaults applied when the plugin is created with an empty options object.
    expect(wheel.options.rotate).toBe(25)
    expect(wheel.options.adjustTime).toBe(400)
    expect(wheel.options.selectedIndex).toBe(0)
    expect(wheel.options.wheelWrapperClass).toBe('wheel-scroll')
    expect(wheel.options.wheelItemClass).toBe('wheel-item')
    expect(wheel.options.wheelDisabledItemClass).toBe('wheel-disabled-item')
  })
  it('should refresh BehaviorX and BehaviorY boundary', () => {
    const { scrollBehaviorX, scrollBehaviorY } = scroll.scroller
    expect(scrollBehaviorX.refresh).toBeCalled()
    expect(scrollBehaviorY.refresh).toBeCalled()
  })
  it('should handle selectedIndex', () => {
    // default
    expect(wheel.selectedIndex).toBe(0)
    // specified
    const { wheel: wheel2 } = createWheel({
      selectedIndex: 2
    })
    expect(wheel2.selectedIndex).toBe(2)
  })
  it('should trigger scroll.scrollTo when invoking wheelTo method', () => {
    addPropertiesToWheel(wheel, {
      itemHeight: 40
    })
    // Index 0 with itemHeight 40 maps to y = -0 (hence the negative-zero below).
    wheel.wheelTo(0)
    expect(scroll.scrollTo).toBeCalled()
    expect(scroll.scrollTo).toHaveBeenLastCalledWith(0, -0, 0, undefined)
  })
  // NOTE(review): 'seletedIndex' in the test name below is a typo for
  // 'selectedIndex'; the name is a runtime string, so it is left unchanged here.
  it('should return seletedIndex when invoking getSelectedIndex', () => {
    const { wheel: wheel2 } = createWheel({
      selectedIndex: 2
    })
    expect(wheel2.getSelectedIndex()).toBe(2)
  })
  it('should support scrollTo somewhere by selectedIndex when initialized', () => {
    addPropertiesToWheel(wheel, {
      selectedIndex: 1,
      itemHeight: 50
    })
    // ('postion' is a pre-existing variable-name typo; kept as-is.)
    const postion = {
      x: 100,
      y: 100
    }
    // manually trigger
    scroll.hooks.trigger(scroll.hooks.eventTypes.beforeInitialScrollTo, postion)
    // The hook rewrites the initial position to selectedIndex * itemHeight.
    expect(postion).toMatchObject({
      x: 0,
      y: -50
    })
  })
  it('should invoke wheelTo when scroll.scroller trigger checkClick hook', () => {
    let div = document.createElement('div')
    addPropertiesToWheel(wheel, {
      items: [div],
      target: div,
      wheelTo: jest.fn()
    })
    scroll.scroller.hooks.trigger('checkClick')
    expect(wheel.wheelTo).toBeCalled()
    // 400 is the default adjustTime.
    expect(wheel.wheelTo).toHaveBeenCalledWith(0, 400, expect.anything())
    // if target element is not found
    addPropertiesToWheel(wheel, {
      items: [div],
      target: null,
      wheelTo: jest.fn()
    })
    // Returning true from the hook prevents the default click handling.
    let ret = scroll.scroller.hooks.trigger('checkClick')
    expect(ret).toBe(true)
  })
  it('should invoke findNearestValidWheel when scroll.scroller trigger scrollTo hook', () => {
    let endPoint = { x: 0, y: -20 }
    let div = document.createElement('div')
    addPropertiesToWheel(wheel, {
      items: [div],
      target: div,
      itemHeight: 40,
      wheelTo: jest.fn()
    })
    scroll.scroller.hooks.trigger('scrollTo', endPoint)
    // -20 is rounded to the nearest item boundary (index 0 -> -0).
    expect(endPoint.y).toBe(-0)
  })
  it('should change position when scroll.scroller trigger scrollToElement hook', () => {
    let div = document.createElement('div')
    addPropertiesToWheel(wheel, {
      items: [div],
      target: div,
      itemHeight: 40
    })
    let pos = {
      top: -20,
      left: 0
    }
    // The element must carry the wheel item class for the hook to adjust it.
    div.className = 'wheel-item'
    scroll.scroller.hooks.trigger('scrollToElement', div, pos)
    expect(pos).toEqual({
      top: -0,
      left: 0
    })
    // mismatch target element
    let div1 = document.createElement('div')
    let pos1 = {
      top: -40,
      left: 0
    }
    addPropertiesToWheel(wheel, {
      items: [div1],
      target: div1,
      itemHeight: 40
    })
    // Without the item class the hook bails out (returns true, pos untouched).
    let ret = scroll.scroller.hooks.trigger('scrollToElement', div1, pos1)
    expect(ret).toBe(true)
    expect(pos1).toMatchObject({
      top: -40,
      left: 0
    })
  })
  it('should change target when scroll.scroller.actionsHandler trigger beforeStart hook', () => {
    let e = {} as any
    let div = document.createElement('div')
    e.target = div
    scroll.scroller.actionsHandler.hooks.trigger('beforeStart', e)
    expect(wheel.target).toEqual(div)
  })
  it('should modify boundary when scrollBehaviorY or scrollBehaviorX computedBoundary', () => {
    let div = document.createElement('div')
    let cachedXBoundary = {} as Boundary
    let cachedYBoundary = {} as Boundary
    addPropertiesToWheel(wheel, {
      items: [div, div],
      // NOTE(review): 'itemHight' is likely a typo for 'itemHeight'. The
      // assertion below still holds (maxScrollPos derives from contentSize set
      // further down) — confirm whether this stub was meant to take effect.
      itemHight: 50
    })
    const { scrollBehaviorX, scrollBehaviorY } = scroll.scroller
    // append two element
    scroll.scroller.content.appendChild(document.createElement('div'))
    scroll.scroller.content.appendChild(document.createElement('div'))
    scrollBehaviorY.contentSize = 100
    // manually trigger
    scrollBehaviorX.hooks.trigger(
      scrollBehaviorX.hooks.eventTypes.computeBoundary,
      cachedXBoundary
    )
    scrollBehaviorY.hooks.trigger(
      scrollBehaviorY.hooks.eventTypes.computeBoundary,
      cachedYBoundary
    )
    // Horizontal scrolling is locked; vertical boundary spans all but one item.
    expect(cachedXBoundary).toMatchObject({
      minScrollPos: 0,
      maxScrollPos: 0
    })
    expect(cachedYBoundary).toMatchObject({
      minScrollPos: 0,
      maxScrollPos: -50
    })
  })
  it('should change momentumInfo when scroll.scroller.scrollBehaviorY trigger momentum or end hook', () => {
    let momentumInfo = {
      destination: 0,
      rate: 15
    }
    let div = document.createElement('div')
    addPropertiesToWheel(wheel, {
      items: [div],
      target: div,
      itemHeight: 40
    })
    // Momentum destination snaps to an item boundary and the rate is clamped.
    scroll.scroller.scrollBehaviorY.hooks.trigger('momentum', momentumInfo)
    expect(momentumInfo).toEqual({
      destination: -0,
      rate: 4
    })
    scroll.scroller.scrollBehaviorY.currentPos = -20
    scroll.scroller.scrollBehaviorY.hooks.trigger('end', momentumInfo)
    expect(momentumInfo).toEqual({
      destination: -0,
      rate: 4,
      duration: 400
    })
    scroll.scroller.scrollBehaviorY.hooks.trigger('momentum', momentumInfo, 800)
    expect(momentumInfo).toEqual({
      destination: -0,
      rate: 4,
      duration: 400
    })
  })
  it('scroll.hooks.refresh ', () => {
    let newContent = document.createElement('p')
    let div = document.createElement('div')
    addPropertiesToWheel(wheel, {
      items: [div],
      target: div,
      itemHeight: 40
    })
    // After refresh the wheel restores the configured selectedIndex position.
    wheel.options.selectedIndex = 1
    scroll.hooks.trigger(scroll.hooks.eventTypes.refresh, newContent)
    expect(scroll.scrollTo).toBeCalledWith(0, -40, 0, undefined)
  })
  it('scroll.scroller.animater.hooks.time ', () => {
    let div = document.createElement('div')
    addPropertiesToWheel(wheel, {
      items: [div]
    })
    // The animation duration is forwarded to every item's CSS transition.
    const animater = scroll.scroller.animater
    animater.hooks.trigger(animater.hooks.eventTypes.time, 100)
    expect(div.style.transitionDuration).toBe('100ms')
  })
  it('scroll.scroller.animater.hooks.timeFunction ', () => {
    let div = document.createElement('div')
    addPropertiesToWheel(wheel, {
      items: [div]
    })
    // The easing function is forwarded to every item's CSS transition.
    const animater = scroll.scroller.animater
    animater.hooks.trigger(
      animater.hooks.eventTypes.timeFunction,
      'cubic-bezier(0.23, 1, 0.32, 1)'
    )
    expect(div.style.transitionTimingFunction).toBe(
      'cubic-bezier(0.23, 1, 0.32, 1)'
    )
  })
  it('scroll.scroller.animater.hooks.callStop', () => {
    let div1 = document.createElement('div')
    let div2 = document.createElement('div')
    addPropertiesToWheel(wheel, {
      items: [div1, div2],
      itemHeight: 40,
      wheelItemsAllDisabled: false
    })
    // Stopping mid-animation at y = -41 snaps back to the nearest item (-40).
    scroll.y = -41
    scroll.maxScrollY = -80
    scroll.scroller.animater.hooks.trigger('callStop')
    expect(scroll.scrollTo).toBeCalledWith(0, -40, 0, undefined)
  })
  it('scroll.scroller.animater.translater.hooks.translate', () => {
    let div = document.createElement('div')
    addPropertiesToWheel(wheel, {
      items: [div],
      itemHeight: 40,
      wheelItemsAllDisabled: false
    })
    // Translating within the first item keeps selectedIndex at 0.
    const translater = scroll.scroller.animater.translater
    translater.hooks.trigger(translater.hooks.eventTypes.translate, {
      x: 0,
      y: -20
    })
    expect(wheel.selectedIndex).toEqual(0)
  })
  it('scroll.scroller.hooks.minDistanceScroll ', () => {
    let div = document.createElement('div')
    addPropertiesToWheel(wheel, {
      items: [div]
    })
    // The hook must clear the force-stopped flag left by a cancelled animation.
    const scroller = scroll.scroller
    scroller.animater.forceStopped = true
    scroller.hooks.trigger(scroller.hooks.eventTypes.minDistanceScroll)
    expect(scroller.animater.forceStopped).toBe(false)
  })
  it('scrollEnd event', () => {
    let div1 = document.createElement('div')
    let div2 = document.createElement('div')
    addPropertiesToWheel(wheel, {
      itemHeight: 40,
      items: [div1, div2]
    })
    scroll.maxScrollY = -80
    scroll.scroller.animater.forceStopped = true
    // stopped from an animation,
    // prevent user's scrollEnd callback triggered twice
    const ret = scroll.trigger(scroll.eventTypes.scrollEnd, { y: 0 })
    expect(ret).toBe(true)
    wheel.isAdjustingPosition = true
    // update selectedIndex
    scroll.trigger(scroll.eventTypes.scrollEnd, { y: -41 })
    expect(wheel.getSelectedIndex()).toBe(1)
    expect(wheel.isAdjustingPosition).toBe(false)
  })
  it('wheel.restorePosition()', () => {
    addPropertiesToWheel(wheel, {
      itemHeight: 40
    })
    // simulate bs is scrolling
    scroll.pending = true
    wheel.restorePosition()
    // A pending animation is cancelled before snapping back to the selection.
    expect(scroll.scroller.animater.clearTimer).toBeCalled()
    expect(scroll.scrollTo).toBeCalledWith(0, -0, 0, undefined)
  })
  it('should support disable wheel items', () => {
    let div1 = document.createElement('div')
    let div2 = document.createElement('div')
    const scroller = scroll.scroller
    const position = { y: -41 }
    addPropertiesToWheel(wheel, {
      items: [div1, div2],
      itemHeight: 40,
      wheelItemsAllDisabled: false
    })
    scroll.y = -41
    scroll.maxScrollY = -80
    // A disabled second item means -41 resolves back to the first item (-0).
    div2.className = 'wheel-disabled-item'
    scroller.hooks.trigger(scroller.hooks.eventTypes.scrollTo, position)
    expect(position.y).toBe(-0)
    // With every item disabled the wheel still falls back to the first item.
    div1.className = 'wheel-disabled-item'
    wheel.wheelItemsAllDisabled = true
    scroller.hooks.trigger(scroller.hooks.eventTypes.scrollTo, position)
    expect(position.y).toBe(-0)
    let div3 = document.createElement('div')
    let position3 = {
      y: -39
    }
    addPropertiesToWheel(wheel, {
      items: [div1, div2, div3],
      itemHeight: 40,
      wheelItemsAllDisabled: false
    })
    // -39 skips the two disabled items and lands on the third one (-80).
    scroller.hooks.trigger(scroller.hooks.eventTypes.scrollTo, position3)
    expect(position3.y).toBe(-80)
  })
})
import {Request} from '../lib/request';
import {Response} from '../lib/response';
import {AWSError} from '../lib/error';
import {Service} from '../lib/service';
import {ServiceConfigurationOptions} from '../lib/service';
import {ConfigBase as Config} from '../lib/config-base';
// Empty placeholder for the DOM Blob type so these generated declarations
// compile without the DOM lib (standard AWS SDK v2 typings convention).
interface Blob {}
/**
 * Generated service client typings for AWS Backup Gateway (AWS SDK for
 * JavaScript v2 style). Every API operation is declared twice: once taking a
 * params object plus optional callback, and once callback-only, matching the
 * SDK's calling convention. All operations return a Request you can `promise()`.
 */
declare class BackupGateway extends Service {
  /**
   * Constructs a service object. This object has one method for each API operation.
   */
  constructor(options?: BackupGateway.Types.ClientConfiguration)
  config: Config & BackupGateway.Types.ClientConfiguration;
  /**
   * Associates a backup gateway with your server. After you complete the association process, you can back up and restore your VMs through the gateway.
   */
  associateGatewayToServer(params: BackupGateway.Types.AssociateGatewayToServerInput, callback?: (err: AWSError, data: BackupGateway.Types.AssociateGatewayToServerOutput) => void): Request<BackupGateway.Types.AssociateGatewayToServerOutput, AWSError>;
  /**
   * Associates a backup gateway with your server. After you complete the association process, you can back up and restore your VMs through the gateway.
   */
  associateGatewayToServer(callback?: (err: AWSError, data: BackupGateway.Types.AssociateGatewayToServerOutput) => void): Request<BackupGateway.Types.AssociateGatewayToServerOutput, AWSError>;
  /**
   * Creates a backup gateway. After you create a gateway, you can associate it with a server using the AssociateGatewayToServer operation.
   */
  createGateway(params: BackupGateway.Types.CreateGatewayInput, callback?: (err: AWSError, data: BackupGateway.Types.CreateGatewayOutput) => void): Request<BackupGateway.Types.CreateGatewayOutput, AWSError>;
  /**
   * Creates a backup gateway. After you create a gateway, you can associate it with a server using the AssociateGatewayToServer operation.
   */
  createGateway(callback?: (err: AWSError, data: BackupGateway.Types.CreateGatewayOutput) => void): Request<BackupGateway.Types.CreateGatewayOutput, AWSError>;
  /**
   * Deletes a backup gateway.
   */
  deleteGateway(params: BackupGateway.Types.DeleteGatewayInput, callback?: (err: AWSError, data: BackupGateway.Types.DeleteGatewayOutput) => void): Request<BackupGateway.Types.DeleteGatewayOutput, AWSError>;
  /**
   * Deletes a backup gateway.
   */
  deleteGateway(callback?: (err: AWSError, data: BackupGateway.Types.DeleteGatewayOutput) => void): Request<BackupGateway.Types.DeleteGatewayOutput, AWSError>;
  /**
   * Deletes a hypervisor.
   */
  deleteHypervisor(params: BackupGateway.Types.DeleteHypervisorInput, callback?: (err: AWSError, data: BackupGateway.Types.DeleteHypervisorOutput) => void): Request<BackupGateway.Types.DeleteHypervisorOutput, AWSError>;
  /**
   * Deletes a hypervisor.
   */
  deleteHypervisor(callback?: (err: AWSError, data: BackupGateway.Types.DeleteHypervisorOutput) => void): Request<BackupGateway.Types.DeleteHypervisorOutput, AWSError>;
  /**
   * Disassociates a backup gateway from the specified server. After the disassociation process finishes, the gateway can no longer access the virtual machines on the server.
   */
  disassociateGatewayFromServer(params: BackupGateway.Types.DisassociateGatewayFromServerInput, callback?: (err: AWSError, data: BackupGateway.Types.DisassociateGatewayFromServerOutput) => void): Request<BackupGateway.Types.DisassociateGatewayFromServerOutput, AWSError>;
  /**
   * Disassociates a backup gateway from the specified server. After the disassociation process finishes, the gateway can no longer access the virtual machines on the server.
   */
  disassociateGatewayFromServer(callback?: (err: AWSError, data: BackupGateway.Types.DisassociateGatewayFromServerOutput) => void): Request<BackupGateway.Types.DisassociateGatewayFromServerOutput, AWSError>;
  /**
   * Connect to a hypervisor by importing its configuration.
   */
  importHypervisorConfiguration(params: BackupGateway.Types.ImportHypervisorConfigurationInput, callback?: (err: AWSError, data: BackupGateway.Types.ImportHypervisorConfigurationOutput) => void): Request<BackupGateway.Types.ImportHypervisorConfigurationOutput, AWSError>;
  /**
   * Connect to a hypervisor by importing its configuration.
   */
  importHypervisorConfiguration(callback?: (err: AWSError, data: BackupGateway.Types.ImportHypervisorConfigurationOutput) => void): Request<BackupGateway.Types.ImportHypervisorConfigurationOutput, AWSError>;
  /**
   * Lists backup gateways owned by an Amazon Web Services account in an Amazon Web Services Region. The returned list is ordered by gateway Amazon Resource Name (ARN).
   */
  listGateways(params: BackupGateway.Types.ListGatewaysInput, callback?: (err: AWSError, data: BackupGateway.Types.ListGatewaysOutput) => void): Request<BackupGateway.Types.ListGatewaysOutput, AWSError>;
  /**
   * Lists backup gateways owned by an Amazon Web Services account in an Amazon Web Services Region. The returned list is ordered by gateway Amazon Resource Name (ARN).
   */
  listGateways(callback?: (err: AWSError, data: BackupGateway.Types.ListGatewaysOutput) => void): Request<BackupGateway.Types.ListGatewaysOutput, AWSError>;
  /**
   * Lists your hypervisors.
   */
  listHypervisors(params: BackupGateway.Types.ListHypervisorsInput, callback?: (err: AWSError, data: BackupGateway.Types.ListHypervisorsOutput) => void): Request<BackupGateway.Types.ListHypervisorsOutput, AWSError>;
  /**
   * Lists your hypervisors.
   */
  listHypervisors(callback?: (err: AWSError, data: BackupGateway.Types.ListHypervisorsOutput) => void): Request<BackupGateway.Types.ListHypervisorsOutput, AWSError>;
  /**
   * Lists the tags applied to the resource identified by its Amazon Resource Name (ARN).
   */
  listTagsForResource(params: BackupGateway.Types.ListTagsForResourceInput, callback?: (err: AWSError, data: BackupGateway.Types.ListTagsForResourceOutput) => void): Request<BackupGateway.Types.ListTagsForResourceOutput, AWSError>;
  /**
   * Lists the tags applied to the resource identified by its Amazon Resource Name (ARN).
   */
  listTagsForResource(callback?: (err: AWSError, data: BackupGateway.Types.ListTagsForResourceOutput) => void): Request<BackupGateway.Types.ListTagsForResourceOutput, AWSError>;
  /**
   * Lists your virtual machines.
   */
  listVirtualMachines(params: BackupGateway.Types.ListVirtualMachinesInput, callback?: (err: AWSError, data: BackupGateway.Types.ListVirtualMachinesOutput) => void): Request<BackupGateway.Types.ListVirtualMachinesOutput, AWSError>;
  /**
   * Lists your virtual machines.
   */
  listVirtualMachines(callback?: (err: AWSError, data: BackupGateway.Types.ListVirtualMachinesOutput) => void): Request<BackupGateway.Types.ListVirtualMachinesOutput, AWSError>;
  /**
   * Set the maintenance start time for a gateway.
   */
  putMaintenanceStartTime(params: BackupGateway.Types.PutMaintenanceStartTimeInput, callback?: (err: AWSError, data: BackupGateway.Types.PutMaintenanceStartTimeOutput) => void): Request<BackupGateway.Types.PutMaintenanceStartTimeOutput, AWSError>;
  /**
   * Set the maintenance start time for a gateway.
   */
  putMaintenanceStartTime(callback?: (err: AWSError, data: BackupGateway.Types.PutMaintenanceStartTimeOutput) => void): Request<BackupGateway.Types.PutMaintenanceStartTimeOutput, AWSError>;
  /**
   * Tag the resource.
   */
  tagResource(params: BackupGateway.Types.TagResourceInput, callback?: (err: AWSError, data: BackupGateway.Types.TagResourceOutput) => void): Request<BackupGateway.Types.TagResourceOutput, AWSError>;
  /**
   * Tag the resource.
   */
  tagResource(callback?: (err: AWSError, data: BackupGateway.Types.TagResourceOutput) => void): Request<BackupGateway.Types.TagResourceOutput, AWSError>;
  /**
   * Tests your hypervisor configuration to validate that backup gateway can connect with the hypervisor and its resources.
   */
  testHypervisorConfiguration(params: BackupGateway.Types.TestHypervisorConfigurationInput, callback?: (err: AWSError, data: BackupGateway.Types.TestHypervisorConfigurationOutput) => void): Request<BackupGateway.Types.TestHypervisorConfigurationOutput, AWSError>;
  /**
   * Tests your hypervisor configuration to validate that backup gateway can connect with the hypervisor and its resources.
   */
  testHypervisorConfiguration(callback?: (err: AWSError, data: BackupGateway.Types.TestHypervisorConfigurationOutput) => void): Request<BackupGateway.Types.TestHypervisorConfigurationOutput, AWSError>;
  /**
   * Removes tags from the resource.
   */
  untagResource(params: BackupGateway.Types.UntagResourceInput, callback?: (err: AWSError, data: BackupGateway.Types.UntagResourceOutput) => void): Request<BackupGateway.Types.UntagResourceOutput, AWSError>;
  /**
   * Removes tags from the resource.
   */
  untagResource(callback?: (err: AWSError, data: BackupGateway.Types.UntagResourceOutput) => void): Request<BackupGateway.Types.UntagResourceOutput, AWSError>;
  /**
   * Updates a gateway's name. Specify which gateway to update using the Amazon Resource Name (ARN) of the gateway in your request.
   */
  updateGatewayInformation(params: BackupGateway.Types.UpdateGatewayInformationInput, callback?: (err: AWSError, data: BackupGateway.Types.UpdateGatewayInformationOutput) => void): Request<BackupGateway.Types.UpdateGatewayInformationOutput, AWSError>;
  /**
   * Updates a gateway's name. Specify which gateway to update using the Amazon Resource Name (ARN) of the gateway in your request.
   */
  updateGatewayInformation(callback?: (err: AWSError, data: BackupGateway.Types.UpdateGatewayInformationOutput) => void): Request<BackupGateway.Types.UpdateGatewayInformationOutput, AWSError>;
  /**
   * Updates a hypervisor metadata, including its host, username, and password. Specify which hypervisor to update using the Amazon Resource Name (ARN) of the hypervisor in your request.
   */
  updateHypervisor(params: BackupGateway.Types.UpdateHypervisorInput, callback?: (err: AWSError, data: BackupGateway.Types.UpdateHypervisorOutput) => void): Request<BackupGateway.Types.UpdateHypervisorOutput, AWSError>;
  /**
   * Updates a hypervisor metadata, including its host, username, and password. Specify which hypervisor to update using the Amazon Resource Name (ARN) of the hypervisor in your request.
   */
  updateHypervisor(callback?: (err: AWSError, data: BackupGateway.Types.UpdateHypervisorOutput) => void): Request<BackupGateway.Types.UpdateHypervisorOutput, AWSError>;
}
// Shape/type declarations for the BackupGateway client (generated). Three
// generated doc comments below were corrected: DayOfMonth (weekday range text
// belonged to DayOfWeek), ImportHypervisorConfigurationOutput ("disassociated"
// was a copy-paste from Disassociate*), and Tag.Value ("key part" -> "value part").
declare namespace BackupGateway {
  export type ActivationKey = string;
  export interface AssociateGatewayToServerInput {
    /**
     * The Amazon Resource Name (ARN) of the gateway. Use the ListGateways operation to return a list of gateways for your account and Amazon Web Services Region.
     */
    GatewayArn: GatewayArn;
    /**
     * The Amazon Resource Name (ARN) of the server that hosts your virtual machines.
     */
    ServerArn: ServerArn;
  }
  export interface AssociateGatewayToServerOutput {
    /**
     * The Amazon Resource Name (ARN) of a gateway.
     */
    GatewayArn?: GatewayArn;
  }
  export interface CreateGatewayInput {
    /**
     * The activation key of the created gateway.
     */
    ActivationKey: ActivationKey;
    /**
     * The display name of the created gateway.
     */
    GatewayDisplayName: Name;
    /**
     * The type of created gateway.
     */
    GatewayType: GatewayType;
    /**
     * A list of up to 50 tags to assign to the gateway. Each tag is a key-value pair.
     */
    Tags?: Tags;
  }
  export interface CreateGatewayOutput {
    /**
     * The Amazon Resource Name (ARN) of the gateway you create.
     */
    GatewayArn?: GatewayArn;
  }
  export type DayOfMonth = number;
  export type DayOfWeek = number;
  export interface DeleteGatewayInput {
    /**
     * The Amazon Resource Name (ARN) of the gateway to delete.
     */
    GatewayArn: GatewayArn;
  }
  export interface DeleteGatewayOutput {
    /**
     * The Amazon Resource Name (ARN) of the gateway you deleted.
     */
    GatewayArn?: GatewayArn;
  }
  export interface DeleteHypervisorInput {
    /**
     * The Amazon Resource Name (ARN) of the hypervisor to delete.
     */
    HypervisorArn: ServerArn;
  }
  export interface DeleteHypervisorOutput {
    /**
     * The Amazon Resource Name (ARN) of the hypervisor you deleted.
     */
    HypervisorArn?: ServerArn;
  }
  export interface DisassociateGatewayFromServerInput {
    /**
     * The Amazon Resource Name (ARN) of the gateway to disassociate.
     */
    GatewayArn: GatewayArn;
  }
  export interface DisassociateGatewayFromServerOutput {
    /**
     * The Amazon Resource Name (ARN) of the gateway you disassociated.
     */
    GatewayArn?: GatewayArn;
  }
  export interface Gateway {
    /**
     * The Amazon Resource Name (ARN) of the gateway. Use the ListGateways operation to return a list of gateways for your account and Amazon Web Services Region.
     */
    GatewayArn?: GatewayArn;
    /**
     * The display name of the gateway.
     */
    GatewayDisplayName?: Name;
    /**
     * The type of the gateway.
     */
    GatewayType?: GatewayType;
    /**
     * The hypervisor ID of the gateway.
     */
    HypervisorId?: HypervisorId;
    /**
     * The last time Backup gateway communicated with the gateway, in Unix format and UTC time.
     */
    LastSeenTime?: Time;
  }
  export type GatewayArn = string;
  export type GatewayType = "BACKUP_VM"|string;
  export type Gateways = Gateway[];
  export type Host = string;
  export type HourOfDay = number;
  export interface Hypervisor {
    /**
     * The server host of the hypervisor. This can be either an IP address or a fully-qualified domain name (FQDN).
     */
    Host?: Host;
    /**
     * The Amazon Resource Name (ARN) of the hypervisor.
     */
    HypervisorArn?: ServerArn;
    /**
     * The Amazon Resource Name (ARN) of the Key Management Service used to encrypt the hypervisor.
     */
    KmsKeyArn?: KmsKeyArn;
    /**
     * The name of the hypervisor.
     */
    Name?: Name;
    /**
     * The state of the hypervisor.
     */
    State?: HypervisorState;
  }
  export type HypervisorId = string;
  export type HypervisorState = "PENDING"|"ONLINE"|"OFFLINE"|"ERROR"|string;
  export type Hypervisors = Hypervisor[];
  export interface ImportHypervisorConfigurationInput {
    /**
     * The server host of the hypervisor. This can be either an IP address or a fully-qualified domain name (FQDN).
     */
    Host: Host;
    /**
     * The Key Management Service for the hypervisor.
     */
    KmsKeyArn?: KmsKeyArn;
    /**
     * The name of the hypervisor.
     */
    Name: Name;
    /**
     * The password for the hypervisor.
     */
    Password?: Password;
    /**
     * The tags of the hypervisor configuration to import.
     */
    Tags?: Tags;
    /**
     * The username for the hypervisor.
     */
    Username?: Username;
  }
  export interface ImportHypervisorConfigurationOutput {
    /**
     * The Amazon Resource Name (ARN) of the hypervisor you imported.
     */
    HypervisorArn?: ServerArn;
  }
  export type KmsKeyArn = string;
  export interface ListGatewaysInput {
    /**
     * The maximum number of gateways to list.
     */
    MaxResults?: MaxResults;
    /**
     * The next item following a partial list of returned resources. For example, if a request is made to return MaxResults number of resources, NextToken allows you to return more items in your list starting at the location pointed to by the next token.
     */
    NextToken?: NextToken;
  }
  export interface ListGatewaysOutput {
    /**
     * A list of your gateways.
     */
    Gateways?: Gateways;
    /**
     * The next item following a partial list of returned resources. For example, if a request is made to return maxResults number of resources, NextToken allows you to return more items in your list starting at the location pointed to by the next token.
     */
    NextToken?: NextToken;
  }
  export interface ListHypervisorsInput {
    /**
     * The maximum number of hypervisors to list.
     */
    MaxResults?: MaxResults;
    /**
     * The next item following a partial list of returned resources. For example, if a request is made to return maxResults number of resources, NextToken allows you to return more items in your list starting at the location pointed to by the next token.
     */
    NextToken?: NextToken;
  }
  export interface ListHypervisorsOutput {
    /**
     * A list of your Hypervisor objects, ordered by their Amazon Resource Names (ARNs).
     */
    Hypervisors?: Hypervisors;
    /**
     * The next item following a partial list of returned resources. For example, if a request is made to return maxResults number of resources, NextToken allows you to return more items in your list starting at the location pointed to by the next token.
     */
    NextToken?: NextToken;
  }
  export interface ListTagsForResourceInput {
    /**
     * The Amazon Resource Name (ARN) of the resource's tags to list.
     */
    ResourceArn: ResourceArn;
  }
  export interface ListTagsForResourceOutput {
    /**
     * The Amazon Resource Name (ARN) of the resource's tags that you listed.
     */
    ResourceArn?: ResourceArn;
    /**
     * A list of the resource's tags.
     */
    Tags?: Tags;
  }
  export interface ListVirtualMachinesInput {
    /**
     * The maximum number of virtual machines to list.
     */
    MaxResults?: MaxResults;
    /**
     * The next item following a partial list of returned resources. For example, if a request is made to return maxResults number of resources, NextToken allows you to return more items in your list starting at the location pointed to by the next token.
     */
    NextToken?: NextToken;
  }
  export interface ListVirtualMachinesOutput {
    /**
     * The next item following a partial list of returned resources. For example, if a request is made to return maxResults number of resources, NextToken allows you to return more items in your list starting at the location pointed to by the next token.
     */
    NextToken?: NextToken;
    /**
     * A list of your VirtualMachine objects, ordered by their Amazon Resource Names (ARNs).
     */
    VirtualMachines?: VirtualMachines;
  }
  export type MaxResults = number;
  export type MinuteOfHour = number;
  export type Name = string;
  export type NextToken = string;
  export type Password = string;
  export type Path = string;
  export interface PutMaintenanceStartTimeInput {
    /**
     * The day of the month to start maintenance on a gateway, as a numeric day of the month. (The original generated text "Valid values range from Sunday to Saturday" described DayOfWeek, not DayOfMonth; see the service documentation for the exact numeric range.)
     */
    DayOfMonth?: DayOfMonth;
    /**
     * The day of the week to start maintenance on a gateway.
     */
    DayOfWeek?: DayOfWeek;
    /**
     * The Amazon Resource Name (ARN) for the gateway, used to specify its maintenance start time.
     */
    GatewayArn: GatewayArn;
    /**
     * The hour of the day to start maintenance on a gateway.
     */
    HourOfDay: HourOfDay;
    /**
     * The minute of the hour to start maintenance on a gateway.
     */
    MinuteOfHour: MinuteOfHour;
  }
  export interface PutMaintenanceStartTimeOutput {
    /**
     * The Amazon Resource Name (ARN) of a gateway for which you set the maintenance start time.
     */
    GatewayArn?: GatewayArn;
  }
  export type ResourceArn = string;
  export type ServerArn = string;
  export interface Tag {
    /**
     * The key part of a tag's key-value pair. The key can't start with aws:.
     */
    Key: TagKey;
    /**
     * The value part of a tag's key-value pair.
     */
    Value: TagValue;
  }
  export type TagKey = string;
  export type TagKeys = TagKey[];
  export interface TagResourceInput {
    /**
     * The Amazon Resource Name (ARN) of the resource to tag.
     */
    ResourceARN: ResourceArn;
    /**
     * A list of tags to assign to the resource.
     */
    Tags: Tags;
  }
  export interface TagResourceOutput {
    /**
     * The Amazon Resource Name (ARN) of the resource you tagged.
     */
    ResourceARN?: ResourceArn;
  }
  export type TagValue = string;
  export type Tags = Tag[];
  export interface TestHypervisorConfigurationInput {
    /**
     * The Amazon Resource Name (ARN) of the gateway to the hypervisor to test.
     */
    GatewayArn: GatewayArn;
    /**
     * The server host of the hypervisor. This can be either an IP address or a fully-qualified domain name (FQDN).
     */
    Host: Host;
    /**
     * The password for the hypervisor.
     */
    Password?: Password;
    /**
     * The username for the hypervisor.
     */
    Username?: Username;
  }
  export interface TestHypervisorConfigurationOutput {
  }
  export type Time = Date;
  export interface UntagResourceInput {
    /**
     * The Amazon Resource Name (ARN) of the resource from which to remove tags.
     */
    ResourceARN: ResourceArn;
    /**
     * The list of tag keys specifying which tags to remove.
     */
    TagKeys: TagKeys;
  }
  export interface UntagResourceOutput {
    /**
     * The Amazon Resource Name (ARN) of the resource from which you removed tags.
     */
    ResourceARN?: ResourceArn;
  }
  export interface UpdateGatewayInformationInput {
    /**
     * The Amazon Resource Name (ARN) of the gateway to update.
     */
    GatewayArn: GatewayArn;
    /**
     * The updated display name of the gateway.
     */
    GatewayDisplayName?: Name;
  }
  export interface UpdateGatewayInformationOutput {
    /**
     * The Amazon Resource Name (ARN) of the gateway you updated.
     */
    GatewayArn?: GatewayArn;
  }
  export interface UpdateHypervisorInput {
    /**
     * The updated host of the hypervisor. This can be either an IP address or a fully-qualified domain name (FQDN).
     */
    Host?: Host;
    /**
     * The Amazon Resource Name (ARN) of the hypervisor to update.
     */
    HypervisorArn: ServerArn;
    /**
     * The updated password for the hypervisor.
     */
    Password?: Password;
    /**
     * The updated username for the hypervisor.
     */
    Username?: Username;
  }
  export interface UpdateHypervisorOutput {
    /**
     * The Amazon Resource Name (ARN) of the hypervisor you updated.
     */
    HypervisorArn?: ServerArn;
  }
  export type Username = string;
  export interface VirtualMachine {
    /**
     * The host name of the virtual machine.
     */
    HostName?: Name;
    /**
     * The ID of the virtual machine's hypervisor.
     */
    HypervisorId?: string;
    /**
     * The most recent date a virtual machine was backed up, in Unix format and UTC time.
     */
    LastBackupDate?: Time;
    /**
     * The name of the virtual machine.
     */
    Name?: Name;
    /**
     * The path of the virtual machine.
     */
    Path?: Path;
    /**
     * The Amazon Resource Name (ARN) of the virtual machine.
     */
    ResourceArn?: ResourceArn;
  }
  export type VirtualMachines = VirtualMachine[];
  /**
   * A string in YYYY-MM-DD format that represents the latest possible API version that can be used in this service. Specify 'latest' to use the latest possible version.
   */
  export type apiVersion = "2021-01-01"|"latest"|string;
  export interface ClientApiVersions {
    /**
     * A string in YYYY-MM-DD format that represents the latest possible API version that can be used in this service. Specify 'latest' to use the latest possible version.
     */
    apiVersion?: apiVersion;
  }
  export type ClientConfiguration = ServiceConfigurationOptions & ClientApiVersions;
  /**
   * Contains interfaces for use with the BackupGateway client.
   */
  export import Types = BackupGateway;
}
export = BackupGateway; | the_stack |
import _ from 'lodash';
import {
MediaItemBase,
mediaItemColumns,
MediaItemItemsResponse,
} from 'src/entity/mediaItem';
import { Database } from 'src/dbconfig';
import { Seen } from 'src/entity/seen';
import { UserRating, userRatingColumns } from 'src/entity/userRating';
import { GetItemsArgs } from 'src/repository/mediaItem';
import { TvEpisode, tvEpisodeColumns } from 'src/entity/tvepisode';
import { Image } from 'src/entity/image';
import { Knex } from 'knex';
import { List, listItemColumns } from 'src/entity/list';
// eslint-disable-next-line @typescript-eslint/no-explicit-any
/**
 * Fetches media items via the knex query builders produced by
 * getItemsKnexSql. When `args.page` is set, runs the count and pagination
 * queries in a single transaction and returns a paginated envelope
 * ({ from, to, data, total, page, totalPages }); otherwise returns the full
 * mapped result list. Throws when the requested page starts past the total.
 */
// eslint-disable-next-line @typescript-eslint/no-explicit-any
export const getItemsKnex = async (args: any): Promise<any> => {
  const { page } = args;
  const { sqlQuery, sqlCountQuery, sqlPaginationQuery } = await getItemsKnexSql(
    args
  );

  // Unpaginated path: run the plain query and map each raw row.
  if (!page) {
    const rows = await sqlQuery;
    return rows.map(mapRawResult);
  }

  // Paginated path: count + page query share one transaction so the
  // totals stay consistent with the returned rows.
  const [countRows, pageRows] = await Database.knex.transaction(
    async (trx) => [
      await sqlCountQuery.transacting(trx),
      await sqlPaginationQuery.transacting(trx),
    ]
  );

  const itemsPerPage = 40;
  const total = Number(countRows[0].count);
  const from = itemsPerPage * (page - 1);

  if (from > total) {
    throw new Error('Invalid page number');
  }

  return {
    from: from,
    to: Math.min(total, itemsPerPage * page),
    data: pageRows.map(mapRawResult),
    total: total,
    page: page,
    totalPages: Math.ceil(total / itemsPerPage),
  };
};
/**
 * Builds the three knex queries used by `getItemsKnex`:
 * - `sqlQuery`: the full, filtered and ordered result query,
 * - `sqlCountQuery`: the same filters with a `count(*)` select,
 * - `sqlPaginationQuery`: limit/offset variant (only when `page` is set).
 *
 * Throws if the user has no watchlist.
 *
 * @param args - filtering/sorting/pagination options (see `GetItemsArgs`)
 */
const getItemsKnexSql = async (args: GetItemsArgs) => {
  const {
    onlyOnWatchlist,
    mediaType,
    userId,
    filter,
    orderBy,
    page,
    onlySeenItems,
    sortOrder,
    onlyWithNextEpisodesToWatch,
    onlyWithNextAiring,
    mediaItemIds,
    onlyWithUserRating,
    onlyWithoutUserRating,
    onlyWithProgress,
  } = args;
  const currentDateString = new Date().toISOString();
  // Every user is expected to own exactly one watchlist row in `list`.
  const watchlist = await Database.knex('list')
    .select('id')
    .where('userId', userId)
    .where('isWatchlist', true)
    .first();
  if (watchlist === undefined) {
    throw new Error(`user ${userId} has no watchlist`);
  }
  const watchlistId = watchlist.id;
  const query = Database.knex
    .select(generateColumnNames('firstUnwatchedEpisode', tvEpisodeColumns))
    .select(generateColumnNames('listItem', listItemColumns))
    .select(generateColumnNames('upcomingEpisode', tvEpisodeColumns))
    .select(generateColumnNames('lastAiredEpisode', tvEpisodeColumns))
    .select(generateColumnNames('userRating', userRatingColumns))
    .select(generateColumnNames('mediaItem', mediaItemColumns))
    .select({
      lastSeenAt: 'lastSeen.date',
      'lastSeen2.mediaItemId': 'lastSeen2.mediaItemId',
      numberOfEpisodes: 'numberOfEpisodes',
      unseenEpisodesCount: 'unseenEpisodesCount',
      seenEpisodesCount: 'seenEpisodesCount',
      poster: 'poster.id',
      backdrop: 'backdrop.id',
      progress: 'progress',
    })
    .from<MediaItemBase>('mediaItem')
    // Date of the user's most recent `seen` row of any type (drives `lastSeenAt`)
    .leftJoin<Seen>(
      (qb) =>
        qb
          .select('mediaItemId')
          .max('date', { as: 'date' })
          .from<Seen>('seen')
          .where('userId', userId)
          .groupBy('mediaItemId')
          .as('lastSeen'),
      'lastSeen.mediaItemId',
      'mediaItem.id'
    )
    // Same, restricted to rows of type 'seen' (drives the `seen` flag)
    .leftJoin<Seen>(
      (qb) =>
        qb
          .select('mediaItemId')
          .max('date', { as: 'date' })
          .from<Seen>('seen')
          .where('userId', userId)
          .where('type', 'seen')
          .groupBy('mediaItemId')
          .as('lastSeen2'),
      'lastSeen2.mediaItemId',
      'mediaItem.id'
    )
    // Number of episodes (non-special, with a release date in the past)
    .leftJoin<TvEpisode>(
      (qb) =>
        qb
          .select('tvShowId')
          .count('*', { as: 'numberOfEpisodes' })
          .from<TvEpisode>('episode')
          .whereNot('isSpecialEpisode', true)
          .andWhereNot('releaseDate', '')
          .andWhereNot('releaseDate', null)
          .andWhere('releaseDate', '<=', currentDateString)
          .groupBy('tvShowId')
          .as('numberOfEpisodes'),
      'numberOfEpisodes.tvShowId',
      'mediaItem.id'
    )
    // On watchlist (a listItem row on the user's watchlist, not bound to a
    // specific season or episode)
    .leftJoin<List>('listItem', (qb) => {
      qb.on('listItem.mediaItemId', 'mediaItem.id')
        .andOnNull('listItem.seasonId')
        .andOnNull('listItem.episodeId')
        .andOnVal('listItem.listId', watchlistId);
    })
    // Upcoming episode: the earliest non-special episode releasing in the future
    .leftJoin<TvEpisode>(
      (qb) =>
        qb
          .from<TvEpisode>('episode')
          .select('tvShowId')
          .min('seasonAndEpisodeNumber', {
            as: 'upcomingEpisodeSeasonAndEpisodeNumber',
          })
          .where('isSpecialEpisode', false)
          .where('releaseDate', '>=', currentDateString)
          .groupBy('tvShowId')
          .as('upcomingEpisodeHelper'),
      'upcomingEpisodeHelper.tvShowId',
      'mediaItem.id'
    )
    .leftJoin<TvEpisode>(
      Database.knex.ref('episode').as('upcomingEpisode'),
      (qb) =>
        qb
          .on('upcomingEpisode.tvShowId', 'mediaItem.id')
          .andOn(
            'upcomingEpisode.seasonAndEpisodeNumber',
            'upcomingEpisodeSeasonAndEpisodeNumber'
          )
    )
    // Last aired episode: the latest non-special episode already released
    .leftJoin<TvEpisode>(
      (qb) =>
        qb
          .from<TvEpisode>('episode')
          .select('tvShowId')
          .max('seasonAndEpisodeNumber', {
            as: 'lastAiredEpisodeSeasonAndEpisodeNumber',
          })
          .where('isSpecialEpisode', false)
          .where('releaseDate', '<', currentDateString)
          .groupBy('tvShowId')
          .as('lastAiredEpisodeHelper'),
      'lastAiredEpisodeHelper.tvShowId',
      'mediaItem.id'
    )
    .leftJoin<TvEpisode>(
      Database.knex.ref('episode').as('lastAiredEpisode'),
      (qb) =>
        qb
          .on('lastAiredEpisode.tvShowId', 'mediaItem.id')
          .andOn(
            'lastAiredEpisode.seasonAndEpisodeNumber',
            'lastAiredEpisodeSeasonAndEpisodeNumber'
          )
    )
    // Seen episodes count (inner grouping dedupes multiple `seen` rows per episode)
    .leftJoin<Seen>(
      (qb) =>
        qb
          .select('mediaItemId')
          .count('*', { as: 'seenEpisodesCount' })
          .from((qb: Knex.QueryBuilder) =>
            qb
              .select('mediaItemId')
              .from<Seen>('seen')
              .where('type', 'seen')
              .where('userId', userId)
              .whereNotNull('episodeId')
              .groupBy('mediaItemId', 'episodeId')
              .leftJoin('episode', 'episode.id', 'seen.episodeId')
              .whereNot('episode.isSpecialEpisode', true)
              .as('seen')
          )
          .groupBy('mediaItemId')
          .as('seenEpisodes'),
      'seenEpisodes.mediaItemId',
      'mediaItem.id'
    )
    // First unwatched episode and unseen episodes count
    .leftJoin<TvEpisode>(
      (qb) =>
        qb
          .from<TvEpisode>('episode')
          .select('tvShowId')
          .min('seasonAndEpisodeNumber', {
            as: 'seasonAndEpisodeNumber',
          })
          .count('*', { as: 'unseenEpisodesCount' })
          .leftJoin<Seen>('seen', (qb) =>
            qb.on('seen.episodeId', 'episode.id').andOnVal('seen.type', 'seen')
          )
          .whereNot('episode.isSpecialEpisode', true)
          .andWhereNot('episode.releaseDate', '')
          .andWhereNot('episode.releaseDate', null)
          .andWhere('episode.releaseDate', '<=', currentDateString)
          .andWhere((qb) => {
            qb.where('seen.userId', '<>', userId).orWhereNull('seen.userId');
          })
          .groupBy('tvShowId')
          .as('firstUnwatchedEpisodeHelper'),
      'firstUnwatchedEpisodeHelper.tvShowId',
      'mediaItem.id'
    )
    .leftJoin<TvEpisode>(
      Database.knex.ref('episode').as('firstUnwatchedEpisode'),
      (qb) =>
        qb
          .on('firstUnwatchedEpisode.tvShowId', 'mediaItem.id')
          .andOn(
            'firstUnwatchedEpisode.seasonAndEpisodeNumber',
            'firstUnwatchedEpisodeHelper.seasonAndEpisodeNumber'
          )
    )
    // User rating (overall item rating only: no season/episode binding)
    .leftJoin<UserRating>(
      (qb) =>
        qb
          .from('userRating')
          .whereNotNull('userRating.rating')
          .orWhereNotNull('userRating.review')
          .as('userRating'),
      (qb) =>
        qb
          .on('userRating.mediaItemId', 'mediaItem.id')
          .andOnVal('userRating.userId', userId)
          .andOnNull('userRating.episodeId')
          .andOnNull('userRating.seasonId')
    )
    // Poster
    .leftJoin<Image>(
      (qb) =>
        qb
          .from('image')
          .where('type', 'poster')
          .whereNull('seasonId')
          .as('poster'),
      'poster.mediaItemId',
      'mediaItem.id'
    )
    // Backdrop
    .leftJoin<Image>(
      (qb) =>
        qb
          .from('image')
          .where('type', 'backdrop')
          .whereNull('seasonId')
          .as('backdrop'),
      'backdrop.mediaItemId',
      'mediaItem.id'
    )
    // Progress (latest in-progress row; progress === 1 is treated as finished)
    .leftJoin<Seen>(
      (qb) =>
        qb
          .from<Seen>('seen')
          .select('mediaItemId')
          .max('date', { as: 'progressDate' })
          .whereNull('episodeId')
          .where('type', 'progress')
          .where('userId', userId)
          .groupBy('mediaItemId')
          .as('progressHelper'),
      'progressHelper.mediaItemId',
      'mediaItem.id'
    )
    .leftJoin<Seen>(
      (qb) =>
        qb
          .from<Seen>('seen')
          .select('date')
          .max('progress', { as: 'progress' })
          .groupBy('date')
          .where('type', 'progress')
          .where('userId', userId)
          .whereNot('progress', 1)
          .as('progress'),
      (qb) =>
        qb
          .on('progressHelper.mediaItemId', 'mediaItem.id')
          .andOn('progress.date', 'progressDate')
    );
  // When explicit ids are requested, all watchlist/seen based filtering is skipped.
  if (Array.isArray(mediaItemIds)) {
    query.whereIn('mediaItem.id', mediaItemIds);
  } else {
    // Baseline visibility: item is either on the watchlist or has been seen.
    query.where((qb) =>
      qb
        .whereNotNull('listItem.mediaItemId')
        .orWhereNotNull('lastSeen.mediaItemId')
    );
    if (onlyOnWatchlist) {
      query.whereNotNull('listItem.mediaItemId');
    }
    if (onlySeenItems === true) {
      query.whereNotNull('lastSeen2.mediaItemId');
    }
    if (onlySeenItems === false) {
      query
        .andWhereNot((qb) =>
          qb
            .where('mediaItem.mediaType', 'tv')
            .andWhere('firstUnwatchedEpisode.tvShowId', null)
        )
        .andWhere((qb) =>
          qb
            .where('mediaItem.mediaType', 'tv')
            .orWhere('mediaItem.releaseDate', '<=', currentDateString)
        );
    }
    // Media type
    if (mediaType) {
      query.andWhere('mediaItem.mediaType', mediaType);
    }
    // Filter (substring match on title; value is passed as a knex binding)
    if (filter && filter.trim().length > 0) {
      query.andWhere('mediaItem.title', 'LIKE', `%${filter}%`);
    }
    // Next airing
    if (onlyWithNextAiring) {
      if (mediaType) {
        if (mediaType === 'tv') {
          query.andWhere('upcomingEpisode.releaseDate', '>', currentDateString);
        } else {
          query.andWhere('mediaItem.releaseDate', '>', currentDateString);
        }
      } else {
        query.andWhere((qb) =>
          qb
            .where((qb) =>
              qb
                .whereNot('mediaItem.mediaType', 'tv')
                .andWhere('mediaItem.releaseDate', '>', currentDateString)
            )
            .orWhere((qb) =>
              qb
                .where('mediaItem.mediaType', 'tv')
                .andWhere('upcomingEpisode.releaseDate', '>', currentDateString)
            )
        );
      }
      query.whereNotNull('listItem.mediaItemId');
    }
    // nextEpisodesToWatchSubQuery
    if (onlyWithNextEpisodesToWatch === true) {
      query
        .where('seenEpisodesCount', '>', 0)
        .andWhere('unseenEpisodesCount', '>', 0);
    }
    if (onlyWithUserRating === true) {
      query.whereNotNull('userRating.rating');
    }
    if (onlyWithoutUserRating === true) {
      query.whereNull('userRating.rating');
    }
    if (onlyWithProgress) {
      query.where((qb) =>
        qb
          .where((qb) =>
            qb.whereNot('mediaItem.mediaType', 'tv').whereNotNull('progress')
          )
          .orWhere((qb) =>
            qb
              .where('mediaItem.mediaType', 'tv')
              .where('seenEpisodesCount', '>', 0)
              .andWhere('unseenEpisodesCount', '>', 0)
          )
      );
    }
  }
  if (orderBy && sortOrder) {
    // sortOrder is validated here before being interpolated into raw SQL below.
    if (
      sortOrder.toLowerCase() !== 'asc' &&
      sortOrder.toLowerCase() !== 'desc'
    ) {
      throw new Error('Sort order should by either asc or desc');
    }
    switch (orderBy) {
      case 'title':
        query.orderBy('mediaItem.title', sortOrder);
        break;
      case 'releaseDate':
        query.orderBy('mediaItem.releaseDate', sortOrder);
        query.orderBy('mediaItem.title', 'asc');
        break;
      case 'status':
        query.orderBy('mediaItem.status', sortOrder);
        query.orderBy('mediaItem.title', 'asc');
        break;
      case 'mediaType':
        query.orderBy('mediaItem.mediaType', sortOrder);
        query.orderBy('mediaItem.title', 'asc');
        break;
      case 'unseenEpisodes':
        query.orderBy('unseenEpisodesCount', sortOrder);
        query.orderBy('mediaItem.title', 'asc');
        break;
      case 'lastSeen':
        query.orderBy('lastSeenAt', sortOrder);
        query.orderBy('mediaItem.title', 'asc');
        break;
      case 'nextAiring':
        query.orderByRaw(`CASE
          WHEN "mediaItem"."mediaType" = 'tv' THEN "upcomingEpisode"."releaseDate"
          ELSE "mediaItem"."releaseDate"
          END ${sortOrder} NULLS LAST`);
        query.orderBy('mediaItem.title', 'asc');
        break;
      case 'lastAiring':
        // currentDateString is a server-generated ISO string (not user input),
        // so interpolating it into the raw SQL is safe here.
        query.orderByRaw(`CASE
          WHEN "mediaItem"."mediaType" = 'tv' THEN "lastAiredEpisode"."releaseDate"
          ELSE CASE
          WHEN "mediaItem"."releaseDate" >= '${currentDateString}' THEN NULL
          ELSE "mediaItem"."releaseDate"
          END
          END ${sortOrder} NULLS LAST`);
        query.orderBy('mediaItem.title', 'asc');
        break;
      case 'progress':
        query.orderByRaw(`CASE
          WHEN "mediaItem"."mediaType" = 'tv' THEN "unseenEpisodesCount"
          ELSE "progress"
          END ${sortOrder}`);
        query.orderBy('mediaItem.title', 'asc');
        break;
      default:
        throw new Error(`Unsupported orderBy value: ${orderBy}`);
    }
  }
  // The count query reuses all filters but drops ordering and column selection.
  const sqlCountQuery = query
    .clone()
    .clearOrder()
    .clearSelect()
    .count('*', { as: 'count' });
  let sqlPaginationQuery;
  if (page) {
    const itemsPerPage = 40;
    const skip = itemsPerPage * (page - 1);
    const take = itemsPerPage;
    sqlPaginationQuery = query.clone().limit(take).offset(skip);
  }
  return {
    sqlQuery: query,
    sqlCountQuery: sqlCountQuery,
    sqlPaginationQuery: sqlPaginationQuery,
  };
};
/**
 * Produces a select-alias map of the form { "prefix.prop": "prefix.prop" } for
 * every property, so joined tables can be selected with fully-qualified,
 * collision-free column names.
 *
 * @param prefix - table/alias name used to qualify each column
 * @param properties - column names to qualify
 * @returns a record mapping each "prefix.prop" key to the identical string
 */
export const generateColumnNames = <
  Prefix extends string,
  Properties extends ReadonlyArray<string>
>(
  prefix: Prefix,
  properties: Properties
) => {
  // Single-pass build via Object.fromEntries; the original spread-in-reduce
  // pattern copied the accumulator on every iteration (O(n²)).
  return Object.fromEntries(
    properties.map((property): [string, string] => [
      `${prefix}.${property}`,
      `${prefix}.${property}`,
    ])
  ) as {
    [Key in `${Prefix}.${Properties[number]}`]: Key;
  };
};
/**
 * Maps one prefixed episode column group of a raw joined row (e.g. all
 * 'upcomingEpisode.*' keys) into a plain episode object. The three episode
 * joins below previously repeated this ~20-field mapping inline.
 */
// eslint-disable-next-line @typescript-eslint/no-explicit-any
const mapEpisodeFromRow = (row: any, prefix: string) => ({
  id: row[`${prefix}.id`],
  title: row[`${prefix}.title`],
  description: row[`${prefix}.description`],
  episodeNumber: row[`${prefix}.episodeNumber`],
  seasonNumber: row[`${prefix}.seasonNumber`],
  releaseDate: row[`${prefix}.releaseDate`],
  runtime: row[`${prefix}.runtime`],
  tvShowId: row[`${prefix}.tvShowId`],
  tmdbId: row[`${prefix}.tmdbId`],
  imdbId: row[`${prefix}.imdbId`],
  tvdbId: row[`${prefix}.tvdbId`],
  traktId: row[`${prefix}.traktId`],
  seasonId: row[`${prefix}.seasonId`],
  isSpecialEpisode: Boolean(row[`${prefix}.isSpecialEpisode`]),
  userRating: undefined,
  seenHistory: undefined,
  lastSeenAt: undefined,
});
/**
 * Maps a raw row produced by the getItemsKnexSql joins into a
 * MediaItemItemsResponse-shaped object.
 */
// eslint-disable-next-line @typescript-eslint/no-explicit-any
const mapRawResult = (row: any): MediaItemItemsResponse => {
  return {
    id: row['mediaItem.id'],
    tmdbId: row['mediaItem.tmdbId'],
    tvmazeId: row['mediaItem.tvmazeId'],
    igdbId: row['mediaItem.igdbId'],
    openlibraryId: row['mediaItem.openlibraryId'],
    tvdbId: row['mediaItem.tvdbId'],
    traktId: row['mediaItem.traktId'],
    imdbId: row['mediaItem.imdbId'],
    audibleId: row['mediaItem.audibleId'],
    mediaType: row['mediaItem.mediaType'],
    numberOfSeasons: row['mediaItem.numberOfSeasons'],
    status: row['mediaItem.status'],
    // `platform` is stored as a JSON string; parse or fall back to null.
    platform: row['mediaItem.platform']
      ? JSON.parse(row['mediaItem.platform'])
      : null,
    title: row['mediaItem.title'],
    slug: row['mediaItem.slug'],
    originalTitle: row['mediaItem.originalTitle'],
    tmdbRating: row['mediaItem.tmdbRating'],
    runtime: row['mediaItem.runtime'],
    releaseDate: row['mediaItem.releaseDate'],
    overview: row['mediaItem.overview'],
    lastTimeUpdated: row['mediaItem.lastTimeUpdated'],
    source: row['mediaItem.source'],
    network: row['mediaItem.network'],
    language: row['mediaItem.language'],
    // Comma-separated list columns become string arrays (undefined stays undefined).
    genres: row['mediaItem.genres']?.split(','),
    authors: row['mediaItem.authors']?.split(','),
    narrators: row['mediaItem.narrators']?.split(','),
    url: row['mediaItem.url'],
    developer: row['mediaItem.developer'],
    lastSeenAt: row['lastSeenAt'],
    progress: row['progress'],
    poster: row['poster'] ? `/img/${row['poster']}` : null,
    posterSmall: row['poster'] ? `/img/${row['poster']}?size=small` : null,
    backdrop: row['backdrop'] ? `/img/${row['backdrop']}` : null,
    hasDetails: false,
    // A tv show counts as seen when every released episode has been watched;
    // other media types rely on the lastSeen2 ('seen'-typed) join.
    seen:
      row['mediaItem.mediaType'] === 'tv'
        ? row.numberOfEpisodes > 0 && !row.unseenEpisodesCount
        : Boolean(row['lastSeen2.mediaItemId']),
    onWatchlist: Boolean(row['listItem.id']),
    unseenEpisodesCount: row.unseenEpisodesCount || 0,
    seenEpisodesCount: row['seenEpisodesCount'],
    numberOfEpisodes: row.numberOfEpisodes,
    nextAiring:
      row['mediaItem.mediaType'] === 'tv'
        ? row['upcomingEpisode.releaseDate']
        : row['mediaItem.releaseDate'],
    lastAiring:
      row['mediaItem.mediaType'] === 'tv'
        ? row['lastAiredEpisode.releaseDate']
        : row['mediaItem.releaseDate'],
    userRating: row['userRating.id']
      ? {
          id: row['userRating.id'],
          date: row['userRating.date'],
          mediaItemId: row['userRating.mediaItemId'],
          rating: row['userRating.rating'],
          review: row['userRating.review'],
          userId: row['userRating.userId'],
          episodeId: row['userRating.episodeId'],
          seasonId: row['userRating.seasonId'],
        }
      : undefined,
    firstUnwatchedEpisode: row['firstUnwatchedEpisode.id']
      ? mapEpisodeFromRow(row, 'firstUnwatchedEpisode')
      : undefined,
    // Note: presence of an upcoming episode is keyed on its releaseDate,
    // unlike the other two episode groups which are keyed on id.
    upcomingEpisode: row['upcomingEpisode.releaseDate']
      ? { ...mapEpisodeFromRow(row, 'upcomingEpisode'), seen: false }
      : undefined,
    lastAiredEpisode: row['lastAiredEpisode.id']
      ? { ...mapEpisodeFromRow(row, 'lastAiredEpisode'), seen: false }
      : undefined,
  } as unknown as MediaItemItemsResponse;
};
/**
 * Static helpers for composing and consuming the "first unwatched episode"
 * joins used by media-item queries.
 */
export class QueryBuilderHelper {
  /**
   * Maps the `mediaItem.firstUnwatchedEpisode.*` columns of a raw joined row
   * into a plain episode object.
   *
   * NOTE(review): the key prefix here (`mediaItem.firstUnwatchedEpisode.`)
   * differs from the `firstUnwatchedEpisode.` aliases selected by
   * `firstUnwatchedEpisode()` below — presumably the two methods serve
   * different queries; confirm against the callers.
   */
  static mapFirstUnwatchedEpisode(row: Record<string, unknown>) {
    return {
      description: row['mediaItem.firstUnwatchedEpisode.description'],
      episodeNumber: row['mediaItem.firstUnwatchedEpisode.episodeNumber'],
      id: row['mediaItem.firstUnwatchedEpisode.id'],
      imdbId: row['mediaItem.firstUnwatchedEpisode.imdbId'],
      isSpecialEpisode: Boolean(
        row['mediaItem.firstUnwatchedEpisode.isSpecialEpisode']
      ),
      releaseDate: row['mediaItem.firstUnwatchedEpisode.releaseDate'],
      runtime: row['mediaItem.firstUnwatchedEpisode.runtime'],
      seasonId: row['mediaItem.firstUnwatchedEpisode.seasonId'],
      seasonNumber: row['mediaItem.firstUnwatchedEpisode.seasonNumber'],
      title: row['mediaItem.firstUnwatchedEpisode.title'],
      tmdbId: row['mediaItem.firstUnwatchedEpisode.tmdbId'],
      traktId: row['mediaItem.firstUnwatchedEpisode.traktId'],
      tvdbId: row['mediaItem.firstUnwatchedEpisode.tvdbId'],
      tvShowId: row['mediaItem.firstUnwatchedEpisode.tvShowId'],
    };
  }
  /**
   * Adds the selects and joins needed to resolve, per TV show, the earliest
   * released non-special episode the given user has not marked as seen.
   *
   * @param query - query builder to extend (mutated and returned)
   * @param userId - user whose seen history is consulted
   * @param mediaItemId - column reference joined against the episode's tvShowId
   */
  static firstUnwatchedEpisode<
    TRecord = unknown,
    TResult = Record<string, unknown>[]
  >(
    query: Knex.QueryBuilder<TRecord, TResult>,
    userId: number,
    mediaItemId: string
  ) {
    return query
      .select({
        'firstUnwatchedEpisode.episodeNumber':
          'firstUnwatchedEpisode.episodeNumber',
        'firstUnwatchedEpisode.seasonNumber':
          'firstUnwatchedEpisode.seasonNumber',
        'firstUnwatchedEpisode.title': 'firstUnwatchedEpisode.title',
        'firstUnwatchedEpisode.releaseDate':
          'firstUnwatchedEpisode.releaseDate',
        'firstUnwatchedEpisode.description':
          'firstUnwatchedEpisode.description',
        'firstUnwatchedEpisode.id': 'firstUnwatchedEpisode.id',
        'firstUnwatchedEpisode.imdbId': 'firstUnwatchedEpisode.imdbId',
        'firstUnwatchedEpisode.runtime': 'firstUnwatchedEpisode.runtime',
        'firstUnwatchedEpisode.seasonId': 'firstUnwatchedEpisode.seasonId',
        'firstUnwatchedEpisode.tmdbId': 'firstUnwatchedEpisode.tmdbId',
        'firstUnwatchedEpisode.tvShowId': 'firstUnwatchedEpisode.tvShowId',
        'firstUnwatchedEpisode.isSpecialEpisode':
          'firstUnwatchedEpisode.isSpecialEpisode',
        'firstUnwatchedEpisode.traktId': 'firstUnwatchedEpisode.traktId',
        'firstUnwatchedEpisode.tvdbId': 'firstUnwatchedEpisode.tvdbId',
      })
      // Helper subquery: per show, the smallest seasonAndEpisodeNumber among
      // released, non-special episodes without a matching 'seen' row.
      .leftJoin<TvEpisode>(
        (qb) =>
          qb
            .from<TvEpisode>('episode')
            .select('tvShowId')
            .min('seasonAndEpisodeNumber', {
              as: 'seasonAndEpisodeNumber',
            })
            .leftJoin<Seen>('seen', (qb) =>
              qb
                .on('seen.episodeId', 'episode.id')
                .andOnVal('seen.type', 'seen')
            )
            .whereNot('episode.isSpecialEpisode', true)
            .andWhereNot('episode.releaseDate', '')
            .andWhereNot('episode.releaseDate', null)
            .andWhere('episode.releaseDate', '<=', new Date().toISOString())
            .andWhere((qb) => {
              qb.where('seen.userId', '<>', userId).orWhereNull('seen.userId');
            })
            .groupBy('tvShowId')
            .as('firstUnwatchedEpisodeHelper'),
        'firstUnwatchedEpisodeHelper.tvShowId',
        mediaItemId
      )
      // Join the actual episode row matching the helper's minimum.
      .leftJoin<TvEpisode>(
        Database.knex.ref('episode').as('firstUnwatchedEpisode'),
        (qb) =>
          qb
            .on('firstUnwatchedEpisode.tvShowId', mediaItemId)
            .andOn(
              'firstUnwatchedEpisode.seasonAndEpisodeNumber',
              'firstUnwatchedEpisodeHelper.seasonAndEpisodeNumber'
            )
      );
  }
}
import { expect } from "chai";
import "test/helper/ToneAudioBuffer";
import { getContext } from "../Global";
import { ToneAudioBuffer } from "./ToneAudioBuffer";
// Audio fixture used throughout this suite; asserted below to be a ~3 second
// mono file.
const testFile = "./audio/sine.wav";

describe("ToneAudioBuffer", () => {
    // Construction, load callbacks, and basic accessors.
    context("basic", () => {
        it("can be created and disposed", () => {
            const buff = new ToneAudioBuffer(testFile);
            buff.dispose();
        });
        it("loads a file from a url string", done => {
            const buffer = new ToneAudioBuffer(testFile, (buff) => {
                expect(buff).to.be.instanceof(ToneAudioBuffer);
                buffer.dispose();
                done();
            });
        });
        it("has a duration", done => {
            const buffer = new ToneAudioBuffer(testFile, () => {
                expect(buffer.duration).to.be.closeTo(3, 0.01);
                buffer.dispose();
                done();
            });
        });
        it("can be constructed with no arguments", () => {
            const buffer = new ToneAudioBuffer();
            expect(buffer.length).to.equal(0);
            expect(buffer.duration).to.equal(0);
            expect(buffer.numberOfChannels).to.equal(0);
            buffer.dispose();
        });
        it("can get the number of channels", done => {
            const buffer = new ToneAudioBuffer(testFile, () => {
                expect(buffer.numberOfChannels).to.be.equal(1);
                buffer.dispose();
                done();
            });
        });
        it("can get the length of the buffer", done => {
            const buffer = new ToneAudioBuffer(testFile, () => {
                expect(buffer.length).to.be.a("number");
                expect(buffer.length).to.be.above(130000);
                buffer.dispose();
                done();
            });
        });
        it("can be constructed with an options object", done => {
            const buffer = new ToneAudioBuffer({
                onload: () => {
                    buffer.dispose();
                    done();
                },
                reverse: true,
                url: testFile,
            });
            expect(buffer.reverse).to.equal(true);
        });
        it("takes an AudioBuffer in the constructor method", async () => {
            const audioBuffer = await ToneAudioBuffer.load(testFile);
            const buffer = new ToneAudioBuffer({
                url: audioBuffer,
            });
            const testOne = new ToneAudioBuffer(buffer.get());
            expect(testOne.get()).to.equal(buffer.get());
            testOne.dispose();
            buffer.dispose();
        });
        it("takes a loaded ToneAudioBuffer in the constructor method", async () => {
            const audioBuffer = await ToneAudioBuffer.fromUrl(testFile);
            const buffer = new ToneAudioBuffer({
                url: audioBuffer,
            });
            const testOne = new ToneAudioBuffer(buffer);
            expect(testOne.get()).to.equal(buffer.get());
            testOne.dispose();
            buffer.dispose();
        });
        it("takes an unloaded Tone.ToneAudioBuffer in the constructor method", done => {
            const unloadedToneAudioBuffer = new ToneAudioBuffer(testFile);
            const buffer = new ToneAudioBuffer({
                onload(): void {
                    const testOne = new ToneAudioBuffer(buffer);
                    expect(unloadedToneAudioBuffer.get()).to.equal(buffer.get());
                    unloadedToneAudioBuffer.dispose();
                    buffer.dispose();
                    done();
                },
                url: unloadedToneAudioBuffer,
            });
        });
        it("takes Tone.ToneAudioBuffer in the set method", done => {
            const buffer = new ToneAudioBuffer({
                url: testFile,
                onload(): void {
                    const testOne = new ToneAudioBuffer(testFile);
                    testOne.set(buffer);
                    expect(testOne.get()).to.equal(buffer.get());
                    testOne.dispose();
                    buffer.dispose();
                    done();
                },
            });
        });
    });
    // Error callbacks, fallback extensions, and the promise-returning loaders.
    context("loading", () => {
        it("invokes the error callback if there is a problem with the file", done => {
            const buffer = new ToneAudioBuffer("nosuchfile.wav", () => {
                throw new Error("shouldn't invoke this function");
            }, e => {
                buffer.dispose();
                done();
            });
        });
        it("invokes the error callback on static .load method", async () => {
            let hadError = false;
            try {
                await ToneAudioBuffer.load("nosuchfile.wav");
            } catch (e) {
                hadError = true;
            }
            expect(hadError).to.equal(true);
        });
        it("can load a file with fallback extensions", async () => {
            const buffer = await ToneAudioBuffer.load("./audio/sine.[nope|nada|wav]");
            expect(buffer).to.exist;
        });
        it("takes the first supported format when multiple extensions are provided", async () => {
            const buffer = await ToneAudioBuffer.load("./audio/sine.[wav|nope]");
            expect(buffer).to.exist;
        });
        it("instance .load method returns Promise", done => {
            const promise = (new ToneAudioBuffer()).load(testFile);
            expect(promise).to.have.property("then");
            promise.then((buff) => {
                expect(buff).to.be.instanceOf(ToneAudioBuffer);
                done();
            });
            promise.catch(() => {
                throw new Error("shouldn't invoke this function");
            });
        });
        it("invokes the error callback if the file is corrupt", done => {
            const buffer = new ToneAudioBuffer("./audio/corrupt.wav", () => {
                throw new Error("shouldn't invoke this function");
            }, e => {
                buffer.dispose();
                done();
            });
        });
    });
    // Channel access, array conversion, reverse, slice, and mono conversion.
    context("buffer manipulation", () => {
        it("returns an empty array if there is no channel data", () => {
            const buffer = new ToneAudioBuffer();
            expect(buffer.getChannelData(0).length).to.equal(0);
            buffer.dispose();
        });
        it("can get the channel data as an array", done => {
            const buffer = new ToneAudioBuffer(testFile, () => {
                expect(buffer.getChannelData(0)).to.be.an.instanceOf(Float32Array);
                expect(buffer.getChannelData(0).length).to.be.above(130000);
                buffer.dispose();
                done();
            });
        });
        it("can reverse a buffer", done => {
            const buffer = new ToneAudioBuffer(testFile, () => {
                const buffArray = buffer.get() as AudioBuffer;
                const lastSample = buffArray[buffArray.length - 1];
                buffer.reverse = true;
                expect((buffer.get() as AudioBuffer)[0]).to.equal(lastSample);
                // setting reverse again has no effect
                buffer.reverse = true;
                expect((buffer.get() as AudioBuffer)[0]).to.equal(lastSample);
                buffer.dispose();
                done();
            });
        });
        it("can convert from an array", () => {
            const buffer = new ToneAudioBuffer();
            const arr = new Float32Array(0.5 * buffer.sampleRate);
            arr[0] = 0.5;
            buffer.fromArray(arr);
            expect(buffer.duration).to.equal(0.5);
            expect(buffer.numberOfChannels).to.equal(1);
            // test the first sample of the first channel to see if it's the same
            expect(buffer.toArray(0)[0]).to.equal(0.5);
            buffer.dispose();
        });
        it("can create a buffer from an array using the static method", () => {
            const arr = new Float32Array(0.5 * getContext().sampleRate);
            arr[0] = 0.5;
            const buffer = ToneAudioBuffer.fromArray(arr);
            expect(buffer.duration).to.equal(0.5);
            expect(buffer.numberOfChannels).to.equal(1);
            // test the first sample of the first channel to see if it's the same
            expect(buffer.toArray(0)[0]).to.equal(0.5);
            // should return the same thing without the channel argument as well
            expect(buffer.toArray()[0]).to.equal(0.5);
            buffer.dispose();
        });
        it("can convert from a multidimentional array", () => {
            const buffer = new ToneAudioBuffer();
            const arr = [new Float32Array(0.5 * buffer.sampleRate), new Float32Array(0.5 * buffer.sampleRate)];
            arr[0][0] = 0.5;
            buffer.fromArray(arr);
            expect(buffer.duration).to.equal(0.5);
            expect(buffer.numberOfChannels).to.equal(2);
            expect(buffer.toArray(0)[0]).to.equal(0.5);
            buffer.dispose();
        });
        it("can convert to and from an array", () => {
            const buffer = new ToneAudioBuffer();
            const arr = [new Float32Array(0.5 * buffer.sampleRate), new Float32Array(0.5 * buffer.sampleRate)];
            arr[0][0] = 0.5;
            buffer.fromArray(arr);
            expect(buffer.toArray(0)[0]).to.equal(0.5);
            expect(buffer.toArray()[0][0]).to.equal(0.5);
            // with a selected channel
            expect(buffer.toArray(0)[0]).to.equal(0.5);
            buffer.dispose();
        });
        it("can slice a portion of the array", async () => {
            const buffer = await ToneAudioBuffer.fromUrl(testFile);
            // original duration
            expect(buffer.duration).to.be.closeTo(3, 0.01);
            const sliced1 = buffer.slice(0, 1);
            // confirm they have the same values
            const offset = Math.floor(buffer.sampleRate * 0.9);
            // does not modify the original
            expect(buffer.duration).to.be.closeTo(3, 0.01);
            expect(sliced1.duration).to.be.closeTo(1, 0.01);
            const sliced2 = sliced1.slice(0.5);
            expect(sliced2.duration).to.be.closeTo(0.5, 0.01);
            const sliced3 = buffer.slice(2);
            expect(sliced3.toArray(0)[Math.floor(0.5 * buffer.sampleRate) + 1])
                .to.equal(buffer.toArray(0)[Math.floor(2.5 * buffer.sampleRate) + 1]);
            buffer.dispose();
            sliced1.dispose();
            sliced2.dispose();
            sliced3.dispose();
        });
        it("slice can extend the buffer also", async () => {
            const buffer = await ToneAudioBuffer.fromUrl(testFile);
            // original duration
            expect(buffer.duration).to.be.closeTo(3, 0.01);
            const sliced = buffer.slice(0, 4);
            expect(sliced.duration).to.be.closeTo(4, 0.01);
            buffer.dispose();
            sliced.dispose();
        });
        it("can convert a buffer to mono", () => {
            const buffer = new ToneAudioBuffer();
            const arr = [new Float32Array(0.5 * buffer.sampleRate), new Float32Array(0.5 * buffer.sampleRate)];
            arr[0][0] = 0.5;
            buffer.fromArray(arr);
            expect(buffer.duration).to.equal(0.5);
            expect(buffer.numberOfChannels).to.equal(2);
            buffer.toMono();
            expect(buffer.numberOfChannels).to.equal(1);
            // should have averaged the two first samples
            expect(buffer.toArray()[0]).to.equal(0.25);
            buffer.dispose();
        });
        it("can use just the second channel of a buffer when making mono", () => {
            const buffer = new ToneAudioBuffer();
            const arr = [new Float32Array(0.5 * buffer.sampleRate), new Float32Array(0.5 * buffer.sampleRate)];
            arr[0][0] = 0.5;
            buffer.fromArray(arr);
            expect(buffer.duration).to.equal(0.5);
            expect(buffer.numberOfChannels).to.equal(2);
            buffer.toMono(1);
            expect(buffer.numberOfChannels).to.equal(1);
            // should have averaged the two first samples
            expect(buffer.toArray()[0]).to.equal(0);
            buffer.dispose();
        });
    });
    // Static format-support and fromUrl helpers.
    context("static methods", () => {
        it("Test if the browser supports the given type", () => {
            expect(ToneAudioBuffer.supportsType("test.wav")).to.equal(true);
            expect(ToneAudioBuffer.supportsType("wav")).to.equal(true);
            expect(ToneAudioBuffer.supportsType("path/to/test.wav")).to.equal(true);
            expect(ToneAudioBuffer.supportsType("path/to/test.nope")).to.equal(false);
        });
        it("can be constructed with ToneAudioBuffer.fromUrl", done => {
            ToneAudioBuffer.fromUrl("nosuchfile.wav").then(() => {
                throw new Error("shouldn't invoke this function");
            }).catch(() => {
                done();
            });
        });
    });
    // The global "all buffers loaded" promise.
    context("ToneAudioBuffer.loaded()", () => {
        it("returns a promise", () => {
            expect(ToneAudioBuffer.loaded()).to.have.property("then");
        });
        it("is invoked when all the buffers are loaded", async () => {
            const buff0 = new ToneAudioBuffer(testFile);
            const buff1 = new ToneAudioBuffer(testFile);
            await ToneAudioBuffer.loaded();
            expect(buff0.loaded).to.equal(true);
            expect(buff1.loaded).to.equal(true);
        });
        it("can be setup before the urls", async () => {
            const loadedPromise = ToneAudioBuffer.loaded();
            const buff0 = new ToneAudioBuffer(testFile);
            const buff1 = new ToneAudioBuffer(testFile);
            await loadedPromise;
            expect(buff0.loaded).to.equal(true);
            expect(buff1.loaded).to.equal(true);
        });
        it("invokes loaded even if there is an error", () => {
            ToneAudioBuffer.fromUrl(testFile);
            ToneAudioBuffer.fromUrl("nosuchfile.wav");
            return ToneAudioBuffer.loaded();
        });
    });
});
import { Browser, setStyleAttribute as setBaseStyleAttribute, getComponent, detach, isNullOrUndefined, removeClass, extend, isUndefined } from '@syncfusion/ej2-base';
import { StyleType, CollaborativeEditArgs, CellSaveEventArgs, ICellRenderer, IAriaOptions, completeAction } from './index';
import { HideShowEventArgs, invalidData } from './../common/index';
import { Cell, ColumnModel, duplicateSheet, getSheetIndex, getSheetIndexFromAddress, getSheetIndexFromId, getSheetNameFromAddress, hideShow, moveSheet, protectsheetHandler, refreshRibbonIcons, replace, replaceAll, setLinkModel, setLockCells, updateSheetFromDataSource } from '../../workbook/index';
import { IOffset, clearViewer, deleteImage, createImageElement, refreshImgCellObj, removeDataValidation } from './index';
import { Spreadsheet, removeSheetTab, rowHeightChanged, initiateFilterUI, deleteChart, IRenderer } from '../index';
import { SheetModel, getColumnsWidth, getSwapRange, CellModel, CellStyleModel, clearCells, RowModel, cFUndo } from '../../workbook/index';
import { RangeModel, getRangeIndexes, wrap, setRowHeight, insertModel, InsertDeleteModelArgs, getColumnWidth } from '../../workbook/index';
import { BeforeSortEventArgs, SortEventArgs, initiateSort, getIndexesFromAddress, getRowHeight, isLocked } from '../../workbook/index';
import { cellValidation, clearCFRule, ConditionalFormatModel, getColumn, getRow, updateCell } from '../../workbook/index';
import { getCell, setChart, refreshChartSize, HighlightCell, TopBottom, DataBar, ColorScale, IconSet, CFColor } from '../../workbook/index';
import { setCFRule, setMerge, Workbook, setAutoFill, getautofillDDB, getRowsHeight, ChartModel, deleteModel } from '../../workbook/index';
import { workbookFormulaOperation, DefineNameModel, getAddressInfo, getSheet, setCellFormat } from '../../workbook/index';
import { checkUniqueRange, checkConditionalFormat, ActionEventArgs, skipHiddenIdx, isFilterHidden } from '../../workbook/index';
import { applyProtect, chartDesignTab, copy, cut, freeze, goToSheet, hideSheet, paste, performUndoRedo, refreshChartCellObj, removeHyperlink, removeWorkbookProtection, setProtectWorkbook, sheetNameUpdate, showSheet, updateToggleItem } from './event';
/**
 * Defers the given action to the browser's next animation frame.
 *
 * @param {Function} fn - Function that contains the actual action
 * @returns {void}
 * @hidden
 */
export function getUpdateUsingRaf(fn: Function): void {
    requestAnimationFrame((): void => { fn(); });
}
/**
 * Detaches every child node of the given element, front to back.
 *
 * @param {Element} parent - Specify the parent
 * @returns {void} - The function used to remove the dom element children.
 * @hidden
 */
export function removeAllChildren(parent: Element): void {
    let child: Node = parent.firstChild;
    while (child) {
        parent.removeChild(child);
        child = parent.firstChild;
    }
}
/**
 * Computes the row-header (colgroup) width for a given row index: a fixed
 * 30px up to 3 digits, then 10px per digit.
 *
 * @param {number} index - Specify the index
 * @returns {number} - The function used to get colgroup width based on the row index.
 * @hidden
 */
export function getColGroupWidth(index: number): number {
    const digitCount: number = index.toString().length;
    return digitCount > 3 ? digitCount * 10 : 30;
}
// Cached scrollbar width; null until first measured by getScrollBarWidth().
let scrollAreaWidth: number = null;
// Line-height multiplier — presumably used for text measurement elsewhere in
// this module; confirm at the use site (not visible in this chunk).
let textLineHeight: number = 1.24;
/**
 * Measures the native scrollbar width once (via an off-screen scrolling div)
 * and caches the result in `scrollAreaWidth` for subsequent calls.
 *
 * @hidden
 * @returns {number} - To get scrollbar width
 */
export function getScrollBarWidth(): number {
    if (scrollAreaWidth !== null) { return scrollAreaWidth; }
    const probe: HTMLDivElement = document.createElement('div');
    probe.style.cssText = 'width:100px;height: 100px;overflow: scroll;position: absolute;top: -9999px;';
    document.body.appendChild(probe);
    const measured: number = (probe.offsetWidth - probe.clientWidth) | 0;
    document.body.removeChild(probe);
    scrollAreaWidth = measured;
    return scrollAreaWidth;
}
// CSS classes of the chrome panels stacked around the sheet area; used as the default sibling filter by getSiblingsHeight().
const classes: string[] = ['e-ribbon', 'e-formula-bar-panel', 'e-sheet-tab-panel', 'e-header-toolbar'];
/**
 * Sums the heights of the element's siblings (in both directions) whose class
 * matches one of the given class names.
 *
 * @hidden
 * @param {HTMLElement} element - Element whose siblings are measured.
 * @param {string[]} classList - Class names that qualify a sibling for measurement.
 * @returns {number} - Combined height of the matching siblings.
 */
export function getSiblingsHeight(element: HTMLElement, classList: string[] = classes): number {
    return getHeightFromDirection(element, 'previous', classList) +
        getHeightFromDirection(element, 'next', classList);
}
/**
 * Walks the sibling chain in one direction ('previous' or 'next') and sums the
 * offsetHeight of every sibling carrying one of the given class names.
 *
 * @param {HTMLElement} element - Starting element (itself excluded).
 * @param {string} direction - 'previous' or 'next'.
 * @param {string[]} classList - Class names that qualify a sibling.
 * @returns {number} - Accumulated height.
 */
function getHeightFromDirection(element: HTMLElement, direction: string, classList: string[]): number {
    const siblingProp: string = direction + 'ElementSibling';
    let total: number = 0;
    let node: HTMLElement = (element)[siblingProp];
    while (node) {
        if (classList.some((name: string) => node.classList.contains(name))) {
            total += node.offsetHeight;
        }
        node = (node)[siblingProp];
    }
    return total;
}
/**
 * Determines whether the given cell range intersects the rendered viewport.
 * Without virtualization everything is rendered, so the answer is always true.
 * When `isModify` is set, the range is clamped in place to the visible portion
 * and true is returned only if some part remains inside the viewport.
 *
 * @hidden
 * @param {Spreadsheet} context - The spreadsheet instance.
 * @param {number[]} range - Cell range [startRow, startCol, endRow, endCol]; mutated when isModify is true.
 * @param {boolean} isModify - Clamp the range to the viewport bounds.
 * @returns {boolean} - Whether the (possibly clamped) range is in view.
 */
export function inView(context: Spreadsheet, range: number[], isModify?: boolean): boolean {
    if (!context.scrollSettings.enableVirtualization) {
        return true;
    }
    const sheet: SheetModel = context.getActiveSheet();
    const freezeRowCount: number = context.frozenRowCount(sheet);
    const freezeColCount: number = context.frozenColCount(sheet);
    // Viewport bounds in sheet coordinates; top/left are offset past the frozen pane.
    const viewTop: number = context.viewport.topIndex + freezeRowCount;
    const viewLeft: number = context.viewport.leftIndex + freezeColCount;
    const viewBottom: number = context.viewport.bottomIndex;
    const viewRight: number = context.viewport.rightIndex;
    if (sheet.frozenRows || sheet.frozenColumns) {
        if (context.insideViewport(range[0], range[1]) || context.insideViewport(range[2], range[3])) {
            return true;
        }
    } else if (viewTop <= range[0] && viewBottom >= range[2] && viewLeft <= range[1] && viewRight >= range[3]) {
        return true;
    }
    let partiallyVisible: boolean = false;
    if (isModify) {
        // Rows entirely above or entirely below the viewport: nothing visible.
        if ((range[0] < viewTop && range[2] < viewTop) || (range[0] > viewBottom && range[2] > viewBottom)) {
            return false;
        }
        if (range[0] < viewTop && range[2] > viewTop && range[0] >= freezeRowCount) {
            range[0] = viewTop;
            partiallyVisible = true;
        }
        if (range[2] > viewBottom) {
            range[2] = viewBottom;
            partiallyVisible = true;
        }
        // Columns entirely left or entirely right of the viewport: nothing visible.
        if ((range[1] < viewLeft && range[3] < viewLeft) || (range[1] > viewRight && range[3] > viewRight)) {
            return false;
        }
        if (range[1] < viewLeft && range[3] > viewLeft && range[1] >= freezeColCount) {
            range[1] = viewLeft;
            partiallyVisible = true;
        }
        if (range[3] > viewRight) {
            range[3] = viewRight;
            partiallyVisible = true;
        }
    }
    return partiallyVisible;
}
/**
 * Computes the pixel position of a cell's top-left corner within the viewport
 * by accumulating row heights and column widths up to the target indexes.
 * When frozen panes are active, sizes of frozen rows/columns are skipped for
 * cells that scroll, and header/scroll offsets are applied for cells pinned
 * inside the frozen pane.
 *
 * @hidden
 * @param {SheetModel} sheet - Specify the sheet.
 * @param {number[]} indexes - Target cell as [rowIndex, colIndex].
 * @param {number} frozenRow - Specify the frozen row count.
 * @param {number} frozenColumn - Specify the frozen column count.
 * @param {number} freezeScrollHeight - Scrolled height inside the frozen rows.
 * @param {number} freezeScrollWidth - Scrolled width inside the frozen columns.
 * @param {number} rowHdrWidth - Row header width (defaults to 30).
 * @returns {Object} - The top/left position in pixels.
 */
export function getCellPosition(
    sheet: SheetModel, indexes: number[],
    frozenRow?: number, frozenColumn?: number, freezeScrollHeight?: number, freezeScrollWidth?: number,
    rowHdrWidth?: number): { top: number, left: number } {
    let top: number = 0;
    let left: number = 0;
    for (let rowIdx: number = 0; rowIdx < indexes[0]; rowIdx++) {
        // Skip frozen-row heights when the target row lies below the frozen pane.
        if (frozenRow && frozenRow - 1 < indexes[0] && rowIdx < frozenRow) { continue; }
        top += getRowHeight(sheet, rowIdx, true);
    }
    for (let colIdx: number = 0; colIdx < indexes[1]; colIdx++) {
        // Skip frozen-column widths when the target column lies right of the frozen pane.
        if (frozenColumn && frozenColumn - 1 < indexes[1] && colIdx < frozenColumn) { continue; }
        left += getColumnWidth(sheet, colIdx, null, true);
    }
    if (frozenRow && indexes[0] < frozenRow) {
        // Cell is pinned inside the frozen rows: account for the column header and frozen-pane scroll.
        if (sheet.showHeaders) { top += 30; }
        if (freezeScrollHeight) { top -= freezeScrollHeight; }
    }
    if (frozenColumn && indexes[1] < frozenColumn) {
        // Cell is pinned inside the frozen columns: account for the row header and frozen-pane scroll.
        if (sheet.showHeaders) { left += rowHdrWidth ? rowHdrWidth : 30; }
        if (freezeScrollWidth) { left -= freezeScrollWidth; }
    }
    return { top: top, left: left };
}
/**
* @param {Spreadsheet} parent - Specify the parent
* @param {HTMLElement} ele - Specify the element
* @param {number[]} range - Specify the range
* @param {string} cls - Specify the class name
* @param {boolean} preventAnimation - Specify the preventAnimation.
* @param {boolean} isMultiRange - Specify the multi range selection.
* @param {boolean} removeCls - Specify to remove the class from selection.
* @returns {void} - To set the position
* @hidden
*/
export function setPosition(
    parent: Spreadsheet, ele: HTMLElement, range: number[], cls: string = 'e-selection', preventAnimation?: boolean, isMultiRange?: boolean,
    removeCls?: boolean): Promise<null> | void {
    const sheet: SheetModel = parent.getActiveSheet();
    if (sheet.frozenRows || sheet.frozenColumns) {
        // With frozen panes the sheet is split into four containers (select-all,
        // column header, row header, main content). The element must live inside
        // the container that owns the range, so it may be cloned/moved below.
        let content: HTMLElement;
        const frozenRow: number = parent.frozenRowCount(sheet); const frozenCol: number = parent.frozenColCount(sheet);
        if (cls === 'e-active-cell') {
            if (range[0] < frozenRow || range[1] < frozenCol) {
                // Active cell lies (at least partly) inside a frozen pane: hide the
                // main-content element and position a clone inside the owning pane.
                ele.style.display = 'none';
                content = range[0] < frozenRow && range[1] < frozenCol ? parent.getSelectAllContent() :
                    (range[0] < frozenRow ? parent.getColumnHeaderContent() : parent.getRowHeaderContent());
                let rangeEle: HTMLElement = content.querySelector('.' + cls);
                if (!rangeEle) { rangeEle = ele.cloneNode(true) as HTMLElement; content.appendChild(rangeEle); }
                ele = rangeEle;
                locateElem(
                    parent, ele, range, sheet, parent.enableRtl, frozenRow, frozenCol, preventAnimation, true,
                    parent.viewport.beforeFreezeHeight, parent.viewport.beforeFreezeWidth, parent.sheetModule.colGroupWidth);
            } else {
                locateElem(parent, ele, range, sheet, parent.enableRtl, frozenRow, frozenCol, preventAnimation);
            }
            if (ele.style.display) { ele.style.display = ''; }
            // Clean up stale active-cell clones from the panes that do not own the range.
            removeRangeEle(parent.getSelectAllContent(), content, 'e-active-cell');
            removeRangeEle(parent.getColumnHeaderContent(), content, 'e-active-cell');
            removeRangeEle(parent.getRowHeaderContent(), content, 'e-active-cell');
        } else if (cls === 'e-autofill') {
            // Autofill handle: same pane-ownership handling as the active cell.
            let contentElem: HTMLElement;
            const freezeRow: number = parent.frozenRowCount(sheet); const freezeCol: number = parent.frozenColCount(sheet);
            if (range[0] < freezeRow || range[1] < freezeCol) {
                ele.style.display = 'none';
                contentElem = range[0] < freezeRow && range[1] < freezeCol ? parent.getSelectAllContent() :
                    (range[0] < freezeRow ? parent.getColumnHeaderContent() : parent.getRowHeaderContent());
                let rangeEle: HTMLElement = contentElem.querySelector('.' + cls);
                if (!rangeEle) { rangeEle = ele.cloneNode(true) as HTMLElement; contentElem.appendChild(rangeEle); }
                ele = rangeEle;
                locateElem(
                    parent, ele, range, sheet, parent.enableRtl, freezeRow, freezeCol, preventAnimation, true,
                    parent.viewport.beforeFreezeHeight, parent.viewport.beforeFreezeWidth, parent.sheetModule.colGroupWidth, 'e-autofill');
            }
            else {
                locateElem(
                    parent, ele, range, sheet, parent.enableRtl, freezeRow, freezeCol, preventAnimation, false, 0, 0, 0, 'e-autofill');
            }
            if (ele.style.display) { ele.style.display = ''; }
            removeRangeEle(parent.getSelectAllContent(), contentElem, 'e-autofill');
            removeRangeEle(parent.getColumnHeaderContent(), contentElem, 'e-autofill');
            removeRangeEle(parent.getRowHeaderContent(), contentElem, 'e-autofill');
        } else if (cls === 'e-filloption') {
            // Autofill option button: when the range ends exactly on a freeze
            // boundary the button stays in the main content with a small pixel
            // offset; otherwise it is re-parented into the owning pane.
            let contentElem: HTMLElement;
            const freezeRow: number = parent.frozenRowCount(sheet); const freezeCol: number = parent.frozenColCount(sheet);
            if ((range[0] < freezeRow || range[1] < freezeCol)) {
                if (range[3] + 1 === freezeCol && range[2] + 1 > freezeRow) {
                    locateElem(
                        parent, parent.getMainContent().querySelector('.e-filloption'), range, sheet, parent.enableRtl, freezeRow,
                        freezeCol, preventAnimation, false, 0, 0, 0, 'e-filloption', true, { left: -4 });
                } else if (range[2] + 1 === freezeRow && range[3] + 1 > freezeCol) {
                    locateElem(
                        parent, parent.getMainContent().querySelector('.e-filloption'), range, sheet, parent.enableRtl, freezeRow,
                        freezeCol, preventAnimation, false, 0, 0, 0, 'e-filloption', true, { top: -4 });
                } else if (range[3] + 1 === freezeCol && range[2] + 1 < freezeRow) { // for upper side
                    contentElem = parent.getColumnHeaderContent();
                    const rangeElem: HTMLElement = contentElem.querySelector('.' + cls);
                    if (!rangeElem) {
                        // Button not yet in this pane: ask the autofill module to (re)create its drop-down there.
                        parent.notify(getautofillDDB, { id: parent.element.id + '_autofilloptionbtn', appendElem: contentElem });
                    }
                    ele = parent.autofillModule.autoFillDropDown.element;
                    locateElem(
                        parent, ele, range, sheet, parent.enableRtl, freezeRow, freezeCol, preventAnimation, false, 0, 0, 0, 'e-filloption',
                        true, { left: -4 });
                } else if (range[2] + 1 === freezeRow && range[3] + 1 === freezeCol) { // corner cell
                    locateElem(
                        parent, parent.getMainContent().querySelector('.e-filloption'), range, sheet, parent.enableRtl, freezeRow,
                        freezeCol, preventAnimation, false, 0, 0, 0, 'e-filloption', true, { top: -4, left: -4 });
                }
                else {
                    contentElem = range[0] < freezeRow && range[1] < freezeCol ? parent.getSelectAllContent() :
                        (range[0] < freezeRow ? parent.getColumnHeaderContent() : parent.getRowHeaderContent());
                    const rangeEle: HTMLElement = contentElem.querySelector('.' + cls);
                    if (!rangeEle) {
                        parent.notify(getautofillDDB, { id: parent.element.id + '_autofilloptionbtn', appendElem: contentElem });
                    }
                    ele = parent.autofillModule.autoFillDropDown.element;
                    locateElem(
                        parent, ele, range, sheet, parent.enableRtl, freezeRow, freezeCol, preventAnimation, true, parent.viewport.
                        beforeFreezeHeight, parent.viewport.beforeFreezeWidth, parent.sheetModule.colGroupWidth, 'e-filloption', true);
                }
            }
            else {
                locateElem(
                    parent, parent.getMainContent().querySelector('.e-filloption'), range, sheet, parent.enableRtl, freezeRow, freezeCol,
                    preventAnimation, false, 0, 0, 0, 'e-filloption', true);
            }
            if (ele.style.display) { ele.style.display = ''; }
            removeRangeEle(parent.getSelectAllContent(), contentElem, 'e-filloption');
            removeRangeEle(parent.getColumnHeaderContent(), contentElem, 'e-filloption');
            removeRangeEle(parent.getRowHeaderContent(), contentElem, 'e-filloption');
        } else {
            // Selection / copy-indicator / range-indicator: a range that crosses a
            // freeze boundary is split into per-pane sub-ranges, each rendered by
            // its own element inside the owning container.
            const swapRange: number[] = getSwapRange(range);
            if (swapRange[0] < frozenRow || swapRange[1] < frozenCol) {
                if (!ele.classList.contains('e-multi-range')) {
                    ele.classList.add('e-hide');
                }
                const ranges: number[][] = [];
                if (swapRange[0] < frozenRow && swapRange[1] < frozenCol) {
                    // Range starts inside the top-left (select-all) pane.
                    if (swapRange[2] < frozenRow && swapRange[3] < frozenCol) {
                        ranges.push(range);
                        if (!isMultiRange) {
                            removeRangeEle(parent.getColumnHeaderContent(), content, cls, true);
                            removeRangeEle(parent.getRowHeaderContent(), content, cls, true);
                        }
                    } else if (swapRange[2] > frozenRow - 1) {
                        if (swapRange[3] < frozenCol) {
                            // Crosses the row boundary only: split into top and bottom parts.
                            if (!isMultiRange) {
                                removeRangeEle(parent.getColumnHeaderContent(), content, cls, true);
                            }
                            ranges.push([swapRange[0], swapRange[1], frozenRow - 1, swapRange[3]]);
                            ranges.push([frozenRow, swapRange[1], swapRange[2], swapRange[3]]);
                        } else {
                            // Crosses both boundaries: split into four quadrants.
                            ranges.push([swapRange[0], swapRange[1], frozenRow - 1, frozenCol - 1]);
                            ranges.push([frozenRow, swapRange[1], swapRange[2], frozenCol - 1]);
                            ranges.push([swapRange[0], frozenCol, frozenRow - 1, swapRange[3]]);
                            ranges.push([frozenRow, frozenCol, swapRange[2], swapRange[3]]);
                        }
                    } else {
                        if (swapRange[2] < frozenRow) {
                            // Crosses the column boundary only: split into left and right parts.
                            ranges.push([swapRange[0], swapRange[1], swapRange[2], frozenCol - 1]);
                            ranges.push([swapRange[0], frozenCol, swapRange[2], swapRange[3]]);
                            if (!isMultiRange) {
                                removeRangeEle(parent.getRowHeaderContent(), content, cls, true);
                            }
                        } else {
                            ranges.push([frozenRow, swapRange[1], swapRange[2], frozenCol - 1]);
                            ranges.push([swapRange[0], swapRange[1], frozenRow - 1, frozenCol - 1]);
                            ranges.push([frozenRow, frozenCol, swapRange[2], swapRange[3]]);
                            ranges.push([swapRange[0], frozenCol, frozenRow - 1, swapRange[3]]);
                        }
                    }
                } else if (swapRange[0] < frozenRow) {
                    // Range starts inside the frozen rows (column-header pane).
                    if (swapRange[2] < frozenRow) {
                        ranges.push(range);
                        if (!isMultiRange) {
                            removeRangeEle(parent.getRowHeaderContent(), content, cls, true);
                        }
                    } else {
                        ranges.push([swapRange[0], swapRange[1], frozenRow - 1, swapRange[3]]);
                        ranges.push([frozenRow, swapRange[1], swapRange[2], swapRange[3]]);
                        if (!isMultiRange) {
                            removeRangeEle(parent.getSelectAllContent(), content, cls, true);
                            removeRangeEle(parent.getRowHeaderContent(), content, cls, true);
                        }
                    }
                } else {
                    // Range starts inside the frozen columns (row-header pane).
                    if (swapRange[3] < frozenCol) {
                        ranges.push(range);
                        if (!isMultiRange) {
                            removeRangeEle(parent.getSelectAllContent(), content, cls, true);
                        }
                    } else {
                        ranges.push([swapRange[0], swapRange[1], swapRange[2], frozenCol - 1]);
                        ranges.push([swapRange[0], frozenCol, swapRange[2], swapRange[3]]);
                        if (!isMultiRange) {
                            removeRangeEle(parent.getSelectAllContent(), content, cls, true);
                            removeRangeEle(parent.getColumnHeaderContent(), content, cls, true);
                        }
                    }
                }
                let removeEle: Element;
                // Render one element per sub-range in the pane that owns it.
                ranges.forEach((rng: number[]): void => {
                    content = rng[2] < frozenRow && rng[3] < frozenCol ? parent.getSelectAllContent() :
                        (rng[2] < frozenRow ? parent.getColumnHeaderContent() : (rng[3] < frozenCol ?
                            parent.getRowHeaderContent() : parent.getMainContent() as HTMLElement));
                    let rangeEle: HTMLElement;
                    if (cls === 'e-copy-indicator' || cls === 'e-range-indicator') {
                        rangeEle = ele.cloneNode(true) as HTMLElement; content.appendChild(rangeEle);
                        // Strip the border edge that abuts an adjacent sub-range so the
                        // split indicator reads as a single continuous rectangle.
                        if (frozenRow) {
                            if (rng[2] + 1 === frozenRow) {
                                ranges.forEach((subRng: number[]): void => {
                                    if (subRng !== rng) {
                                        removeEle = rangeEle.getElementsByClassName('e-bottom')[0];
                                        if (removeEle && subRng[0] === frozenRow) { detach(removeEle); }
                                    }
                                });
                            }
                            if (rng[0] === frozenRow && content.parentElement.classList.contains('e-main-panel')) {
                                ranges.forEach((subRng: number[]): void => {
                                    if (subRng !== rng) {
                                        removeEle = rangeEle.getElementsByClassName('e-top')[0];
                                        if (removeEle && subRng[2] + 1 === frozenRow) { detach(removeEle); }
                                    }
                                });
                            }
                        }
                        if (frozenCol) {
                            if (rng[3] + 1 === frozenCol) {
                                ranges.forEach((subRng: number[]): void => {
                                    if (subRng !== rng) {
                                        removeEle = rangeEle.getElementsByClassName('e-right')[0];
                                        if (removeEle && subRng[1] === frozenCol) { detach(removeEle); }
                                    }
                                });
                            }
                            if (rng[1] === frozenCol && (content.classList.contains('e-sheet-content') || content.classList.contains('e-column-header'))) {
                                ranges.forEach((subRng: number[]): void => {
                                    if (subRng !== rng) {
                                        removeEle = rangeEle.getElementsByClassName('e-left')[0];
                                        if (removeEle && subRng[3] + 1 === frozenCol) { detach(removeEle); }
                                    }
                                });
                            }
                        }
                    } else {
                        // Selection: reuse the pane's existing element, or clone one in.
                        rangeEle = content.querySelector('.' + cls);
                        if (!rangeEle) {
                            rangeEle = ele.cloneNode(true) as HTMLElement;
                            if (isMultiRange && !rangeEle.classList.contains('e-multi-range')) {
                                rangeEle.classList.add('e-multi-range');
                            }
                            content.appendChild(rangeEle);
                        }
                        if (removeCls) {
                            rangeEle.classList.remove(cls);
                        }
                    }
                    locateElem(
                        parent, rangeEle, rng, sheet, parent.enableRtl, frozenRow, frozenCol, preventAnimation, false,
                        parent.viewport.beforeFreezeHeight, parent.viewport.beforeFreezeWidth, parent.sheetModule.colGroupWidth);
                    if (rangeEle.classList.contains('e-hide')) {
                        rangeEle.classList.remove('e-hide');
                    }
                });
            } else {
                // Range lies entirely in the scrollable main content.
                if (!isMultiRange) {
                    removeRangeEle(parent.getSelectAllContent(), null, cls, true);
                    removeRangeEle(parent.getColumnHeaderContent(), null, cls, true);
                    removeRangeEle(parent.getRowHeaderContent(), null, cls, true);
                }
                locateElem(parent, ele, range, sheet, parent.enableRtl, frozenRow, frozenCol, preventAnimation);
                if (cls === 'e-range-indicator' || !parent.getMainContent().querySelector('.' + cls)) {
                    parent.getMainContent().appendChild(ele);
                }
                if (ele.classList.contains('e-hide')) {
                    ele.classList.remove('e-hide');
                }
                if (removeCls) {
                    ele.classList.remove(cls);
                }
            }
        }
    } else {
        // No frozen panes: position in the main content and return the animation promise.
        const promise: Promise<null> = locateElem(parent, ele, range, sheet, parent.enableRtl, 0, 0, preventAnimation) as Promise<null>;
        if (ele && !parent.getMainContent().querySelector('.' + cls)) { parent.getMainContent().appendChild(ele); }
        return promise;
    }
}
/**
 * Removes (or strips the class from) range elements of the given class inside a
 * pane container, skipping the container that currently owns the range.
 *
 * @param {Element} content - Pane container to clean up.
 * @param {HTMLElement} checkEle - Container that owns the range; skipped unless isSelection is set.
 * @param {string} cls - Class name of the range element(s).
 * @param {boolean} isSelection - Clean up even when content is the owning container.
 * @param {boolean} removeCls - Strip the class instead of detaching the element.
 * @returns {void} - remove element with given range
 */
export function removeRangeEle(content: Element, checkEle: HTMLElement, cls: string, isSelection?: boolean, removeCls?: boolean): void {
    if (!isSelection && content === checkEle) {
        return; // This pane owns the range element; leave it alone.
    }
    if (removeCls) {
        const matches: NodeListOf<Element> = content.querySelectorAll('.' + cls);
        for (let idx: number = 0; idx < matches.length; idx++) {
            matches[idx].classList.remove(cls);
        }
    } else {
        const target: Element = content.querySelector('.' + cls);
        if (target) {
            detach(target);
        }
    }
}
/**
 * Position element with given range
 *
 * @hidden
 * @param {Spreadsheet} parent - Specify the spreadsheet instance.
 * @param {HTMLElement} ele - Specify the element.
 * @param {number[]} range - specify the range.
 * @param {SheetModel} sheet - Specify the sheet.
 * @param {boolean} isRtl - Specify whether right-to-left layout is enabled.
 * @param {number} frozenRow - Specify the frozen row count.
 * @param {number} frozenColumn - Specify the frozen column count.
 * @param {boolean} preventAnimation - Specify the preventAnimation.
 * @param {boolean} isActiveCell - Specify whether the element is the active cell.
 * @param {number} freezeScrollHeight - Specify the freeze scroll height
 * @param {number} freezeScrollWidth - Specify the freeze scroll width
 * @param {number} rowHdrWidth - Specify the row header width
 * @param {string} cls - Specify the class; when given, the element is point-positioned (autofill/filloption) instead of stretched over the range.
 * @param {boolean} isFillOptShow - Specify whether the fill option button is being shown.
 * @param {Object} freezeFillOpt - Explicit position override for the fill option on freeze boundaries.
 * @param {number} freezeFillOpt.top - Override top position in pixels.
 * @param {number} freezeFillOpt.left - Override left position in pixels.
 * @returns {Promise<null> | void} - Resolves when the style update has been applied (range mode only).
 */
export function locateElem(
    parent: Spreadsheet, ele: HTMLElement, range: number[], sheet: SheetModel, isRtl: boolean, frozenRow: number, frozenColumn: number,
    preventAnimation?: boolean, isActiveCell?: boolean, freezeScrollHeight?: number, freezeScrollWidth?: number, rowHdrWidth?: number,
    cls?: string, isFillOptShow?: boolean, freezeFillOpt?: {top?: number; left?: number}): Promise<null> | void {
    // Normalize the range so start <= end on both axes, then resolve its pixel origin.
    const swapRange: number[] = getSwapRange(range);
    const cellPosition: { top: number, left: number } = getCellPosition(
        sheet, swapRange, frozenRow, frozenColumn, freezeScrollHeight, freezeScrollWidth, rowHdrWidth);
    // First visible row/column index (leading hidden rows/columns are skipped).
    const startIndex: number[] = [skipHiddenIdx(sheet, 0, true), skipHiddenIdx(sheet, 0, true, 'columns')];
    let height: number; let width: number;
    if (parent.scrollSettings.isFinite) {
        // Finite scrolling: clamp the measured extent to the sheet's row/col counts.
        height = swapRange[0] >= sheet.rowCount ? 0 : getRowsHeight(
            sheet, swapRange[0], swapRange[2] < sheet.rowCount ? swapRange[2] : sheet.rowCount - 1, true);
        width = swapRange[1] >= sheet.colCount ? 0 : getColumnsWidth(
            sheet, swapRange[1], swapRange[3] < sheet.colCount ? swapRange[3] : sheet.colCount - 1, true);
    } else {
        height = getRowsHeight(sheet, swapRange[0], swapRange[2], true);
        width = getColumnsWidth(sheet, swapRange[1], swapRange[3], true);
    }
    const isRowSelected: boolean = (swapRange[1] === 0 && swapRange[3] === sheet.colCount - 1);
    const isColSelected: boolean = (swapRange[0] === 0 && swapRange[2] === sheet.rowCount - 1);
    // tdiff/ldiff: pixel nudges applied to the point position; otdiff/oldiff: extra
    // offsets used only for the fill-option button.
    let top: number = 0; let tdiff: number = -5;
    let ldiff: number = -5;
    let left: number = 0;
    let otdiff: number = 6;
    let oldiff: number = 6;
    if (isNullOrUndefined(cls)) {
        // Range mode: stretch the element over the whole range. A 1px (DPR-adjusted)
        // overlap is applied unless the range starts at the first visible row/column.
        const attrs: { [key: string]: string } = {
            'top': (swapRange[0] === startIndex[0] ? cellPosition.top : cellPosition.top - getDPRValue(1)) + 'px',
            'height': height && height + (swapRange[0] === startIndex[0] ? 0 : getDPRValue(1)) + 'px',
            'width': width && width + (swapRange[1] === startIndex[1] ? 0 : getDPRValue(1)) + (isActiveCell
                && frozenColumn && swapRange[1] < frozenColumn && swapRange[3] >= frozenColumn ? 1 : 0) + 'px'
        };
        attrs[isRtl ? 'right' : 'left'] = (swapRange[1] === startIndex[1] ? cellPosition.left : cellPosition.left - 1) + 'px';
        if (ele) {
            const promise: Promise<null> = setStyleAttribute([{ element: ele, attrs: attrs }], preventAnimation);
            return promise;
        }
    } else {
        // Point mode (autofill handle / fill option): anchor at the range's
        // bottom-right corner, with nudges for full-row/full-column selections.
        if (isRowSelected) {
            tdiff = -5;
            ldiff = -2;
            otdiff = 6;
            oldiff = 3;
        }
        if (isColSelected) {
            ldiff = -5;
            tdiff = 0;
            otdiff = 1;
            oldiff = 6;
        }
        if (!isColSelected) {
            top += height;
        }
        if (!isRowSelected)
        {
            left += width;
        }
        top += Math.round(cellPosition.top) + tdiff;
        left += Math.round(cellPosition.left) + ldiff;
        let attrs: { [key: string]: string } = {};
        if (isFillOptShow) {
            removeClass([ele], 'e-hide');
            // Freeze-boundary overrides take precedence over the computed position.
            top = freezeFillOpt && freezeFillOpt.top ? freezeFillOpt.top : top;
            left = freezeFillOpt && freezeFillOpt.left ? freezeFillOpt.left : left;
            attrs = {
                'top': top + otdiff + 'px',
                'left': left + oldiff + 'px'
            };
            if (ele) { setStyleAttribute([{ element: ele, attrs: attrs }], preventAnimation); }
        } else {
            attrs = {
                'top': top + 'px'
            };
            attrs[isRtl ? 'right' : 'left'] = left + 'px';
            if (ele) { setStyleAttribute([{ element: ele, attrs: attrs }], preventAnimation); }
        }
    }
}
/**
 * Applies the given style attributes to their elements, by default on the next
 * animation frame (pass `preventAnimation` to apply synchronously).
 *
 * Fix: the returned promise is now resolved exactly once, after ALL styles have
 * been applied. Previously `resolve()` was invoked inside the `forEach` (once
 * per entry), and an empty `styles` array left the promise pending forever,
 * hanging any caller awaiting `setPosition`'s return value.
 *
 * @hidden
 * @param {StyleType[]} styles - Element/attribute pairs to apply.
 * @param {boolean} preventAnimation - Apply immediately instead of via requestAnimationFrame.
 * @returns {Promise<null>} - Resolves once every style has been applied.
 */
export function setStyleAttribute(styles: StyleType[], preventAnimation?: boolean): Promise<null> {
    return new Promise((resolve: Function) => {
        const setStyleFn: Function = () => {
            styles.forEach((style: StyleType): void => {
                setBaseStyleAttribute(style.element as HTMLElement, style.attrs);
            });
            resolve(); // resolve once, even when `styles` is empty
        };
        if (preventAnimation) {
            setStyleFn();
        } else {
            requestAnimationFrame(() => setStyleFn());
        }
    });
}
/**
 * @hidden
 * @returns {string} - Event name(s) to bind for press-start, depending on pointer support.
 */
export function getStartEvent(): string {
    if (Browser.isPointer) {
        return 'pointerdown';
    }
    return 'mousedown touchstart';
}
/**
 * @hidden
 * @returns {string} - Event name(s) to bind for move, depending on pointer support.
 */
export function getMoveEvent(): string {
    if (Browser.isPointer) {
        return 'pointermove';
    }
    return 'mousemove touchmove';
}
/**
 * @hidden
 * @returns {string} - Event name(s) to bind for press-end, depending on pointer support.
 */
export function getEndEvent(): string {
    if (Browser.isPointer) {
        return 'pointerup';
    }
    return 'mouseup touchend';
}
/**
 * Checks whether the event represents a touch-initiated press start.
 *
 * @hidden
 * @param {Event} e - To specify the event.
 * @returns {boolean} - True for 'touchstart' or a touch-typed 'pointerdown'.
 */
export function isTouchStart(e: Event): boolean {
    if (e.type === 'touchstart') { return true; }
    return e.type === 'pointerdown' && (e as PointerEvent).pointerType === 'touch';
}
/**
 * Checks whether the event represents a touch-driven move.
 *
 * @hidden
 * @param {Event} e - To specify the event.
 * @returns {boolean} - True for 'touchmove' or a touch-typed 'pointermove'.
 */
export function isTouchMove(e: Event): boolean {
    if (e.type === 'touchmove') { return true; }
    return e.type === 'pointermove' && (e as PointerEvent).pointerType === 'touch';
}
/**
 * Checks whether the event represents a touch-initiated press end.
 *
 * @hidden
 * @param {Event} e - To specify the event.
 * @returns {boolean} - True for 'touchend' or a touch-typed 'pointerup'.
 */
export function isTouchEnd(e: Event): boolean {
    if (e.type === 'touchend') { return true; }
    return e.type === 'pointerup' && (e as PointerEvent).pointerType === 'touch';
}
/**
 * Checks whether the event is a mouse/pointer press start.
 *
 * @hidden
 * @param {MouseEvent} e - To specify the mouse event.
 * @returns {boolean} - True for 'mousedown' or 'pointerdown'.
 */
export function isMouseDown(e: MouseEvent): boolean {
    return e && ['mousedown', 'pointerdown'].indexOf(e.type) > -1;
}
/**
 * Checks whether the event is a mouse/pointer move.
 *
 * @param {MouseEvent} e - Specify the event.
 * @returns {boolean} - True for 'mousemove' or 'pointermove'.
 * @hidden
 */
export function isMouseMove(e: MouseEvent): boolean {
    return e && ['mousemove', 'pointermove'].indexOf(e.type) > -1;
}
/**
 * Checks whether the event is a mouse/pointer press end.
 *
 * @param {MouseEvent} e - Specify the event.
 * @returns {boolean} - True for 'mouseup' or 'pointerup'.
 * @hidden
 */
export function isMouseUp(e: MouseEvent): boolean {
    return e && ['mouseup', 'pointerup'].indexOf(e.type) > -1;
}
/**
 * Reads the horizontal client coordinate from a mouse or touch event.
 *
 * @param {MouseEvent | TouchEvent} e - To specify the mouse or touch event.
 * @returns {number} - The clientX of the first changed touch, or of the mouse event.
 * @hidden
 */
export function getClientX(e: TouchEvent & MouseEvent): number {
    if (e.changedTouches) {
        return e.changedTouches[0].clientX;
    }
    return e.clientX;
}
/**
 * Reads the vertical client coordinate from a mouse or touch event.
 *
 * @hidden
 * @param {MouseEvent | TouchEvent} e - To specify the mouse and touch event.
 * @returns {number} - The clientY of the first changed touch, or of the mouse event.
 */
export function getClientY(e: MouseEvent & TouchEvent): number {
    if (e.changedTouches) {
        return e.changedTouches[0].clientY;
    }
    return e.clientY;
}
/**
 * Rounds a pixel value so it lands on a physical-pixel boundary for fractional
 * device pixel ratios (values pass through untouched on integral DPRs).
 *
 * @param {number} value - Specify the number.
 * @param {boolean} preventDecrease - Always round up to the next physical pixel.
 * @returns {number} - The DPR-adjusted value.
 * @hidden
 */
export function getDPRValue(value: number, preventDecrease?: boolean): number {
    const dpr: number = window.devicePixelRatio;
    if (!(dpr % 1 > 0)) {
        return value; // integral DPR: no adjustment needed
    }
    const fraction: number = (value * dpr) % 1;
    if (!fraction) {
        return value;
    }
    // Round to the nearer physical pixel, unless preventDecrease forces rounding up.
    const adjustment: number = (fraction > 0.5 || preventDecrease) ? (1 - fraction) : -fraction;
    return value + adjustment / dpr;
}
// Maps IAriaOptions keys to the DOM attribute names written by setAriaOptions().
const config: IAriaOptions<string> = {
    role: 'role',
    selected: 'aria-selected',
    multiselectable: 'aria-multiselectable',
    busy: 'aria-busy',
    colcount: 'aria-colcount'
};
/**
 * Writes the given ARIA options onto the target element, translating option
 * keys to attribute names via the module-level `config` map. No-op when the
 * target is null/undefined.
 *
 * @hidden
 * @param {HTMLElement} target - specify the element.
 * @param {IAriaOptions<boolean>} options - Specify the options.
 * @returns {void} - to set Aria Options
 */
export function setAriaOptions(target: HTMLElement, options: IAriaOptions<boolean>): void {
    for (const name of Object.keys(options)) {
        if (target) { target.setAttribute(config[name], <string>options[name]); }
    }
}
/**
 * Destroys the ej2 component instance attached to the given element, if any.
 *
 * @hidden
 * @param {HTMLElement} element - specify the element.
 * @param {Object} component - Specify the component class to look up.
 * @returns {void} - to destroy the component.
 */
export function destroyComponent(element: HTMLElement, component: Object): void {
    if (!element) { return; }
    const instance: Object = getComponent(element, component);
    if (instance) {
        (<{ destroy: Function }>instance).destroy();
    }
}
/**
* @hidden
* @param {number} idx - Specify the index
* @param {number} index - Specify the index
* @param {string} value - Specify the value.
* @param {boolean} isCol - Specify the boolean value.
* @param {Spreadsheet} parent - Specify the parent.
* @returns {void} - To set resize.
*/
export function setResize(idx: number, index: number, value: string, isCol: boolean, parent: Spreadsheet): void {
let curEle: HTMLElement;
let curEleH: HTMLElement;
let curEleC: HTMLElement;
let preEle: HTMLElement;
let preEleH: HTMLElement;
let preEleC: HTMLElement;
let nxtEle: HTMLElement;
let nxtEleH: HTMLElement;
let nxtEleC: HTMLElement;
const sheet: SheetModel = parent.getActiveSheet();
const frozenRow: number = parent.frozenRowCount(sheet); const frozenCol: number = parent.frozenColCount(sheet);
if (isCol) {
const header: Element = idx < frozenCol ? parent.getSelectAllContent() : parent.getColumnHeaderContent();
curEle = header.getElementsByTagName('th')[index]; curEleH = header.getElementsByTagName('col')[index];
curEleC = (idx < frozenCol ? parent.getRowHeaderContent() : parent.getMainContent()).getElementsByTagName('col')[index];
} else {
curEle = curEleH = frozenRow || frozenCol ? parent.getRow(idx, null, frozenCol - 1) :
parent.getRow(idx, parent.getRowHeaderTable());
curEleH.style.height = parseInt(value, 10) > 0 ? getDPRValue(parseInt(value, 10)) + 'px' : '2px';
curEleC = parent.getRow(idx, null, frozenCol);
curEleC.style.height = parseInt(value, 10) > 0 ? getDPRValue(parseInt(value, 10)) + 'px' : '0px';
let hdrFntSize: number;
if (sheet.showHeaders) {
const hdrRow: HTMLCollectionOf<HTMLTableRowElement> =
parent.getRowHeaderContent().getElementsByClassName('e-row') as HTMLCollectionOf<HTMLTableRowElement>;
const hdrClone: HTMLElement[] = [];
hdrClone[0] = hdrRow[index].getElementsByTagName('td')[0].cloneNode(true) as HTMLElement;
hdrFntSize = findMaxValue(parent.getRowHeaderTable(), hdrClone, false, parent) + 1;
}
const contentRow: HTMLCollectionOf<HTMLTableRowElement> =
parent.getMainContent().getElementsByClassName('e-row') as HTMLCollectionOf<HTMLTableRowElement>;
const contentClone: HTMLElement[] = [];
for (let idx: number = 0; idx < contentRow[index].getElementsByTagName('td').length; idx++) {
contentClone[idx] = contentRow[index].getElementsByTagName('td')[idx].cloneNode(true) as HTMLElement;
}
const cntFntSize: number = findMaxValue(parent.getContentTable(), contentClone, false, parent) + 1;
const fntSize: number = hdrFntSize >= cntFntSize ? hdrFntSize : cntFntSize;
if (parseInt(curEleC.style.height, 10) < fntSize ||
(curEle && curEle.classList.contains('e-reach-fntsize') && parseInt(curEleC.style.height, 10) === fntSize)) {
if (sheet.showHeaders) {
curEle.classList.add('e-reach-fntsize');
curEleH.style.lineHeight = parseInt(value, 10) >= 4 ? ((parseInt(value, 10)) - 4) + 'px' :
parseInt(value, 10) > 0 ? ((parseInt(value, 10)) - 1) + 'px' : '0px';
}
curEleC.style.lineHeight = parseInt(value, 10) > 0 ? ((parseInt(value, 10)) - 1) + 'px' : '0px';
} else {
if (curEleH) { curEleH.style.removeProperty('line-height'); }
curEleC.style.removeProperty('line-height');
if (curEle && curEle.classList.contains('e-reach-fntsize')) {
curEle.classList.remove('e-reach-fntsize');
}
}
}
preEleC = curEleC.previousElementSibling as HTMLElement;
nxtEleC = curEleC.nextElementSibling as HTMLElement;
if (preEleC) {
if (sheet.showHeaders) {
preEle = curEle.previousElementSibling as HTMLElement;
preEleH = curEleH.previousElementSibling as HTMLElement;
}
preEleC = curEleC.previousElementSibling as HTMLElement;
}
if (nxtEleC) {
if (sheet.showHeaders) {
nxtEle = curEle.nextElementSibling as HTMLElement;
nxtEleH = curEleH.nextElementSibling as HTMLElement;
}
nxtEleC = curEleC.nextElementSibling as HTMLElement;
}
if (parseInt(value, 10) <= 0 && !(curEleC.classList.contains('e-zero') || curEleC.classList.contains('e-zero-start'))) {
if (preEleC && nxtEleC) {
if (isCol) {
if (sheet.showHeaders) { curEleH.style.width = '2px'; }
curEleC.style.width = '0px';
} else {
if (sheet.showHeaders) { curEleH.style.height = '2px'; }
curEleC.style.height = '0px';
}
if (preEleC.classList.contains('e-zero-start')) {
if (sheet.showHeaders) { curEle.classList.add('e-zero-start'); }
curEleC.classList.add('e-zero-start');
} else {
if (sheet.showHeaders) { curEle.classList.add('e-zero'); }
curEleC.classList.add('e-zero');
}
if (nxtEle && !nxtEle.classList.contains('e-zero') && !nxtEle.classList.contains('e-zero-last')) {
if (sheet.showHeaders) { curEle.classList.add('e-zero-last'); }
curEleC.classList.add('e-zero-last');
}
if (preEleC.classList.contains('e-zero-last')) {
if (sheet.showHeaders) { preEle.classList.remove('e-zero-last'); }
preEleC.classList.remove('e-zero-last');
}
if (sheet.showHeaders && preEle.classList.contains('e-zero')) {
if (curEle.classList.contains('e-zero-end')) {
setWidthAndHeight(preEleH, -2, isCol);
} else {
setWidthAndHeight(preEleH, -2, isCol);
}
} else if (sheet.showHeaders) {
setWidthAndHeight(preEleH, -1, isCol);
}
if (sheet.showHeaders && preEle.classList.contains('e-zero-start')) {
setWidthAndHeight(curEleH, -1, isCol);
}
if (sheet.showHeaders && nxtEle.classList.contains('e-zero')) {
if (curEle.classList.contains('e-zero-start')) {
while (nxtEle) {
if (nxtEle.classList.contains('e-zero') && (parseInt(nxtEleH.style.height, 10) !== 0 && !isCol) ||
(parseInt(nxtEleH.style.width, 10) !== 0 && isCol)) {
if (isCol) {
curEleH.style.width = parseInt(curEleH.style.width, 10) - 1 + 'px';
nxtEleH.style.width = parseInt(nxtEleH.style.width, 10) - 1 + 'px';
} else {
curEleH.style.height = parseInt(curEleH.style.height, 10) - 1 + 'px';
nxtEleH.style.height = parseInt(nxtEleH.style.height, 10) - 1 + 'px';
}
nxtEle.classList.remove('e-zero');
nxtEle.classList.add('e-zero-start');
break;
} else {
let nxtIndex: number;
nxtEle.classList.remove('e-zero');
nxtEle.classList.add('e-zero-start');
if (isCol) {
nxtIndex = parseInt(nxtEle.getAttribute('aria-colindex'), 10) - 1;
nxtEle = parent.getColHeaderTable().getElementsByTagName('th')[nxtIndex + 1];
nxtEleH = parent.getColHeaderTable().getElementsByTagName('col')[nxtIndex + 1];
} else {
nxtIndex = parseInt(nxtEle.getAttribute('aria-rowindex'), 10) - 1;
nxtEle = parent.getRowHeaderTable().getElementsByTagName('tr')[nxtIndex + 1];
nxtEleH = parent.getRowHeaderTable().getElementsByTagName('tr')[nxtIndex + 1];
}
}
}
} else {
setWidthAndHeight(curEleH, -2, isCol);
}
} else if (sheet.showHeaders) {
if (nxtEle.classList.contains('e-zero-end')) {
if (isCol) {
curEleH.style.width = '0px';
} else {
curEleH.style.height = '0px';
}
} else {
setWidthAndHeight(nxtEleH, -1, isCol);
}
}
} else if (preEleC) {
if (isCol) {
if (sheet.showHeaders) { curEleH.style.width = '1px'; }
curEleC.style.width = '0px';
} else {
if (sheet.showHeaders) { curEleH.style.height = '1px'; }
curEleC.style.height = '0px';
}
if (sheet.showHeaders) { curEle.classList.add('e-zero-end'); }
curEleC.classList.add('e-zero-end');
if (sheet.showHeaders) { curEle.classList.add('e-zero-last'); }
curEleC.classList.add('e-zero-last');
if (sheet.showHeaders && preEle.classList.contains('e-zero')) {
setWidthAndHeight(preEleH, -2, isCol);
} else {
setWidthAndHeight(preEleH, -1, isCol);
}
} else if (nxtEle) {
curEle.classList.add('e-zero-start');
curEleC.classList.add('e-zero-start');
if (!nxtEle.classList.contains('e-zero')) {
curEle.classList.add('e-zero-last');
curEleC.classList.add('e-zero-last');
}
if (isCol) {
curEleH.style.width = '1px';
curEleC.style.width = '0px';
} else {
curEleH.style.height = '1px';
curEleC.style.height = '0px';
}
if (sheet.showHeaders && nxtEle.classList.contains('e-zero')) {
while (nxtEle) {
if (nxtEle.classList.contains('e-zero') && (parseInt(nxtEleH.style.width, 10) !== 0
&& isCol) || (parseInt(nxtEleH.style.height, 10) !== 0 && !isCol)) {
if (isCol) {
nxtEleH.style.width = parseInt(nxtEleH.style.width, 10) - 1 + 'px';
curEleH.style.width = parseInt(curEleH.style.width, 10) - 1 + 'px';
} else {
nxtEleH.style.height = parseInt(nxtEleH.style.height, 10) - 1 + 'px';
curEleH.style.height = parseInt(curEleH.style.height, 10) - 1 + 'px';
}
nxtEle.classList.add('e-zero-start');
nxtEle.classList.remove('e-zero');
break;
} else {
let nxtIndex: number;
nxtEle.classList.add('e-zero-start');
nxtEle.classList.remove('e-zero');
if (isCol) {
nxtIndex = parseInt(nxtEle.getAttribute('aria-colindex'), 10) - 1;
nxtEleH = parent.getColHeaderTable().getElementsByTagName('col')[nxtIndex + 1];
nxtEle = parent.getColHeaderTable().getElementsByTagName('th')[nxtIndex + 1];
} else {
nxtIndex = parseInt(nxtEle.getAttribute('aria-rowindex'), 10) - 1;
nxtEleH = parent.getRowHeaderTable().getElementsByTagName('tr')[nxtIndex + 1];
nxtEle = parent.getRowHeaderTable().getElementsByTagName('tr')[nxtIndex + 1];
}
}
}
} else if (sheet.showHeaders) {
setWidthAndHeight(nxtEleH, -1, isCol);
}
}
} else if (parseInt(value, 10) > 0) {
const DPRValue: string = getDPRValue(parseInt(value, 10)) + 'px';
if (isCol) {
curEleH.style.width = DPRValue;
curEleC.style.width = DPRValue;
} else {
curEleH.style.height = DPRValue;
curEleC.style.height = DPRValue;
}
if (sheet.showHeaders && preEle && nxtEle) {
if (preEle.classList.contains('e-zero')) {
if (curEle.classList.contains('e-zero')) {
if (isCol) {
preEleH.style.width = parseInt(preEleH.style.width, 10) + 2 + 'px';
curEleH.style.width = parseInt(curEleH.style.width, 10) - 1 + 'px';
} else {
preEleH.style.height = parseInt(preEleH.style.height, 10) + 2 + 'px';
curEleH.style.height = parseInt(curEleH.style.height, 10) - 1 + 'px';
}
} else {
setWidthAndHeight(curEleH, -1, isCol);
}
} else {
if (curEle.classList.contains('e-zero')) {
setWidthAndHeight(preEleH, 1, isCol);
} else {
if (curEle.classList.contains('e-zero-start')) {
if (isCol) {
preEleH.style.width = parseInt(preEleH.style.width, 10) + 1 + 'px';
curEleH.style.width = parseInt(curEleH.style.width, 10) - 1 + 'px';
} else {
preEleH.style.height = parseInt(preEleH.style.height, 10) + 1 + 'px';
curEleH.style.height = parseInt(curEleH.style.height, 10) - 1 + 'px';
}
}
}
}
if (nxtEle.classList.contains('e-zero')) {
setWidthAndHeight(curEleH, -1, isCol);
} else {
if (curEle.classList.contains('e-zero') || curEle.classList.contains('e-zero-start')) {
setWidthAndHeight(nxtEleH, 1, isCol);
}
}
if (curEle.classList.contains('e-zero')) { curEle.classList.remove('e-zero'); }
if (curEle.classList.contains('e-zero-start')) { curEle.classList.remove('e-zero-start'); }
if (curEleC.classList.contains('e-zero')) { curEleC.classList.remove('e-zero'); }
if (curEleC.classList.contains('e-zero-start')) { curEleC.classList.remove('e-zero-start'); }
if (curEle.classList.contains('e-zero-last')) { curEle.classList.remove('e-zero-last'); }
if (curEleC.classList.contains('e-zero-last')) { curEleC.classList.remove('e-zero-last'); }
if (preEle.classList.contains('e-zero') || preEle.classList.contains('e-zero-start')) {
preEle.classList.add('e-zero-last');
preEleC.classList.add('e-zero-last');
}
} else if (sheet.showHeaders && preEle) {
if (preEle.classList.contains('e-zero')) {
if (curEle.classList.contains('e-zero')) {
if (isCol) {
curEleH.style.width = parseInt(curEleH.style.width, 10) - 1 + 'px';
preEleH.style.width = parseInt(preEleH.style.width, 10) + 2 + 'px';
} else {
curEleH.style.height = parseInt(curEleH.style.height, 10) - 1 + 'px';
preEleH.style.height = parseInt(preEleH.style.height, 10) + 2 + 'px';
}
} else {
setWidthAndHeight(curEleH, -1, isCol);
}
} else {
if (curEle.classList.contains('e-zero')) {
setWidthAndHeight(preEleH, 1, isCol);
} else {
setWidthAndHeight(curEleH, -1, isCol);
}
}
if (curEle.classList.contains('e-zero')) { curEle.classList.remove('e-zero'); }
if (curEle.classList.contains('e-zero-end')) { curEle.classList.remove('e-zero-end'); }
if (curEleC.classList.contains('e-zero')) { curEleC.classList.remove('e-zero'); }
if (curEleC.classList.contains('e-zero-end')) { curEleC.classList.remove('e-zero-end'); }
} else if (sheet.showHeaders && nxtEle) {
if (nxtEle.classList.contains('e-zero')) {
setWidthAndHeight(curEleH, -1, isCol);
} else if (curEle.classList.contains('e-zero-start')) {
setWidthAndHeight(nxtEleH, 1, isCol);
curEle.classList.remove('e-zero-start');
}
if (curEle.classList.contains('e-zero')) { curEle.classList.remove('e-zero'); }
if (curEleC.classList.contains('e-zero')) { curEleC.classList.remove('e-zero'); }
if (curEle.classList.contains('e-zero-start')) { curEle.classList.remove('e-zero-start'); }
if (curEleC.classList.contains('e-zero-start')) { curEleC.classList.remove('e-zero-start'); }
}
}
}
/**
* @hidden
* @param {HTMLElement} trgt - Specify the target element.
* @param {number} value - specify the number.
 * @param {boolean} isCol - Specify the boolean value.
* @returns {void} - to set width and height.
*/
/**
 * Adjusts the target element's width (column mode) or height (row mode) by the
 * given pixel delta, based on its current inline style value.
 */
export function setWidthAndHeight(trgt: HTMLElement, value: number, isCol: boolean): void {
    const dimension: 'width' | 'height' = isCol ? 'width' : 'height';
    trgt.style[dimension] = `${parseInt(trgt.style[dimension], 10) + value}px`;
}
/**
* @hidden
* @param {number} lineHeight - Specify the line height for other culture text.
* @returns {void} - to set the line height for other culture text.
*/
export function setTextLineHeight(lineHeight: number): void {
    // Updates the module-level default line-height factor used by getLineHeight()
    // when no font-family-specific factor applies (e.g. for other-culture text).
    textLineHeight = lineHeight;
}
/**
* @hidden
* @param {HTMLElement} table - Specify the table.
* @param {HTMLElement[]} text - specify the text.
 * @param {boolean} isCol - Specify the boolean value.
* @param {Spreadsheet} parent - Specify the parent.
* @param {string} prevData - specify the prevData.
 * @param {boolean} isWrap - Specify the boolean value.
* @returns {number} - To find maximum value.
*/
export function findMaxValue(
    table: HTMLElement, text: HTMLElement[], isCol: boolean, parent: Spreadsheet, prevData?: string, isWrap?: boolean): number {
    // Off-screen measurement: the cells are placed into a temporary auto-sized table so the
    // browser computes the natural width (column fit) or height (row fit) of the content.
    const wrapperDiv: HTMLElement = parent.createElement('div', { className: parent.element.className, styles: 'display: block' });
    const measureTable: HTMLElement = parent.createElement('table', {
        className: table.className + 'e-resizetable',
        styles: 'width: auto;height: auto'
    });
    const baseRow: HTMLElement = parent.createElement('tr');
    if (isCol) {
        // One row per cell, so the widest cell dictates the table width.
        for (const element of text) {
            const rowClone: Element = <Element>baseRow.cloneNode();
            rowClone.appendChild(element);
            measureTable.appendChild(rowClone);
        }
    } else {
        // All cells in a single row, so the tallest cell dictates the table height.
        for (const element of text) {
            baseRow.appendChild(<Element>element.cloneNode(true));
        }
        measureTable.appendChild(baseRow);
    }
    wrapperDiv.appendChild(measureTable);
    document.body.appendChild(wrapperDiv);
    const rect = measureTable.getBoundingClientRect();
    let offsetWidthValue: number;
    let offsetHeightValue: number;
    if (!isWrap) {
        offsetWidthValue = rect.width;
        offsetHeightValue = rect.height;
    } else {
        // Wrapped content never shrinks below the previously stored size.
        const previous: number = parseInt(prevData, 10);
        offsetWidthValue = isCol && previous > rect.width ? rect.width : previous;
        offsetHeightValue = !isCol && previous > rect.height ? rect.height : previous;
    }
    document.body.removeChild(wrapperDiv);
    return Math.ceil(isCol ? offsetWidthValue : offsetHeightValue);
}
/**
* @hidden
* @param {CollaborativeEditArgs} options - Specify the collaborative edit arguments.
* @param {Spreadsheet} spreadsheet - specify the spreadsheet.
 * @param {boolean} isRedo - Specify the boolean value.
* @param {CollaborativeEditArgs[]} undoCollections - Specify the undo collections.
* @param {object} actionEventArgs - Specify the actionEventArgs.
* @param {UndoRedoEventArgs} actionEventArgs.eventArgs - Specify the eventArgs.
* @returns {void} - To update the Action.
*/
export function updateAction(
    options: CollaborativeEditArgs, spreadsheet: Spreadsheet, isRedo?: boolean, undoCollections?: CollaborativeEditArgs[],
    actionEventArgs?: ActionEventArgs, isRecursive?: boolean): void {
    /* eslint-disable-next-line @typescript-eslint/no-explicit-any */
    const eventArgs: any = options.eventArgs;
    let chartElement: HTMLElement;
    let element: HTMLElement;
    let args: BeforeSortEventArgs;
    let promise: Promise<SortEventArgs>;
    let sortArgs: { [key: string]: BeforeSortEventArgs | Promise<SortEventArgs> };
    let cellEvtArgs: CellSaveEventArgs;
    let cellValue: CellModel;
    let clipboardPromise: Promise<Object>;
    let model: RowModel[];
    let sheet: SheetModel;
    let column: ColumnModel;
    let row: RowModel;
    let addressInfo: { indices: number[], sheetIndex: number };
    // Treat the call as a replay (collaborative/updateAction) when the flag is set or no
    // undo/redo direction was supplied.
    let isFromUpdateAction: boolean = (options as unknown as { isFromUpdateAction: boolean }).isFromUpdateAction || isUndefined(isRedo);
    if ((options as unknown as { isUndoRedo: boolean }).isUndoRedo) {
        // Undo/redo requests are routed through the undo-redo module instead of being applied here.
        isFromUpdateAction = (options as unknown as { isFromUpdateAction: boolean }).isFromUpdateAction = true;
        delete (options as unknown as { isUndoRedo: boolean }).isUndoRedo;
        spreadsheet.notify(performUndoRedo, options);
        return;
    }
    if (isFromUpdateAction && !isRecursive) {
        // Resolve which sheet the action targets; when it is not the active sheet, load that
        // sheet's data first and re-invoke this function (isRecursive = true) once loaded.
        const address: string = eventArgs.address || eventArgs.range || eventArgs.pastedRange
            || (eventArgs.addressCollection && eventArgs.addressCollection[0]) || eventArgs.dataRange;
        const sheetIndex: number = isUndefined(eventArgs.sheetIndex) ? isUndefined(eventArgs.sheetIdx)
            ? isUndefined(eventArgs.activeSheetIndex) ? address ? getSheetIndexFromAddress(spreadsheet, address)
                : spreadsheet.activeSheetIndex : eventArgs.activeSheetIndex : eventArgs.sheetIdx : eventArgs.sheetIndex;
        if (sheetIndex !== spreadsheet.activeSheetIndex) {
            const args: { sheet: SheetModel, indexes: number[], promise?: Promise<Cell>, resolveAfterFullDataLoaded?: boolean } = {
                sheet: getSheet(spreadsheet, sheetIndex), resolveAfterFullDataLoaded: true,
                indexes: [0, 0, 0, 0], promise: new Promise((resolve: Function) => { resolve((() => { /** */ })()); })
            };
            spreadsheet.notify(updateSheetFromDataSource, args);
            args.promise.then((): void => {
                updateAction(options, spreadsheet, isRedo, undoCollections, actionEventArgs, true);
            });
            return;
        }
    }
    // Dispatch the recorded action; for most cases isRedo === false means "revert".
    switch (options.action) {
        case 'sorting':
            args = {
                range: (options.eventArgs as SortEventArgs).range,
                sortOptions: (options.eventArgs as SortEventArgs).sortOptions,
                cancel: false
            };
            // eslint-disable-next-line @typescript-eslint/no-unused-vars
            promise = new Promise((resolve: Function, reject: Function) => {
                resolve((() => { /** */ })());
            });
            sortArgs = { args: args, promise: promise };
            spreadsheet.notify(initiateSort, sortArgs);
            (sortArgs.promise as Promise<SortEventArgs>).then((args: SortEventArgs) => {
                spreadsheet.serviceLocator.getService<ICellRenderer>('cell').refreshRange(getIndexesFromAddress(args.range));
            });
            break;
        case 'cellSave':
            cellEvtArgs = options.eventArgs as CellSaveEventArgs;
            const cellSaveArgs: CellSaveEventArgs = { element: cellEvtArgs.element, value: cellEvtArgs.value,
                oldValue: cellEvtArgs.oldValue, address: cellEvtArgs.address, displayText: cellEvtArgs.displayText,
                formula: cellEvtArgs.formula, originalEvent: cellEvtArgs.originalEvent };
            cellValue = cellSaveArgs.formula ? { formula: cellSaveArgs.formula } : { value: cellSaveArgs.value };
            spreadsheet.updateCell(cellValue, cellSaveArgs.address);
            if (isRedo === true) {
                spreadsheet.trigger('cellSave', cellSaveArgs);
            }
            break;
        case 'cellDelete':
            const addrInfo: { sheetIndex: number, indices: number[] } = getAddressInfo(spreadsheet, options.eventArgs.address);
            clearRange(spreadsheet, addrInfo.indices, addrInfo.sheetIndex);
            break;
        case 'format':
            if (eventArgs.requestType === 'CellFormat') {
                if (eventArgs.style && eventArgs.style.border && !isNullOrUndefined(eventArgs.borderType)) {
                    const style: CellStyleModel = {};
                    // NOTE(review): Object.assign ignores the trailing (null, true) arguments, so this
                    // is only a shallow copy; if a deep clone (like extend(..., true)) was intended,
                    // nested style objects are still shared — confirm.
                    Object.assign(style, eventArgs.style, null, true);
                    // Apply the non-border formatting first, then restore and apply the border.
                    eventArgs.style.border = undefined;
                    spreadsheet.notify(
                        setCellFormat, { style: eventArgs.style, refreshRibbon: true, range: eventArgs.range,
                            onActionUpdate: !isFromUpdateAction });
                    eventArgs.style.border = style.border;
                    spreadsheet.setBorder(eventArgs.style, eventArgs.range, eventArgs.borderType);
                    eventArgs.style = style;
                } else {
                    spreadsheet.notify(
                        setCellFormat, { style: eventArgs.style, refreshRibbon: true, range: eventArgs.range,
                            onActionUpdate: !isFromUpdateAction });
                }
                getUpdateUsingRaf((): void => spreadsheet.selectRange(spreadsheet.getActiveSheet().selectedRange));
            } else {
                spreadsheet.numberFormat(eventArgs.format, eventArgs.range);
            }
            break;
        case 'clipboard':
            clipboardPromise = new Promise((resolve: Function) => { resolve((() => { /** */ })()); });
            addressInfo = spreadsheet.getAddressInfo(eventArgs.copiedRange);
            // Fix: pass clipboardPromise (the 'promise' local is only assigned in the 'sorting'/'filter'
            // cases, so it is undefined here) so the copy/cut module resolves it and the chained
            // paste below runs only after the copy completes.
            spreadsheet.notify(eventArgs.copiedInfo.isCut ? cut : copy, {
                range: addressInfo.indices, sId: getSheet(spreadsheet, addressInfo.sheetIndex).id,
                promise: clipboardPromise, invokeCopy: true, isPublic: true, isFromUpdateAction: true
            });
            clipboardPromise.then(() => spreadsheet.notify(paste, {
                range: getIndexesFromAddress(eventArgs.pastedRange),
                sIdx: getSheetIndex(spreadsheet, getSheetNameFromAddress(eventArgs.pastedRange)),
                type: eventArgs.type, isAction: false, isInternal: true, isFromUpdateAction: true
            }));
            break;
        case 'gridLines':
            spreadsheet.setSheetPropertyOnMute(spreadsheet.sheets[eventArgs.sheetIdx], 'showGridLines', eventArgs.isShow);
            (spreadsheet.serviceLocator.getService('sheet') as IRenderer).toggleGridlines();
            spreadsheet.notify(refreshRibbonIcons, null);
            break;
        case 'headers':
            spreadsheet.setSheetPropertyOnMute(spreadsheet.sheets[eventArgs.sheetIdx], 'showHeaders', eventArgs.isShow);
            (spreadsheet.serviceLocator.getService('sheet') as IRenderer).showHideHeaders();
            spreadsheet.notify(refreshRibbonIcons, null);
            break;
        case 'resize':
        case 'resizeToFit':
            if (isFromUpdateAction) {
                // A hidden row/column must be shown before a size can be applied to it.
                sheet = spreadsheet.sheets[eventArgs.sheetIndex];
                column = getColumn(sheet, eventArgs.index);
                row = getRow(sheet, eventArgs.index);
                if ((eventArgs.isCol && column && column.hidden) || (row && row.hidden)) {
                    spreadsheet.notify(hideShow, { startIndex: eventArgs.index, endIndex: eventArgs.index, hide: false, isCol: eventArgs.isCol,
                        sheetIndex: eventArgs.sheetIndex });
                }
            }
            if (eventArgs.isCol) {
                if (eventArgs.hide === undefined) {
                    spreadsheet.setColWidth(isFromUpdateAction && !isUndefined(isRedo) ? eventArgs.oldWidth : eventArgs.width, eventArgs.index, eventArgs.sheetIndex);
                } else {
                    spreadsheet.hideColumn(eventArgs.index, eventArgs.index, eventArgs.hide);
                }
            } else {
                if (eventArgs.hide === undefined) {
                    spreadsheet.setRowHeight(isFromUpdateAction && !isUndefined(isRedo) ? eventArgs.oldHeight : eventArgs.height, eventArgs.index, eventArgs.sheetIndex);
                } else {
                    spreadsheet.hideRow(eventArgs.index, eventArgs.index, eventArgs.hide);
                }
            }
            break;
        case 'renameSheet':
            const sheetIndex: number = getSheetIndexFromId(spreadsheet, eventArgs.index);
            spreadsheet.setSheetPropertyOnMute(spreadsheet.sheets[sheetIndex], 'name', eventArgs.value);
            spreadsheet.notify(sheetNameUpdate, {
                items: spreadsheet.element.querySelector('.e-sheet-tabs-items'),
                value: eventArgs.value,
                idx: sheetIndex
            });
            break;
        case 'hideSheet':
            spreadsheet.notify(hideSheet, { sheetIndex: eventArgs.sheetIndex });
            break;
        case 'showSheet':
            spreadsheet.notify(showSheet, eventArgs);
            break;
        case 'removeSheet':
            spreadsheet.notify(removeSheetTab, { index: eventArgs.index, isAction: true, count: eventArgs.sheetCount, clicked: true });
            break;
        case 'gotoSheet':
            spreadsheet.notify(goToSheet, { selectedIndex: eventArgs.currentSheetIndex, previousIndex: eventArgs.previousSheetIndex });
            break;
        case 'moveSheet':
            moveSheet(spreadsheet, eventArgs.position, eventArgs.sheetIndexes, null, isFromUpdateAction);
            break;
        case 'wrap':
            wrap(options.eventArgs.address, options.eventArgs.wrap, spreadsheet as Workbook);
            break;
        case 'hideShow':
            // isRedo === false inverts the hide flag so the revert shows what was hidden and vice versa.
            if (eventArgs.isCol) {
                spreadsheet.notify(
                    hideShow, <HideShowEventArgs>{ startIndex: eventArgs.startIndex, endIndex: eventArgs.endIndex, isCol: true,
                        hide: isRedo === false ? !eventArgs.hide : eventArgs.hide, sheetIndex: eventArgs.sheetIndex });
            } else {
                spreadsheet.notify(
                    hideShow, <HideShowEventArgs>{ startIndex: eventArgs.startIndex, endIndex: eventArgs.endIndex,
                        hide: isRedo === false ? !eventArgs.hide : eventArgs.hide, sheetIndex: eventArgs.sheetIndex });
            }
            break;
        case 'replace':
            spreadsheet.notify(replace, { value: eventArgs.compareValue, replaceValue: eventArgs.replaceValue,
                sheetIndex: eventArgs.sheetIndex, address: eventArgs.address });
            break;
        case 'replaceAll':
            spreadsheet.notify(replaceAll, eventArgs);
            break;
        case 'filter':
            // eslint-disable-next-line @typescript-eslint/no-unused-vars
            promise = new Promise((resolve: Function, reject: Function) => { resolve((() => { /** */ })()); });
            if (isRedo === false) {
                spreadsheet.notify(
                    initiateFilterUI, { predicates: eventArgs.previousPredicates, range: eventArgs.range, sIdx: eventArgs.sheetIndex, promise:
                        promise, isInternal: true });
            } else {
                spreadsheet.notify(
                    initiateFilterUI, { predicates: eventArgs.predicates, range: eventArgs.range, sIdx: eventArgs.sheetIndex, promise: promise,
                        isInternal: true, useFilterRange: eventArgs.useFilterRange });
            }
            if (actionEventArgs && !isFromUpdateAction) {
                promise.then((): void => {
                    spreadsheet.notify(completeAction, extend({ isUndo: !isRedo, isUndoRedo: !isFromUpdateAction }, actionEventArgs));
                });
            }
            break;
        case 'insert':
            if (options.eventArgs.modelType === 'Sheet') {
                sheet = spreadsheet;
            } else {
                sheet = getSheet(spreadsheet, options.eventArgs.activeSheetIndex);
                if (!sheet) { break; }
            }
            // Undoing an insert is a delete of the inserted span, and vice versa.
            if (isRedo === false) {
                spreadsheet.notify(
                    deleteModel, <InsertDeleteModelArgs>{ model: sheet, start: options.eventArgs.index, isUndoRedo: true, end:
                        options.eventArgs.index + (options.eventArgs.model.length - 1), modelType: options.eventArgs.modelType });
            } else {
                spreadsheet.notify(
                    insertModel, <InsertDeleteModelArgs>{ model: sheet, start: options.eventArgs.index, end: options.eventArgs.index +
                        (options.eventArgs.model.length - 1), modelType: options.eventArgs.modelType, checkCount: isRedo === undefined ? options.eventArgs.sheetCount : null, activeSheetIndex: options.eventArgs.activeSheetIndex, isUndoRedo: true, insertType: options.eventArgs.insertType,
                    isFromUpdateAction: isFromUpdateAction });
            }
            break;
        case 'delete':
            if (options.eventArgs.modelType === 'Sheet') {
                sheet = spreadsheet;
            } else {
                sheet = getSheet(spreadsheet, options.eventArgs.activeSheetIndex);
                if (!sheet) {
                    break;
                }
            }
            // Undoing a delete re-inserts the previously deleted model data.
            if (isRedo === false) {
                spreadsheet.notify(
                    insertModel, <InsertDeleteModelArgs>{ model: sheet, start: options.eventArgs.deletedModel, modelType:
                        options.eventArgs.modelType, columnCellsModel: options.eventArgs.deletedCellsModel, definedNames:
                        options.eventArgs.definedNames, activeSheetIndex: options.eventArgs.activeSheetIndex, isUndoRedo: true,
                    insertType: options.eventArgs.modelType === 'Row' ? 'above' : 'before', conditionalFormats: options.eventArgs.conditionalFormats, prevAction: options.action });
            } else {
                spreadsheet.notify(
                    deleteModel, <InsertDeleteModelArgs>{ model: sheet, start: options.eventArgs.startIndex,
                        checkCount: options.eventArgs.sheetCount, end: options.eventArgs.endIndex, modelType: options.eventArgs.modelType,
                        isUndoRedo: true, insertType: options.eventArgs.modelType === 'Row' ? 'above' : 'before' });
            }
            break;
        case 'validation':
            if (isRedo === false) {
                spreadsheet.notify(removeDataValidation, { range: eventArgs.range });
            } else {
                spreadsheet.notify(
                    cellValidation, { rules: { type: eventArgs.type, operator: eventArgs.operator, value1: eventArgs.value1, value2:
                        eventArgs.value2, ignoreBlank: eventArgs.ignoreBlank, inCellDropDown: eventArgs.inCellDropDown },
                    range: eventArgs.range });
            }
            break;
        case 'removeHighlight':
        case 'addHighlight':
            spreadsheet.notify(
                invalidData, { isRemoveHighlight: options.action === 'removeHighlight' ? isRedo !== false : isRedo === false,
                    range: eventArgs.range, isPublic: true });
            break;
        case 'merge':
            options.eventArgs.isAction = false;
            // Deep-copy the recorded cell models so applying the merge cannot mutate the undo data.
            model = [];
            for (let rIdx: number = 0, rCnt: number = eventArgs.model.length; rIdx < rCnt; rIdx++) {
                model.push({ cells: [] });
                for (let cIdx: number = 0, cCnt: number = eventArgs.model[rIdx].cells.length; cIdx < cCnt; cIdx++) {
                    model[rIdx].cells[cIdx] = {};
                    Object.assign(model[rIdx].cells[cIdx], eventArgs.model[rIdx].cells[cIdx]);
                }
            }
            spreadsheet.notify(setMerge, options.eventArgs);
            eventArgs.model = model;
            break;
        case 'clear':
            spreadsheet.notify(clearViewer, { options: options.eventArgs, isPublic: true, isFromUpdateAction: isFromUpdateAction });
            break;
        case 'conditionalFormat':
            if (isRedo === false) {
                // Revert: clear the rule, then re-apply any earlier CF rules from the undo history.
                spreadsheet.notify(clearCFRule, { range: eventArgs.range, sheetIdx: eventArgs.sheetIdx, isUndoRedo: true, isFromUpdateAction: isFromUpdateAction });
                for (let undoCollIdx: number = 0; undoCollIdx < undoCollections.length; undoCollIdx++) {
                    if (undoCollections[undoCollIdx].action === 'conditionalFormat') {
                        const conditionalFormat: ConditionalFormatModel = {
                            type: undoCollections[undoCollIdx].eventArgs.type as (HighlightCell | TopBottom | DataBar | ColorScale | IconSet),
                            cFColor: undoCollections[undoCollIdx].eventArgs.cFColor as CFColor,
                            value: undoCollections[undoCollIdx].eventArgs.value, range: undoCollections[undoCollIdx].eventArgs.range
                        };
                        spreadsheet.notify(
                            cFUndo, { conditionalFormat: conditionalFormat, sheetIdx: undoCollections[undoCollIdx].eventArgs.sheetIdx });
                    }
                }
            } else {
                const conditionalFormat: ConditionalFormatModel = { type: eventArgs.type, cFColor: eventArgs.cFColor, value: eventArgs.value,
                    range: eventArgs.range };
                spreadsheet.notify(
                    setCFRule, { conditionalFormat: conditionalFormat, sheetIdx: eventArgs.sheetIdx, isRedo: true, isAction: false, isFromUpdateAction: isFromUpdateAction });
            }
            break;
        case 'clearCF':
            if (isRedo === false) {
                spreadsheet.notify(clearCells,
                    { conditionalFormats: eventArgs.cFormats, oldRange: eventArgs.oldRange, selectedRange: eventArgs.selectedRange });
            } else {
                spreadsheet.notify(clearCFRule,
                    { range: eventArgs.selectedRange, sheetIdx: eventArgs.sheetIdx, isClearCF: true, isUndoRedo: true, isFromUpdateAction: isFromUpdateAction });
            }
            break;
        case 'insertImage':
            if (isRedo === false) {
                spreadsheet.notify(
                    deleteImage, { id: options.eventArgs.id, sheetIdx: options.eventArgs.sheetIndex + 1, range: options.eventArgs.range, preventEventTrigger: true });
            } else {
                spreadsheet.notify(
                    createImageElement, { options: { src: options.eventArgs.imageData, height: options.eventArgs.imageHeight, width:
                        options.eventArgs.imageWidth, imageId: options.eventArgs.id }, range: options.eventArgs.range, isPublic: false,
                    isUndoRedo: true });
            }
            break;
        case 'deleteImage':
            if (isRedo === false) {
                spreadsheet.notify(
                    createImageElement, { options: { src: options.eventArgs.imageData, height: options.eventArgs.imageHeight, width:
                        options.eventArgs.imageWidth, imageId: options.eventArgs.id }, range: options.eventArgs.address, isPublic: false,
                    isUndoRedo: true });
            } else {
                spreadsheet.notify(
                    deleteImage, { id: options.eventArgs.id, range: options.eventArgs.address, preventEventTrigger: true });
            }
            break;
        case 'imageRefresh':
            element = document.getElementById(options.eventArgs.id);
            // On undo, swap prev/current geometry so the image moves back to where it was.
            if (isRedo === false) {
                spreadsheet.notify(
                    refreshImgCellObj, { prevTop: options.eventArgs.currentTop, prevLeft: options.eventArgs.currentLeft, currentTop:
                        options.eventArgs.prevTop, currentLeft: options.eventArgs.prevLeft, id: options.eventArgs.id, currentHeight:
                        options.eventArgs.prevHeight, currentWidth: options.eventArgs.prevWidth, requestType: 'imageRefresh',
                    prevHeight: options.eventArgs.currentHeight, prevWidth: options.eventArgs.currentWidth, isUndoRedo: true });
            } else {
                options.eventArgs.isUndoRedo = true;
                spreadsheet.notify(refreshImgCellObj, options.eventArgs);
            }
            if (element) {
                element.style.height = isRedo === false ? options.eventArgs.prevHeight + 'px' : options.eventArgs.currentHeight + 'px';
                element.style.width = isRedo === false ? options.eventArgs.prevWidth + 'px' : options.eventArgs.currentWidth + 'px';
                element.style.top = isRedo === false ? options.eventArgs.prevTop + 'px' : options.eventArgs.currentTop + 'px';
                element.style.left = isRedo === false ? options.eventArgs.prevLeft + 'px' : options.eventArgs.currentLeft + 'px';
            }
            break;
        case 'insertChart':
            if (isRedo === false) {
                spreadsheet.notify(deleteChart, { id: eventArgs.id, range: eventArgs.range, isUndoRedo: true });
            } else {
                const chartOptions: ChartModel[] = [{ type: eventArgs.type, theme: eventArgs.theme, isSeriesInRows: eventArgs.isSeriesInRows,
                    range: eventArgs.range, id: eventArgs.id, height: eventArgs.height, width: eventArgs.width }];
                spreadsheet.notify(
                    setChart, { chart: chartOptions, isUndoRedo: false, range: eventArgs.posRange });
            }
            break;
        case 'deleteChart':
            if (isRedo === false) {
                const chartOpts: ChartModel[] = [{ type: eventArgs.type, theme: eventArgs.theme, isSeriesInRows: eventArgs.isSeriesInRows,
                    range: eventArgs.range, id: eventArgs.id, height: eventArgs.height, width: eventArgs.width, top: eventArgs.top,
                    left: eventArgs.left }];
                spreadsheet.notify(
                    setChart, { chart: chartOpts, isUndoRedo: false, range: eventArgs.posRange });
            } else {
                spreadsheet.notify(deleteChart, { id: eventArgs.id, range: eventArgs.range, isUndoRedo: true });
            }
            break;
        case 'chartRefresh':
            chartElement = document.getElementById(options.eventArgs.id);
            if (chartElement) {
                chartElement.style.height = isRedo === false ? options.eventArgs.prevHeight + 'px' : options.eventArgs.currentHeight + 'px';
                chartElement.style.width = isRedo === false ? options.eventArgs.prevWidth + 'px' : options.eventArgs.currentWidth + 'px';
                chartElement.style.top = isRedo === false ? options.eventArgs.prevTop + 'px' : options.eventArgs.currentTop + 'px';
                chartElement.style.left = isRedo === false ? options.eventArgs.prevLeft + 'px' : options.eventArgs.currentLeft + 'px';
            }
            if (isRedo === false) {
                // Swap prev/current geometry for the model update on undo.
                spreadsheet.notify(refreshChartCellObj, extend({}, options.eventArgs, {
                    currentColIdx: options.eventArgs.prevColIdx, currentHeight: options.eventArgs.prevHeight,
                    currentLeft: options.eventArgs.prevLeft, currentRowIdx: options.eventArgs.prevRowIdx,
                    currentTop: options.eventArgs.prevTop, currentWidth: options.eventArgs.prevWidth,
                    prevColIdx: options.eventArgs.currentColIdx, prevHeight: options.eventArgs.currentHeight,
                    prevLeft: options.eventArgs.currentLeft, prevRowIdx: options.eventArgs.currentRowIdx,
                    prevTop: options.eventArgs.currentTop, prevWidth: options.eventArgs.currentWidth, isUndoRedo: true
                }));
            } else {
                options.eventArgs.isUndoRedo = true;
                spreadsheet.notify(refreshChartCellObj, options.eventArgs);
            }
            break;
        case 'chartDesign':
            spreadsheet.notify(chartDesignTab, options.eventArgs);
            break;
        case 'autofill':
            spreadsheet.notify(
                setAutoFill, { fillRange: options.eventArgs.fillRange, dataRange: options.eventArgs.dataRange,
                    fillType: options.eventArgs.fillType, direction: options.eventArgs.direction });
            break;
        case 'removeValidation':
            if (isRedo !== false) {
                spreadsheet.notify(removeDataValidation, { range: eventArgs.range });
            }
            break;
        case 'addDefinedName':
            if (isRedo === false) {
                spreadsheet.notify(
                    workbookFormulaOperation, { action: 'removeDefinedName', isRemoved: false, definedName: eventArgs.name, scope:
                        eventArgs.scope, isEventTrigger: true });
            } else {
                const definedName: DefineNameModel =
                    { name: eventArgs.name, refersTo: eventArgs.refersTo, scope: eventArgs.scope, comment: eventArgs.comment };
                spreadsheet.notify(
                    workbookFormulaOperation, { action: 'addDefinedName', isAdded: false, definedName: definedName, isEventTrigger: true });
            }
            break;
        case 'hyperlink':
            spreadsheet.notify(setLinkModel, { hyperlink: eventArgs.hyperlink, cell: eventArgs.address, displayText: eventArgs.displayText, triggerEvt: false });
            spreadsheet.serviceLocator.getService<ICellRenderer>('cell').refreshRange(getIndexesFromAddress(eventArgs.address));
            break;
        case 'removeHyperlink':
            spreadsheet.notify(removeHyperlink, { range: eventArgs.address, preventEventTrigger: true });
            break;
        case 'freezePanes':
            spreadsheet.freezePanes(eventArgs.row, eventArgs.column, eventArgs.sheetIndex);
            break;
        case 'duplicateSheet':
            duplicateSheet(spreadsheet, eventArgs.sheetIndex, null, isFromUpdateAction);
            break;
        case 'protectSheet':
            if (eventArgs.isProtected) {
                spreadsheet.notify(protectsheetHandler, eventArgs);
            } else {
                spreadsheet.setSheetPropertyOnMute(getSheet(spreadsheet, eventArgs.sheetIndex), 'password', '');
                spreadsheet.notify(applyProtect, { isActive: true, sheetIndex: eventArgs.sheetIndex });
            }
            break;
        case 'protectWorkbook':
            if (eventArgs.isProtected) {
                spreadsheet.notify(setProtectWorkbook, eventArgs);
            } else {
                spreadsheet.notify(removeWorkbookProtection, null);
            }
            break;
        case 'lockCells':
            spreadsheet.notify(setLockCells, eventArgs);
    }
}
/**
* @hidden
* @param {Workbook} workbook - Specify the workbook
 * @param {number} rowIdx - Specify the row index.
* @param {number} colIdx - specify the column Index.
* @param {number} sheetIdx - specify the sheet index.
* @returns {boolean} - Returns the boolean value.
*/
export function hasTemplate(workbook: Workbook, rowIdx: number, colIdx: number, sheetIdx: number): boolean {
    // A cell is template-backed when any templated range on the sheet contains it.
    const sheet: SheetModel = workbook.sheets[sheetIdx];
    for (const rangeModel of sheet.ranges) {
        if (!rangeModel.template) { continue; }
        const indexes: number[] = getRangeIndexes(rangeModel.address.length ? rangeModel.address : rangeModel.startCell);
        if (indexes[0] <= rowIdx && indexes[1] <= colIdx && indexes[2] >= rowIdx && indexes[3] >= colIdx) {
            return true;
        }
    }
    return false;
}
/**
 * Setting row height in view and model.
*
* @hidden
* @param {Spreadsheet} parent - Specify the parent
 * @param {SheetModel} sheet - Specify the sheet model.
 * @param {number} height - Specify the height to apply.
* @param {number} rowIdx - specify the rowIdx
* @param {HTMLElement} row - specify the row
* @param {HTMLElement} hRow - specify the hRow.
* @param {boolean} notifyRowHgtChange - specify boolean value.
 * @returns {void} - Setting row height in view and model.
*/
export function setRowEleHeight(
    parent: Spreadsheet, sheet: SheetModel, height: number, rowIdx: number, row?: HTMLElement,
    hRow?: HTMLElement, notifyRowHgtChange: boolean = true): void {
    // Capture the current model height before updating so the delta can be broadcast below.
    const previousHeight: number = getRowHeight(sheet, rowIdx, true);
    const frozenColCount: number = parent.frozenColCount(sheet);
    const scaledHeight: number = getDPRValue(height);
    if (!row) {
        row = sheet.frozenRows ? parent.getRow(rowIdx, null, frozenColCount) : parent.getRow(rowIdx);
    }
    if (row) {
        row.style.height = `${scaledHeight}px`;
    }
    if (sheet.frozenColumns) {
        // With frozen columns the header row lives in the frozen pane's table.
        hRow = parent.getRow(rowIdx, null, frozenColCount - 1);
    } else if (!hRow) {
        const frozenRowCount: number = parent.frozenRowCount(sheet);
        hRow = parent.getRow(
            rowIdx, rowIdx < frozenRowCount ? parent.sheetModule.getSelectAllTable() : parent.getRowHeaderTable());
    }
    if (hRow) {
        hRow.style.height = `${scaledHeight}px`;
    }
    setRowHeight(sheet, rowIdx, height);
    parent.setProperties({ sheets: parent.sheets }, true);
    if (notifyRowHgtChange) {
        parent.notify(rowHeightChanged, { rowIdx: rowIdx, threshold: scaledHeight - previousHeight });
    }
}
/**
* @hidden
* @param {Workbook} context - Specify the context
* @param {CellStyleModel} style - specify the style.
* @param {number} lines - specify the lines
* @param {number} lineHeight - Specify the line height.
* @returns {number} - get Text Height
*/
export function getTextHeight(context: Workbook, style: CellStyleModel, lines: number = 1, lineHeight?: number): number {
    const size: string = (style && style.fontSize) || context.cellStyle.fontSize;
    let sizeInPx: number = parseInt(size, 10);
    if (size.indexOf('pt') > -1) {
        sizeInPx /= 0.75; // pt -> px (1pt = 4/3 px)
    }
    const factor: number = lineHeight || getLineHeight(style && style.fontFamily ? style : context.cellStyle);
    const total: number = sizeInPx * factor * lines;
    // Round up; fractions above 0.9 get an extra pixel of headroom.
    return Math.ceil(total % 1 > 0.9 ? total + 1 : total);
}
/**
* @hidden
* @param {CellStyleModel} style - cell style
* @returns {number} - returns line height
*/
export function getLineHeight(style: CellStyleModel): number {
let lineHeight: number = textLineHeight;
if (style) {
if (style.fontFamily === 'Arial Black') {
lineHeight = 1.44;
} else if ((style.fontFamily as string) === '"Segoe UI", sans-serif') {
lineHeight = 1.36;
}
}
return lineHeight;
}
/**
* @hidden
* @param {string} text - Specify the text
* @param {CellStyleModel} style - specify the style.
* @param {CellStyleModel} parentStyle - specify the parentStyle
* @returns {number} - get Text Width
*/
export function getTextWidth(text: string, style: CellStyleModel, parentStyle: CellStyleModel, preventDpr?: boolean): number {
    const cellStyle: CellStyleModel = style || parentStyle;
    // Measure with an off-screen canvas configured with the effective font.
    const context: CanvasRenderingContext2D = document.createElement('canvas').getContext('2d');
    const fontParts: string[] = [
        cellStyle.fontStyle || parentStyle.fontStyle, cellStyle.fontWeight || parentStyle.fontWeight,
        cellStyle.fontSize || parentStyle.fontSize, cellStyle.fontFamily || parentStyle.fontFamily];
    context.font = fontParts.join(' ');
    const measured: number = context.measureText(text).width;
    return preventDpr ? measured : getDPRValue(measured, true);
}
/**
* @hidden
* @param {string} text - Specify the text
* @param {number} colwidth - specify the column width
* @param {CellStyleModel} style - specify the style.
* @param {CellStyleModel} parentStyle - specify the parentStyle
* @returns {number} - Setting maximum height while doing formats and wraptext
*/
export function getLines(text: string, colwidth: number, style: CellStyleModel, parentStyle: CellStyleModel): number {
    let width: number; let splitTextArr: string[]; let lWidth: number; let cWidth: number;
    // Accumulated width of the current (not yet wrapped) line.
    let prevWidth: number = 0;
    const textArr: string[] = text.toString().split(' ');
    const spaceWidth: number = getTextWidth(' ', style, parentStyle);
    let hypenWidth: number;
    let lines: number;
    // Running count of completed wrapped lines.
    let cnt: number = 0;
    let lineCnt: number = 0; let maxCnt: number = 0;
    // Counts the extra lines a word needs; words wider than the column are broken character by character.
    const calculateCount: Function = (txt: string, isHypenSplit: boolean): void => {
        if (prevWidth) {
            cnt++;
        }
        if (width / colwidth >= 1) {
            txt.split('').forEach((val: string) => {
                cWidth = getTextWidth(val, style, parentStyle, true);
                lWidth += cWidth;
                if (lWidth > colwidth) {
                    cnt++;
                    lWidth = cWidth;
                }
            });
            // Remaining tail of the broken word becomes the current line width.
            width = getDPRValue(lWidth, true);
        }
        if (!isHypenSplit) { addSpace(width); }
        prevWidth = width;
    };
    // Adds a trailing space to the line unless that space alone would trigger a wrap.
    const addSpace: Function = (size: number): void => {
        width += ((size + spaceWidth) / colwidth >= 1 ? 0 : spaceWidth);
    };
    textArr.forEach((txt: string) => {
        lWidth= 0; cWidth = 0;
        width = getTextWidth(txt, style, parentStyle);
        lines = (prevWidth + width) / colwidth;
        if (lines > 1) {
            // The word overflows the line; try breaking at hyphens before falling back to per-character breaking.
            splitTextArr = txt.split('-');
            if (splitTextArr.length > 1) {
                splitTextArr.forEach((splitText: string) => {
                    lWidth= 0; cWidth = 0;
                    if (!hypenWidth) { hypenWidth = getTextWidth('-', style, parentStyle); }
                    width = getTextWidth(splitText, style, parentStyle);
                    if (splitTextArr[splitTextArr.length - 1] !== splitText) {
                        // Non-final fragments keep their hyphen.
                        width += hypenWidth;
                    }
                    lines = (prevWidth + width) / colwidth;
                    if (lines >= 1) {
                        calculateCount(splitText, splitTextArr[splitTextArr.length - 1] !== splitText);
                    } else {
                        if (splitTextArr[splitTextArr.length - 1] === splitText && textArr[textArr.length - 1] !== txt) {
                            addSpace(prevWidth + width);
                        }
                        prevWidth += width;
                    }
                });
            } else {
                calculateCount(txt, false);
            }
        } else {
            // Word fits on the current line: append it (plus a separating space when it still fits).
            addSpace(prevWidth + width);
            prevWidth += width;
        }
    });
    if (prevWidth) {
        // Convert the leftover line width into line count; near-boundary fractions (+0.05) round up one extra line.
        lineCnt = (prevWidth - spaceWidth) / colwidth;
        maxCnt = parseFloat((lineCnt).toString().split('.')[0]);
        cnt += (lineCnt + 0.05 >= maxCnt + 1 ? Math.ceil(lineCnt) + 1 : Math.ceil(lineCnt));
    }
    return cnt;
}
/**
* calculation for width taken by border inside a cell
*
* @param {number} rowIdx - Specify the row index.
* @param {number} colIdx - Specify the column index.
* @param {SheetModel} sheet - Specify the sheet.
* @returns {number} - get border width.
*/
function getBorderWidth(rowIdx: number, colIdx: number, sheet: SheetModel): number {
    // Extracts the pixel size from a CSS border shorthand like '1px solid #000'.
    const toPx = (border: string): number => parseFloat(border.split('px')[0]);
    const cell: CellModel = getCell(rowIdx, colIdx, sheet, null, true);
    const nextCell: CellModel = getCell(rowIdx, colIdx + 1, sheet, null, true);
    let width: number = 0;
    const style = cell.style;
    if (style) {
        if (style.border) {
            // The first column owns both its left and right border; other columns only the right one.
            width = (colIdx === 0 ? 2 : 1) * toPx(style.border);
        } else {
            if (colIdx === 0 && style.borderLeft) {
                width = toPx(style.borderLeft);
            }
            if (style.borderRight) {
                width += toPx(style.borderRight);
            }
        }
    }
    // The right neighbour's left border also consumes width when this cell has no right border of its own.
    if (!(style && (style.border || style.borderRight)) && nextCell.style && nextCell.style.borderLeft) {
        width += toPx(nextCell.style.borderLeft);
    }
    // Sub-pixel borders still occupy at least one pixel.
    return width > 0 && width < 1 ? 1 : width;
}
/**
* calculation for height taken by border inside a cell
*
* @param {number} rowIdx - Specify the row index.
* @param {number} colIdx - Specify the column index.
* @param {SheetModel} sheet - Specify the sheet.
* @returns {number} - get border height.
* @hidden
*/
export function getBorderHeight(rowIdx: number, colIdx: number, sheet: SheetModel): number {
    // Extracts the pixel size from a CSS border shorthand like '1px solid #000'.
    const toPx = (border: string): number => parseFloat(border.split('px')[0]);
    const cell: CellModel = getCell(rowIdx, colIdx, sheet, null, true);
    let height: number = 0;
    const style = cell.style;
    if (style) {
        if (style.border) {
            // The first row owns both its top and bottom border; other rows only the bottom one.
            height = (rowIdx === 0 ? 2 : 1) * toPx(style.border);
        } else {
            if (rowIdx === 0 && style.borderTop) {
                height = toPx(style.borderTop);
            }
            if (style.borderBottom) {
                height += toPx(style.borderBottom);
            }
        }
    }
    const belowCell: CellModel = getCell(rowIdx + 1, colIdx, sheet, null, true);
    // The cell below contributes its top border when this cell has no bottom border of its own.
    if (!(style && (style.border || style.borderBottom)) && belowCell.style && belowCell.style.borderTop) {
        height += toPx(belowCell.style.borderTop);
    }
    return Math.ceil(height) || 1; // 1 -> For default bottom border
}
/**
* Calculating column width by excluding cell padding and border width
*
* @param {SheetModel} sheet - Specify the sheet
* @param {number} rowIdx - Specify the row index.
* @param {number} startColIdx - Specify the start column index.
* @param {number} endColIdx - Specify the end column index.
* @returns {number} - get excluded column width.
* @hidden
*/
export function getExcludedColumnWidth(sheet: SheetModel, rowIdx: number, startColIdx: number, endColIdx: number = startColIdx): number {
return getColumnsWidth(sheet, startColIdx, endColIdx, true) - getDPRValue((4 + (getBorderWidth(rowIdx, startColIdx, sheet) || 1))); // 4 -> For cell padding
}
/**
* @param {Workbook} context - Specify the Workbook.
* @param {number} rowIdx - Specify the row index.
* @param {number} colIdx - Specify the column index.
* @param {SheetModel} sheet - Specify the sheet.
* @param {CellStyleModel} style - Specify the style.
* @param {number} lines - Specify the lines.
* @param {number} lineHeight - Specify the line height.
* @returns {number} - get text height with border.
* @hidden
*/
export function getTextHeightWithBorder(
    context: Workbook, rowIdx: number, colIdx: number, sheet: SheetModel, style: CellStyleModel, lines?: number,
    lineHeight?: number): number {
    // Total cell height is the font-based text height plus whatever the borders consume.
    const textHgt: number = getTextHeight(context, style, lines, lineHeight);
    return textHgt + getBorderHeight(rowIdx, colIdx, sheet);
}
/**
* Setting maximum height while doing formats and wraptext
*
* @hidden
* @param {SheetModel} sheet - Specify the sheet
* @param {number} rIdx - specify the row Index
* @param {number} cIdx - specify the column Index.
* @param {number} hgt - specify the hgt
* @returns {void} - Setting maximum height while doing formats and wraptext
*/
export function setMaxHgt(sheet: SheetModel, rIdx: number, cIdx: number, hgt: number): void {
    // Lazily create the per-row bucket, then record this cell's computed height.
    sheet.maxHgts[rIdx] = sheet.maxHgts[rIdx] || {};
    sheet.maxHgts[rIdx][cIdx] = hgt;
}
/**
* Getting maximum height by comparing each cell's modified height.
*
* @hidden
* @param {SheetModel} sheet - Specify the sheet.
* @param {number} rIdx - Specify the row index.
* @returns {number} - Getting maximum height by comparing each cell's modified height.
*/
export function getMaxHgt(sheet: SheetModel, rIdx: number): number {
    const rowHgt: object = sheet.maxHgts[rIdx];
    if (!rowHgt) {
        return 0;
    }
    // Reduce over every tracked cell of the row, keeping the tallest entry.
    return Object.keys(rowHgt).reduce(
        (maxHgt: number, key: string) => rowHgt[key] > maxHgt ? rowHgt[key] : maxHgt, 0);
}
/**
* @hidden
* @param {HTMLElement} ele - Specify the element.
* @returns {void} - Specify the focus.
*/
export function focus(ele: HTMLElement): void {
    // Do not steal focus while the find-next shortcut input is active.
    if (document.activeElement.classList.contains('e-text-findNext-short')) {
        return;
    }
    if (Browser.isIE) {
        // IE lacks { preventScroll }; save and restore the scroll position manually.
        const left: number = window.scrollX;
        const top: number = window.scrollY;
        ele.focus();
        window.scrollTo(left, top);
    } else {
        /* eslint-disable-next-line @typescript-eslint/no-explicit-any */
        (ele as any).focus({ preventScroll: true });
    }
}
/**
* Checks whether a specific range of cells is locked or not.
*
* @param {Spreadsheet} parent - Specify the spreadsheet.
* @param {number[]} rangeIndexes - Specify the range indexes.
* @returns {boolean} - Returns true if any of the cells is locked and returns false if none of the cells is locked.
* @hidden
*/
export function isLockedCells(parent: Spreadsheet, rangeIndexes?: number[]): boolean {
    const sheet: SheetModel = parent.getActiveSheet();
    // Fall back to the normalized selected range when no explicit range is given.
    const address: number[] = !isNullOrUndefined(rangeIndexes) ? rangeIndexes : getSwapRange(getRangeIndexes(sheet.selectedRange));
    for (let row: number = address[0]; row <= address[2]; row++) {
        for (let col: number = address[1]; col <= address[3]; col++) {
            const cell: CellModel = getCell(row, col, sheet);
            if (isLocked(cell, getColumn(sheet, col))) {
                // Early exit: the previous `break` only left the inner loop and kept scanning remaining rows.
                return true;
            }
        }
    }
    // Explicit `false` instead of the previous implicit `undefined` return, matching the declared boolean type.
    return false;
}
/**
* Checks whether the range is discontinuous or not.
*
* @param {string} range - Specify the sheet
* @returns {boolean} - Returns true if the range is discontinuous range.
* @hidden
*/
export function isDiscontinuousRange(range: string): boolean {
    // Discontinuous selections are space separated, e.g. 'A1:B2 D4:E5'.
    return range.indexOf(' ') > -1;
}
/**
* @hidden
* @param {Spreadsheet} context - Specifies the context.
* @param {number[]} range - Specifies the address range.
* @param {number} sheetIdx - Specifies the sheetIdx.
* @returns {void} - To clear the range.
*/
export function clearRange(context: Spreadsheet, range: number[], sheetIdx: number): void {
    const sheet: SheetModel = getSheet(context, sheetIdx);
    let skip: boolean; let cell: CellModel; let newCell: CellModel; let td: HTMLElement;
    // Only refresh the DOM when clearing cells on the currently visible sheet.
    const uiRefresh: boolean = sheetIdx === context.activeSheetIndex;
    for (let sRIdx: number = range[0], eRIdx: number = range[2]; sRIdx <= eRIdx; sRIdx++) {
        if (isFilterHidden(sheet, sRIdx)) { continue; }
        for (let sCIdx: number = range[1], eCIdx: number = range[3]; sCIdx <= eCIdx; sCIdx++) {
            const args: { cellIdx: number[], isUnique: boolean, uniqueRange: string } = { cellIdx: [sRIdx, sCIdx], isUnique: false ,
                uniqueRange: '' };
            // Ask the unique-formula module whether this cell belongs to a unique range.
            context.notify(checkUniqueRange, args); skip = false;
            if (args.uniqueRange !== '') {
                const rangeIndex: number[] = getIndexesFromAddress(args.uniqueRange);
                // A spilled unique range ('#SPILL!') may still be cleared.
                skip = getCell(rangeIndex[0], rangeIndex[1], sheet).value === '#SPILL!';
            }
            if (!args.isUnique || skip) {
                cell = getCell(sRIdx, sCIdx, sheet);
                if (cell) {
                    // Collect only the properties that actually need clearing.
                    newCell = {};
                    if (cell.formula) {
                        newCell.formula = '';
                    }
                    if (cell.value || <unknown>cell.value === 0) {
                        newCell.value = '';
                    }
                    if (cell.hyperlink) {
                        newCell.hyperlink = '';
                    }
                    td = context.getCell(sRIdx, sCIdx);
                    // Skip the rest when nothing changed or the model update was cancelled.
                    if (!Object.keys(newCell).length || updateCell(
                        context, sheet, { cell: newCell, rowIdx: sRIdx, colIdx: sCIdx, valChange: true, uiRefresh: uiRefresh, td: td,
                        cellDelete: true })) {
                        continue;
                    }
                    if (td) {
                        // Remove conditional-formatting visuals (data bars / icon sets) from the cleared cell.
                        if (td.querySelector('.e-cf-databar')) {
                            td.removeChild(td.querySelector('.e-cf-databar'));
                        }
                        if (td.querySelector('.e-iconsetspan')) {
                            td.removeChild(td.querySelector('.e-iconsetspan'));
                        }
                    }
                    context.notify(
                        checkConditionalFormat, { rowIdx: sRIdx, colIdx: sCIdx, cell: getCell(sRIdx, sCIdx, sheet), isAction: true });
                }
            }
        }
    }
}
import { AtomicProps } from '@fower/atomic-props'
import type { Atom } from './atom'
import type { Parser } from './parser'
import * as CSS from 'csstype'
import React from 'react'
import '@fower/atomic-props'
/** Props of the underlying element/component `T`, extended with polymorphic `as` support. */
export type ComponentProps<T extends As> = React.ComponentProps<T> & {
  as?: As
}
/** HTML props for `T` merged with Fower's atomic styling props. */
export type FowerHTMLProps<T extends As> = ComponentProps<T> & AtomicProps & { as?: As }
/** A partial configuration object used to preset Fower. */
export type Preset = Partial<Configuration>
/** A value, an array of per-breakpoint values, or a function producing either; `false` disables a slot. */
export type ResponsiveValue<T = any> =
  | [T | false, ...(T | false)[]]
  | (T | false)
  | (() => [T | false, ...(T | false)[]])
  | (() => T | false)
export type ResponsiveBoolean = ResponsiveValue<boolean> | boolean
/** The primitive values an atomic prop may take. */
export type PropValue = boolean | number | string
/** Known modes plus any custom string (the `{} & string` keeps literal autocompletion). */
export type ModeType = 'light' | 'dark' | ({} & string)
/** Either a raw CSS color or a key of the theme palette. */
export type FowerColor = CSS.Property.Color | keyof Colors
export type FowerThemeColor = keyof Colors
/** Anything renderable via the polymorphic `as` prop. */
export type As = React.ElementType | React.ComponentType
/** Atomic props given either as an object or as a list of prop names. */
export type GroupedAtomicProps = AtomicProps | (keyof AtomicProps)[]
/** Atomic props merged with the remaining standard CSS properties. */
export interface FowerCSSProperties extends AtomicProps, Omit<CSS.Properties, keyof AtomicProps> {}
/** Pseudo-selector keys mapped to nested style objects. */
export type PseudosObject = { [P in CSS.Pseudos]?: FowerCSSProperties }
/** A single atomic prop name (postfix props excluded), or any custom string. */
export type AtomicKey = keyof Omit<AtomicProps, keyof PostfixAtomicProps> | ({} & string)
export type AtomicArray = AtomicKey[]
/** Arguments accepted by the `styled` factory. */
export type StyledArgs = (AtomicArray | CSSObject)[]
/** Arguments accepted by the `css` helper. */
export type CSSArgs = (AtomicArray | CSSObject | AtomicKey | ({} & string))[]
/** A (possibly nested) style object; nested keys recurse into `CSSObject` for selector scoping. */
export type CSSObject<T = any> =
  | (FowerCSSProperties & PseudosObject)
  | {
      [K in keyof T]?: T[K] extends object
        ? CSSObject<T[K]>
        : FowerCSSProperties | number | string | boolean
    }
/**
 * Contract for a Fower plugin. The parser asks each plugin whether it matches
 * a prop key and then lets it transform the resulting atom at several stages.
 */
export interface FowerPlugin {
  /** Returns true when this plugin should handle the given prop key. */
  isMatch(key: string, parser?: Parser): boolean
  /** Optional one-time initialization with the component's props. */
  init?(props: Props): void
  /**
   * before handle atom
   * to modify some atom attr before handle
   * @param atom
   * @param parser
   */
  beforeHandleAtom?(atom: Atom, parser: Parser): Atom
  /**
   * on atom style creating
   * @param atom
   * @param parser
   */
  handleAtom?(atom: Atom, parser: Parser): Atom
  /**
   * after atom style created
   * @param parser
   */
  afterAtomStyleCreate?(parser: Parser): void
}
/**
 * Postfix-style atomic props: each key scopes a group of atomic props to a
 * breakpoint (`_sm`.._2xl`), a color mode (`_dark`), or a pseudo state/element
 * (`_hover`, `_before`, ...).
 */
export interface PostfixAtomicProps {
  // Responsive breakpoints
  _sm?: GroupedAtomicProps
  _md?: GroupedAtomicProps
  _lg?: GroupedAtomicProps
  _xl?: GroupedAtomicProps
  _2xl?: GroupedAtomicProps
  // Color mode
  _dark?: GroupedAtomicProps
  // Pseudo classes
  _active?: GroupedAtomicProps
  _checked?: GroupedAtomicProps
  _disabled?: GroupedAtomicProps
  _enabled?: GroupedAtomicProps
  _default?: GroupedAtomicProps
  _empty?: GroupedAtomicProps
  _focus?: GroupedAtomicProps
  _focusWithin?: GroupedAtomicProps
  _invalid?: GroupedAtomicProps
  _hover?: GroupedAtomicProps
  _link?: GroupedAtomicProps
  _visited?: GroupedAtomicProps
  _firstChild?: GroupedAtomicProps
  _lastChild?: GroupedAtomicProps
  // Pseudo elements
  _after?: GroupedAtomicProps
  _before?: GroupedAtomicProps
  _placeholder?: GroupedAtomicProps
  _selection?: GroupedAtomicProps
}
/** Global Fower configuration. */
export interface Configuration {
  /** Default length unit appended to bare numeric values ('none' leaves them untouched). */
  unit: 'none' | 'px' | 'rem' | 'em' | 'vh' | 'rpx' | ({} & string)
  /**
   * prefix for all css classes
   */
  prefix?: string
  /**
   * use inline style or not
   * */
  inline?: boolean
  /**
   * make all rule important
   */
  important?: boolean
  /**
   * object style keys, default is ['css'], you can customize it to ['sx'], or both ['css', 'sx']
   */
  objectPropKeys?: string[]
  /** Color-mode settings. */
  mode: {
    /**
     * @example
     * current: 'dark', default is light
     */
    currentMode: ModeType
    /**
     * @example
     * modeList: ['dark', 'yellow']
     */
    modeList: ModeType[]
    /**
     * enable auto dark mode
     */
    autoDarkMode: {
      enabled: boolean
      mappings: Record<string, string | boolean>
    }
    classPrefix?: string // eg: fower-
  }
  /** Additional pseudo selectors recognized by the parser. */
  pseudos?: string[]
  /** Design tokens (breakpoints, colors, spacings, ...). */
  theme: Theme
  /** Registered plugins, applied in order. */
  plugins: FowerPlugin[]
  /** Custom unit transformer, overriding the `unit` setting. */
  transformUnit?: (data: string | number) => string
}
/** Component props as seen by the parser; unknown keys are candidate atomic props. */
export interface Props {
  /** Inline style object forwarded to the element. */
  style?: any
  /** Extra class names forwarded to the element. */
  className?: string
  /** Object-style CSS handled by Fower. */
  css?: CSSObject
  /**
   * props not convert to atomic props
   */
  excludedProps?: string[]
  [key: string]: any
}
/** Design-token theme: scales for breakpoints, colors, spacing, typography, radii and shadows. */
export interface Theme {
  /** Min-width media-query breakpoints. */
  breakpoints: {
    sm: string
    md: string
    lg: string
    xl: string
    '2xl': string
  }
  colors: Colors
  /** Numeric spacing scale (margins, paddings, gaps). */
  spacings: {
    0: string | number
    1: string | number
    2: string | number
    3: string | number
    4: string | number
    5: string | number
    6: string | number
    7: string | number
    8: string | number
    9: string | number
    10: string | number
    11: string | number
    12: string | number
    14: string | number
    16: string | number
    20: string | number
    24: string | number
    28: string | number
    32: string | number
    36: string | number
    40: string | number
    44: string | number
    48: string | number
    52: string | number
    56: string | number
    60: string | number
    64: string | number
    72: string | number
    80: string | number
    96: string | number
  }
  fontFamilies: {
    sans: string
    serif: string
    mono: string
  }
  /** T-shirt-size font-size scale. */
  fontSizes: {
    xs: string | number
    sm: string | number
    base: string | number
    lg: string | number
    xl: string | number
    '2xl': string | number
    '3xl': string | number
    '4xl': string | number
    '5xl': string | number
    '6xl': string | number
    '7xl': string | number
    '8xl': string | number
    '9xl': string | number
  }
  fontWeights: {
    hairline: string | number
    thin: string | number
    light: string | number
    normal: string | number
    medium: string | number
    semibold: string | number
    bold: string | number
    extrabold: string | number
    black: string | number
  }
  lineHeights: {
    none: number | string
    tight: number | string
    snug: number | string
    normal: number | string
    relaxed: number | string
    loose: number | string
  }
  letterSpacings: {
    tighter: number | string
    tight: number | string
    normal: number | string
    wide: number | string
    wider: number | string
    widest: number | string
  }
  /** Border-radius scale. */
  radii: {
    none: number | string
    tiny: number | string
    small: number | string
    medium: number | string
    large: number | string
    huge: number | string
    gigantic: number | string
    full: number | string
  }
  /** Box-shadow scale. */
  shadows: {
    tiny: number | string
    small: number | string
    medium: number | string
    large: number | string
    huge: number | string
    gigantic: number | string
    inner: string
    outline: string
    none: string
  }
}
/**
 * Theme color palette. Scales follow a Tailwind-style 50 (lightest) to 900
 * (darkest) convention; hex comments show the default values where provided.
 * `modes` holds per-color-mode overrides of the same palette.
 */
export interface Colors {
  modes?: Record<string, Omit<Colors, 'modes'> | { [key: string]: any }>
  transparent: string
  black: string
  white: string
  /** Brand color plus its 50–900 scale. */
  brand: string
  brand50: string
  brand100: string
  brand200: string
  brand300: string
  brand400: string
  brand500: string
  brand600: string
  brand700: string
  brand800: string
  brand900: string
  pink50: string
  pink100: string
  pink200: string
  pink300: string
  pink400: string
  pink500: string
  pink600: string
  pink700: string
  pink800: string
  pink900: string
  fuchsia50: string
  fuchsia100: string
  fuchsia200: string
  fuchsia300: string
  fuchsia400: string
  fuchsia500: string
  fuchsia600: string
  fuchsia700: string
  fuchsia800: string
  fuchsia900: string
  purple50: string // '#faf5ff'
  purple100: string // '#f3e8ff'
  purple200: string // '#e9d5ff'
  purple300: string // '#d8b4fe'
  purple400: string // '#c084fc'
  purple500: string // '#a855f7'
  purple600: string // '#9333ea'
  purple700: string // '#7e22ce'
  purple800: string // '#6b21a8'
  purple900: string // '#581c87'
  violet50: string // '#f5f3ff'
  violet100: string // '#ede9fe'
  violet200: string // '#ddd6fe'
  violet300: string // '#c4b5fd'
  violet400: string // '#a78bfa'
  violet500: string // '#8b5cf6'
  violet600: string // '#7c3aed'
  violet700: string // '#6d28d9'
  violet800: string // '#5b21b6'
  violet900: string // '#4c1d95'
  indigo50: string // '#eef2ff'
  indigo100: string // '#e0e7ff'
  indigo200: string // '#c7d2fe'
  indigo300: string // '#a5b4fc'
  indigo400: string // '#818cf8'
  indigo500: string // '#6366f1'
  indigo600: string // '#4f46e5'
  indigo700: string // '#4338ca'
  indigo800: string // '#3730a3'
  indigo900: string // '#312e81'
  blue50: string // '#eff6ff'
  blue100: string // '#dbeafe'
  blue200: string // '#bfdbfe'
  blue300: string // '#93c5fd'
  blue400: string // '#60a5fa'
  blue500: string // '#3b82f6'
  blue600: string // '#2563eb'
  blue700: string // '#1d4ed8'
  blue800: string // '#1e40af'
  blue900: string // '#1e3a8a'
  cyan50: string // '#ecfeff'
  cyan100: string // '#cffafe'
  cyan200: string // '#a5f3fc'
  cyan300: string // '#67e8f9'
  cyan400: string // '#22d3ee'
  cyan500: string // '#06b6d4'
  cyan600: string // '#0891b2'
  cyan700: string // '#0e7490'
  cyan800: string // '#155e75'
  cyan900: string // '#164e63'
  teal50: string // '#f0fdfa'
  teal100: string // '#ccfbf1'
  teal200: string // '#99f6e4'
  teal300: string // '#5eead4'
  teal400: string // '#2dd4bf'
  teal500: string // '#14b8a6'
  teal600: string // '#0d9488'
  teal700: string // '#0f766e'
  teal800: string // '#115e59'
  teal900: string // '#134e4a'
  green50: string // '#f0fdf4'
  green100: string // '#dcfce7'
  green200: string // '#bbf7d0'
  green300: string // '#86efac'
  green400: string // '#4ade80'
  green500: string // '#22c55e'
  green600: string // '#16a34a'
  green700: string // '#15803d'
  green800: string // '#166534'
  green900: string // '#14532d'
  lime50: string // '#f7fee7'
  lime100: string // '#ecfccb'
  lime200: string // '#d9f99d'
  lime300: string // '#bef264'
  lime400: string // '#a3e635'
  lime500: string // '#84cc16'
  lime600: string // '#65a30d'
  lime700: string // '#4d7c0f'
  lime800: string // '#3f6212'
  lime900: string // '#365314'
  yellow50: string // '#fefce8'
  yellow100: string // '#fef9c3'
  yellow200: string // '#fef08a'
  yellow300: string // '#fde047'
  yellow400: string // '#facc15'
  yellow500: string // '#eab308'
  yellow600: string // '#ca8a04'
  yellow700: string // '#a16207'
  yellow800: string // '#854d0e'
  yellow900: string // '#713f12'
  orange50: string // '#fff7ed'
  orange100: string // '#ffedd5'
  orange200: string // '#fed7aa'
  orange300: string // '#fdba74'
  orange400: string // '#fb923c'
  orange500: string // '#f97316'
  orange600: string // '#ea580c'
  orange700: string // '#c2410c'
  orange800: string // '#9a3412'
  orange900: string // '#7c2d12'
  red50: string // '#fef2f2'
  red100: string // '#fee2e2'
  red200: string // '#fecaca'
  red300: string // '#fca5a5'
  red400: string // '#f87171'
  red500: string // '#ef4444'
  red600: string // '#dc2626'
  red700: string // '#b91c1c'
  red800: string // '#991b1b'
  red900: string // '#7f1d1d'
  trueGray50: string // '#fafafa'
  trueGray100: string // '#f5f5f5'
  trueGray200: string // '#e5e5e5'
  trueGray300: string // '#d4d4d4'
  trueGray400: string // '#a3a3a3'
  trueGray500: string // '#737373'
  trueGray600: string // '#525252'
  trueGray700: string // '#404040'
  trueGray800: string // '#262626'
  trueGray900: string // '#171717'
  gray50: string // '#fafafa'
  gray100: string // '#f4f4f5'
  gray200: string // '#e4e4e7'
  gray300: string // '#d4d4d8'
  gray400: string // '#a1a1aa'
  gray500: string // '#71717a'
  gray600: string // '#52525b'
  gray700: string // '#3f3f46'
  gray800: string // '#27272a'
  gray900: string // '#18181b'
}
/** Metadata attached to an atom describing how/where its rule is emitted. */
export interface Meta {
  /**
   * color mode
   * @example
   * mode: 'dark'
   */
  mode?: string
  /**
   * @example
   * breakpoint: '640px'
   */
  breakpoint?: string
  /**
   * @example
   * --hover -> hover
   * --before -> before
   */
  pseudo?: string
  /**
   * @example
   * --hover -> :
   * --before -> ::
   */
  pseudoPrefix?: string
  /**
   * child selector for atom.className
   * @example
   * childSelector: '.child'
   */
  childSelector?: string
  /**
   * sibling selector for atom.className
   * @example
   * siblingSelector: '.sibling'
   */
  siblingSelector?: string
  /**
   * parent class for group pseudo
   * @example
   * parentClass: 'group'
   */
  parentClass?: string
  /**
   * is !important style
   */
  important?: boolean
  /**
   * is global style, value is global selector
   */
  global?: string
  /**
   * color name or value
   * @example
   * gray200--O20 -> gray200
   * gray200--T20 -> gray200
   * #666--D40 -> #666
   * #999--L40 -> #999
   */
  color?: string
  /**
   * color postfix for opacify,transparent,darken,lighten
   * @example
   * gray200--O20 -> O20
   * gray200--T20 -> T20
   * #666--D40 -> D40
   * #999--L40 -> L40
   */
  colorPostfix?: string
}
/** Per-prop options handed to plugins while parsing. */
export interface Options {
  /** The raw prop key being processed ('css', 'debug', or any atomic key). */
  propKey: 'css' | 'debug' | ({} & string)
  /** The raw prop value as passed by the user. */
  propValue?: any
  /** Normalized CSS property key. */
  key?: string
  /** Normalized CSS value. */
  value?: any
  meta?: Meta
  /** Resulting style object, when already computed. */
  style?: any
  /** Marks the prop as consumed by a plugin. */
  handled?: boolean
}
export const overrides = [
{
"key": "ctrl+g",
"command": "workbench.action.gotoLine",
},
{
"key": "ctrl+g",
"command": "-workbench.action.gotoLine",
},
{
"key": "1",
"command": "dance.selections.align",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "shift+0",
"command": "dance.count.0",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "0",
"command": "-dance.count.0",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "shift+1",
"command": "dance.count.1",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "1",
"command": "-dance.count.1",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "shift+2",
"command": "dance.count.2",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "2",
"command": "-dance.count.2",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "shift+3",
"command": "dance.count.3",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "3",
"command": "-dance.count.3",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "shift+4",
"command": "dance.count.4",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "4",
"command": "-dance.count.4",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "shift+5",
"command": "dance.count.5",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "5",
"command": "-dance.count.5",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "shift+6",
"command": "dance.count.6",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "6",
"command": "-dance.count.6",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "shift+7",
"command": "dance.count.7",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "7",
"command": "-dance.count.7",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "shift+8",
"command": "dance.count.8",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "8",
"command": "-dance.count.8",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "shift+9",
"command": "dance.count.9",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "9",
"command": "-dance.count.9",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "oem_102",
"command": "dance.deindent",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "shift+oem_102",
"command": "dance.indent",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "unknown",
"command": "-dance.selections.align",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "unknown",
"command": "-dance.deindent",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "unknown",
"command": "-dance.indent",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "oem_1",
"command": "dance.pipe.filter",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "shift+4",
"command": "-dance.pipe.filter",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "oem_8",
"command": "dance.pipe.append",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "shift+1",
"command": "-dance.pipe.append",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "alt+oem_8",
"command": "dance.pipe.prepend",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "shift+alt+1",
"command": "-dance.pipe.prepend",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "shift+alt+oem_8",
"command": "dance.pipe.ignore",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "shift+alt+oem_5",
"command": "-dance.pipe.ignore",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "shift+oem_8",
"command": "dance.pipe.replace",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "shift+oem_5",
"command": "-dance.pipe.replace",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "alt+1",
"command": "dance.selections.align.copy",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "alt+unknown",
"command": "-dance.selections.align.copy",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "shift+alt+oem_102",
"command": "dance.indent.withEmpty",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "alt+unknown",
"command": "-dance.indent.withEmpty",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "alt+oem_102",
"command": "dance.deindent.further",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "alt+unknown",
"command": "-dance.deindent.further",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "shift+alt+oem_5",
"command": "dance.objects.selectToEnd.extend.inner",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "shift+alt+unknown",
"command": "-dance.objects.selectToEnd.extend.inner",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "shift+alt+oem_3",
"command": "dance.objects.selectToStart.extend.inner",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "shift+alt+unknown",
"command": "-dance.objects.selectToStart.extend.inner",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "shift+oem_5",
"command": "dance.objects.selectToEnd.extend",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "shift+unknown",
"command": "-dance.objects.selectToEnd.extend",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "shift+oem_3",
"command": "dance.objects.selectToStart.extend",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "shift+unknown",
"command": "-dance.objects.selectToStart.extend",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "alt+oem_period",
"command": "dance.selections.flip",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "alt+oem_1",
"command": "-dance.selections.flip",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "shift+alt+oem_period",
"command": "dance.selections.forward",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "shift+alt+oem_1",
"command": "-dance.selections.forward",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "alt+8",
"command": "dance.selections.merge",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "alt+unknown",
"command": "-dance.selections.merge",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "oem_period",
"command": "dance.selections.reduce",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "oem_1",
"command": "-dance.selections.reduce",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "shift+oem_period",
"command": "dance.repeat.insert",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "oem_period",
"command": "-dance.repeat.insert",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "oem_4",
"command": "dance.rotate",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "unknown",
"command": "-dance.rotate",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "5",
"command": "dance.rotate.backwards",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "unknown",
"command": "-dance.rotate.backwards",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "alt+oem_4",
"command": "dance.rotate.content",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "alt+unknown",
"command": "-dance.rotate.content",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "alt+5",
"command": "dance.rotate.content.backwards",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "alt+unknown",
"command": "-dance.rotate.content.backwards",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "oem_2",
"command": "dance.search",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "unknown",
"command": "-dance.search",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "shift+oem_2",
"command": "dance.search.extend",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "shift+unknown",
"command": "-dance.search.extend",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "alt+oem_2",
"command": "dance.search.backwards",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "alt+unknown",
"command": "-dance.search.backwards",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "shift+alt+oem_2",
"command": "dance.search.backwards.extend",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "shift+alt+unknown",
"command": "-dance.search.backwards.extend",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "3",
"command": "dance.registers.select",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "unknown",
"command": "-dance.registers.select",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "alt+oem_5",
"command": "dance.objects.selectToEnd.inner",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "alt+unknown",
"command": "-dance.objects.selectToEnd.inner",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "unknown",
"command": "-dance.toUpperCase",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "oem_3",
"command": "-dance.toLowerCase",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "oem_3",
"command": "dance.objects.selectToStart",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "unknown",
"command": "-dance.objects.selectToStart",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "oem_5",
"command": "dance.objects.selectToEnd",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "unknown",
"command": "-dance.objects.selectToEnd",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "alt+oem_3",
"command": "dance.objects.selectToStart.inner",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "alt+unknown",
"command": "-dance.objects.selectToStart.inner",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "oem_7",
"command": "dance.toLowerCase",
},
{
"key": "shift+oem_7",
"command": "dance.toUpperCase",
},
{
"key": "alt+oem_7",
"command": "dance.swapCase",
"when": "editorTextFocus && dance.mode == 'normal'",
},
{
"key": "alt+oem_3",
"command": "-dance.swapCase",
"when": "editorTextFocus && dance.mode == 'normal'",
},
]; | the_stack |
namespace SchemeDesigner {
/**
* Tools
*/
export class Tools {
/**
* Number for id generator
* @type {number}
*/
protected static idNumber: number = 0;
/**
* Object configurator
* @param obj
* @param params
*/
public static configure(obj: any, params: any): void
{
if (params) {
for (let paramName in params) {
let value = params[paramName];
let setter = 'set' + Tools.capitalizeFirstLetter(paramName);
if (typeof obj[setter] === 'function') {
obj[setter].call(obj, value);
}
}
}
}
/**
* First latter to uppercase
* @param string
* @returns {string}
*/
public static capitalizeFirstLetter(string: string): string
{
return string.charAt(0).toUpperCase() + string.slice(1);
}
/**
* Clone object
* @param obj
*/
public static clone(obj: Object): Object
{
return JSON.parse(JSON.stringify(obj));
};
/**
* Check than point in rect
* @param coordinates
* @param boundingRect
* @param rotation - rotation of rect
* @returns {boolean}
*/
public static pointInRect(coordinates: Coordinates, boundingRect: BoundingRect, rotation?: number): boolean
{
let result = false;
let x = coordinates.x;
let y = coordinates.y;
// move point by rotation
if (rotation) {
rotation = -rotation;
let rectCenterX = (boundingRect.left + boundingRect.right) / 2;
let rectCenterY = (boundingRect.top + boundingRect.bottom) / 2;
let rotatedPoint = Tools.rotatePointByAxis(coordinates, {x: rectCenterX, y: rectCenterY}, rotation);
x = rotatedPoint.x;
y = rotatedPoint.y;
}
if (boundingRect.left <= x && boundingRect.right >= x
&& boundingRect.top <= y && boundingRect.bottom >= y) {
result = true;
}
return result;
}
/**
* Rotate point by axis
* @param point
* @param axis
* @param rotation
* @returns {Coordinates}
*/
public static rotatePointByAxis(point: Coordinates, axis: Coordinates, rotation: number): Coordinates
{
rotation = rotation * Math.PI / 180;
let x = axis.x + (point.x - axis.x) * Math.cos(rotation) - (point.y - axis.y) * Math.sin(rotation);
let y = axis.y + (point.x - axis.x) * Math.sin(rotation) + (point.y - axis.y) * Math.cos(rotation);
return {x: x, y: y};
}
/**
* Rect intersect rect
* @param boundingRect1
* @param boundingRect2
* @returns {boolean}
*/
public static rectIntersectRect(boundingRect1: BoundingRect, boundingRect2: BoundingRect): boolean
{
return !(
boundingRect1.top > boundingRect2.bottom
|| boundingRect1.bottom < boundingRect2.top
|| boundingRect1.right < boundingRect2.left
|| boundingRect1.left > boundingRect2.right
);
}
/**
* Find objects by coordinates
* @param boundingRect
* @param objects
* @returns {SchemeObject[]}
*/
public static filterObjectsByBoundingRect(boundingRect: BoundingRect, objects: SchemeObject[]): SchemeObject[]
{
let result: SchemeObject[] = [];
for (let schemeObject of objects) {
let objectBoundingRect = schemeObject.getOuterBoundingRect();
let isPart = this.rectIntersectRect(objectBoundingRect, boundingRect);
if (isPart) {
result.push(schemeObject);
}
}
return result;
}
/**
* Filter by bounding rect objects in layers
* @param boundingRect
* @param objectsByLayers
* @returns {SchemeObjectsByLayers}
*/
public static filterLayersObjectsByBoundingRect(boundingRect: BoundingRect, objectsByLayers: SchemeObjectsByLayers): SchemeObjectsByLayers
{
let result: SchemeObjectsByLayers = {};
for (let layerId in objectsByLayers) {
let objects = objectsByLayers[layerId];
result[layerId] = Tools.filterObjectsByBoundingRect(boundingRect, objects);
}
return result;
}
/**
* convert max-width/max-height values that may be percentages into a number
* @param styleValue
* @param node
* @param parentProperty
* @returns {number}
*/
public static parseMaxStyle(styleValue: number | string, node: HTMLElement, parentProperty: string): number {
let valueInPixels;
if (typeof styleValue === 'string') {
valueInPixels = parseInt(styleValue, 10);
if (styleValue.indexOf('%') !== -1) {
// percentage * size in dimension
valueInPixels = valueInPixels / 100 * (node.parentNode as any)[parentProperty];
}
} else {
valueInPixels = styleValue;
}
return valueInPixels;
}
/**
* Returns if the given value contains an effective constraint.
* @param value
* @returns {boolean}
*/
public static isConstrainedValue(value: any): boolean {
return value !== undefined && value !== null && value !== 'none';
}
/**
* Get constraint dimention
* @see http://www.nathanaeljones.com/blog/2013/reading-max-width-cross-browser
* @param domNode
* @param maxStyle
* @param percentageProperty
* @returns {null|number}
*/
public static getConstraintDimension(domNode: HTMLElement, maxStyle: string, percentageProperty: string): null|number {
let view = document.defaultView;
let parentNode = domNode.parentNode as HTMLElement;
let constrainedNode = (view.getComputedStyle(domNode) as any)[maxStyle];
let constrainedContainer = (view.getComputedStyle(parentNode) as any)[maxStyle];
let hasCNode = this.isConstrainedValue(constrainedNode);
let hasCContainer = this.isConstrainedValue(constrainedContainer);
let infinity = Number.POSITIVE_INFINITY;
if (hasCNode || hasCContainer) {
return Math.min(
hasCNode ? this.parseMaxStyle(constrainedNode, domNode, percentageProperty) : infinity,
hasCContainer ? this.parseMaxStyle(constrainedContainer, parentNode, percentageProperty) : infinity);
}
return null;
}
/**
* Number or undefined if no constraint
* @param domNode
* @returns {number|string}
*/
public static getConstraintWidth(domNode: HTMLElement) {
return this.getConstraintDimension(domNode, 'max-width', 'clientWidth');
}
/**
* Number or undefined if no constraint
* @param domNode
* @returns {number|string}
*/
public static getConstraintHeight(domNode: HTMLElement) {
return this.getConstraintDimension(domNode, 'max-height', 'clientHeight');
}
/**
* Get max width
* @param domNode
* @returns {number}
*/
public static getMaximumWidth(domNode: HTMLElement): number {
let container = domNode.parentNode as HTMLElement;
if (!container) {
return domNode.clientWidth;
}
let paddingLeft = parseInt(this.getStyle(container, 'padding-left'), 10);
let paddingRight = parseInt(this.getStyle(container, 'padding-right'), 10);
let w = container.clientWidth - paddingLeft - paddingRight;
let cw = this.getConstraintWidth(domNode);
return !cw ? w : Math.min(w, cw);
}
/**
* Get max height
* @param domNode
* @returns {number}
*/
public static getMaximumHeight(domNode: HTMLElement): number {
let container = domNode.parentNode as HTMLElement;
if (!container) {
return domNode.clientHeight;
}
let paddingTop = parseInt(this.getStyle(container, 'padding-top'), 10);
let paddingBottom = parseInt(this.getStyle(container, 'padding-bottom'), 10);
let h = container.clientHeight - paddingTop - paddingBottom;
let ch = this.getConstraintHeight(domNode);
return !ch ? h : Math.min(h, ch);
}
/**
* Get style
* @param element
* @param {string} property
* @returns {string}
*/
public static getStyle(element: any, property: string): string {
return element.currentStyle ?
element.currentStyle[property] :
document.defaultView.getComputedStyle(element, null).getPropertyValue(property);
};
/**
* Generate unique id
* @returns {number}
*/
public static generateUniqueId(): number
{
this.idNumber++;
return this.idNumber;
}
/**
* Touch supported
* @returns {boolean}
*/
public static touchSupported(): boolean
{
return 'ontouchstart' in window;
}
/**
* Sorting object
* @param obj
* @returns {{}}
*/
public static sortObject(obj: Object): Object
{
let keys = Object.keys(obj),
len = keys.length;
keys.sort();
let result = {};
for (let i = 0; i < len; i++) {
let k = keys[i];
(result as any)[k] = (obj as any)[k];
}
return result;
}
/**
* Get random string
* @returns {string}
*/
public static getRandomString(): string
{
return Math.random().toString(36).substr(2, 9);
}
/**
* Disable selection on element
* @param element
*/
public static disableElementSelection(element: HTMLElement): void
{
let styles = [
'-webkit-touch-callout',
'-webkit-user-select',
'-khtml-user-select',
'-moz-user-select',
'-ms-user-select',
'user-select',
'outline'
];
for (let styleName of styles) {
(element.style as any)[styleName] = 'none';
}
}
/**
* Get pointer from event
* @param e
* @param clientProp
* @returns {number}
*/
public static getPointer(e: MouseEvent | TouchEvent, clientProp: string): number
{
let touchProp = e.type === 'touchend' ? 'changedTouches' : 'touches';
let event = (e as any);
// touch event
if (event[touchProp] && event[touchProp][0]) {
if (event[touchProp].length == 2) {
return (event[touchProp][0][clientProp] + event[touchProp][1][clientProp]) / 2;
}
return event[touchProp][0][clientProp];
}
return event[clientProp];
}
}
} | the_stack |
import { HttpClient, HttpErrorResponse } from '@angular/common/http';
import { Injectable } from '@angular/core';
import { Deserialize, Serialize } from 'cerialize';
import { Observable, of, zip } from 'rxjs';
import { catchError, map, mergeMap, tap } from 'rxjs/operators';
import { environment } from '@browsecloud/environments/environment';
import {
BrowseCloudBatchJob,
BrowseCloudDocument,
BrowseCloudDocumentWithJobs,
BrowseCloudFileValidationResponse
} from '@browsecloud/models';
import { AuthService } from '@browsecloud/services/auth.service';
import { ErrorService } from '@browsecloud/services/error.service';
@Injectable()
export class BrowseCloudService {
private jobsCache: BrowseCloudBatchJob[];
private documentsCache: BrowseCloudDocument[];
private publicJobsCache: BrowseCloudBatchJob[];
private publicDocumentsCache: BrowseCloudDocument[];
constructor(
private authService: AuthService,
private http: HttpClient,
private errorService: ErrorService
) { }
public getAllDocuments(isPublic: boolean): Observable<BrowseCloudDocument[]> {
const workingCache = isPublic === true ? this.publicDocumentsCache : this.documentsCache;
if (workingCache != null && workingCache.length !== 0) {
return of(workingCache);
}
return this.authService.acquireToken(environment.auth.serviceScopes)
.pipe(
mergeMap((token) => {
return this.http.get(
environment.serviceURL + `/api/v1/documents${isPublic === true ? '/public' : ''}`,
{
headers: { Authorization: 'Bearer ' + token },
}
).pipe(
map((ta: any[]) => ta.map((t) => {
return Deserialize(t, BrowseCloudDocument) as BrowseCloudDocument;
})),
tap((documents) => isPublic === true ? this.publicDocumentsCache = documents : this.documentsCache = documents),
catchError((error) => {
this.errorService.newError('Error getting all documents.', error);
throw error;
})
);
})
);
}
public getAllJobs(isPublic: boolean): Observable<BrowseCloudBatchJob[]> {
const workingCache = isPublic === true ? this.publicJobsCache : this.jobsCache;
if (workingCache != null && workingCache.length !== 0) {
return of(workingCache);
}
return this.authService.acquireToken(environment.auth.serviceScopes)
.pipe(
mergeMap((token) => {
return this.http.get(
environment.serviceURL + `/api/v1/jobs${isPublic === true ? '/public' : ''}`,
{
headers: { Authorization: 'Bearer ' + token },
}
).pipe(
map((ta: any[]) => ta.map((t) => {
return Deserialize(t, BrowseCloudBatchJob) as BrowseCloudBatchJob;
})),
tap((jobs) => isPublic === true ? this.publicJobsCache = jobs : this.jobsCache = jobs),
catchError((error) => {
this.errorService.newError('Error getting all jobs.', error);
throw error;
})
);
})
);
}
public getJobFile(jobId: string, fileName: string): Observable<string> {
return this.authService.acquireToken(environment.auth.serviceScopes)
.pipe(
mergeMap((token) => {
return this.http.get(
environment.serviceURL + `/api/v1/jobs/${jobId}/files/${fileName}`,
{
responseType: 'text',
headers: { Authorization: 'Bearer ' + token },
}
).pipe(
catchError((error) => {
// No need to throw when the file is simply not available.
// Some files are optional. Let CountingGridModel handle errors.
if (error.status === 404) {
return of(null);
}
this.errorService.newError(`Error getting file ${fileName} for job.`, error);
throw error;
})
);
})
);
}
public getJobsForDocument(documentId: string): Observable<BrowseCloudBatchJob[]> {
if (this.jobsCache != null) {
const jobs = this.jobsCache.filter((job) => job.documentId === documentId);
if (jobs.length > 0) {
return of(jobs);
}
}
return this.authService.acquireToken(environment.auth.serviceScopes)
.pipe(
mergeMap((token) => {
return this.http.get(
environment.serviceURL + `/api/v1/documents/${documentId}/jobs`,
{
headers: { Authorization: 'Bearer ' + token },
}
).pipe(
map((ta: any[]) => ta.map((t) => {
return Deserialize(t, BrowseCloudBatchJob) as BrowseCloudBatchJob;
})),
tap((jobs) => {
if (this.jobsCache != null) {
this.jobsCache.push(...jobs);
}
}),
catchError((error) => {
this.errorService.newError('Error getting all jobs for document.', error);
throw error;
})
);
})
);
}
public getDocumentWithJobs(documentId: string): Observable<BrowseCloudDocumentWithJobs> {
if (this.documentsCache != null && this.jobsCache != null) {
const documentsWithJobs = BrowseCloudDocumentWithJobs.fromDocumentsAndJobs(
[...(this.documentsCache || []), ...(this.publicDocumentsCache || [])],
[...(this.jobsCache || []), ...(this.publicJobsCache || [])]
);
if (documentsWithJobs != null) {
const documentWithJobs = documentsWithJobs.find((d) => d.document.id === documentId);
if (documentWithJobs != null) {
return of(documentWithJobs);
}
}
}
return this.authService.acquireToken(environment.auth.serviceScopes)
.pipe(
mergeMap((token) => {
return this.http.get(
environment.serviceURL + `/api/v1/documents/${documentId}`,
{
headers: { Authorization: 'Bearer ' + token },
}
).pipe(
map((t: any) => Deserialize(t, BrowseCloudDocument) as BrowseCloudDocument),
mergeMap((document) => this.addJobsToDocument(document)),
catchError((error) => {
this.errorService.newError('Error getting document.', error);
throw error;
})
);
})
);
}
public getAllDocumentsWithJobs(isPublic: boolean): Observable<BrowseCloudDocumentWithJobs[]> {
return zip(this.getAllDocuments(isPublic), this.getAllJobs(isPublic))
.pipe(
map(([documents, jobs]) => BrowseCloudDocumentWithJobs.fromDocumentsAndJobs(documents, jobs))
);
}
public addJobsToDocument(document: BrowseCloudDocument): Observable<BrowseCloudDocumentWithJobs> {
return this.getJobsForDocument(document.id)
.pipe(
map((jobs) => new BrowseCloudDocumentWithJobs(document, jobs))
);
}
public postDocumentWithText(documentText: string): Observable<BrowseCloudDocument> {
return this.authService.acquireToken(environment.auth.serviceScopes)
.pipe(
mergeMap((token) => {
return this.http.post(
environment.serviceURL + '/api/v1/documents',
documentText,
{
headers: { Authorization: 'Bearer ' + token },
}
).pipe(
map((t: any) => Deserialize(t, BrowseCloudDocument) as BrowseCloudDocument),
tap((d) => this.documentsCache.push(d)),
catchError((error) => {
this.errorService.newError('Error creating new document.', error);
throw error;
})
);
})
);
}
public postValidateDocumentWithText(documentText: string): Observable<BrowseCloudFileValidationResponse> {
return this.authService.acquireToken(environment.auth.serviceScopes)
.pipe(
mergeMap((token) => {
return this.http.post(
environment.serviceURL + '/api/v1/documents/validateInput',
documentText,
{
headers: { Authorization: 'Bearer ' + token },
}
).pipe(
map((t: any) => Deserialize(t, BrowseCloudFileValidationResponse) as BrowseCloudFileValidationResponse),
catchError((error: HttpErrorResponse) => {
this.errorService.newError('Error validating document.', error);
throw error;
})
);
})
);
}
public putDocument(document: BrowseCloudDocument): Observable<BrowseCloudDocument> {
return this.authService.acquireToken(environment.auth.serviceScopes)
.pipe(
mergeMap((token) => {
return this.http.put(
environment.serviceURL + `/api/v1/documents/${document.id}`,
Serialize(document),
{
headers: { Authorization: 'Bearer ' + token },
}
).pipe(
map((t: any) => Deserialize(t, BrowseCloudDocument) as BrowseCloudDocument),
tap((d) => {
if (this.documentsCache != null) {
const index = this.documentsCache.findIndex((d1) => d1.id === d.id);
if (index !== -1) {
this.documentsCache[index] = d;
}
}
if (this.publicDocumentsCache != null) {
const index = this.publicDocumentsCache.findIndex((d1) => d1.id === d.id);
if (index !== -1) {
this.publicDocumentsCache[index] = d;
if (document.isPublic === false) {
this.publicDocumentsCache.splice(index, 1);
}
} else if (document.isPublic === true) {
this.publicDocumentsCache.push(document);
if (this.jobsCache != null) {
this.publicJobsCache.push(...this.jobsCache.filter((j) => j.documentId === document.id));
}
}
}
}),
catchError((error) => {
this.errorService.newError('Error updating the document.', error);
throw error;
})
);
})
);
}
public putJob(job: BrowseCloudBatchJob): Observable<BrowseCloudBatchJob> {
return this.authService.acquireToken(environment.auth.serviceScopes)
.pipe(
mergeMap((token) => {
return this.http.put(
environment.serviceURL + `/api/v1/jobs/${job.id}`,
Serialize(job),
{
headers: { Authorization: 'Bearer ' + token },
}
).pipe(
map((t: any) => Deserialize(t, BrowseCloudBatchJob) as BrowseCloudBatchJob),
tap((j) => {
if (this.jobsCache != null) {
const index = this.jobsCache.findIndex((j1) => j1.id === j.id);
if (index !== -1) {
this.jobsCache[index] = j;
}
}
if (this.publicJobsCache != null) {
const index = this.publicJobsCache.findIndex((j1) => j1.id === j.id);
if (index !== -1) {
this.publicJobsCache[index] = j;
}
}
}),
catchError((error) => {
this.errorService.newError('Error updating the job settings.', error);
throw error;
})
);
})
);
}
public postDocumentWithTextAndModifyDocument(
documentText: string,
title: string,
description: string,
isPublic: boolean
): Observable<BrowseCloudDocument> {
return this.postDocumentWithText(documentText)
.pipe(
mergeMap((document) => {
document.displayName = title;
document.description = description;
document.isPublic = isPublic;
return this.putDocument(document);
})
);
}
public deleteDocument(documentId: string): Observable<any> {
return this.authService.acquireToken(environment.auth.serviceScopes)
.pipe(
mergeMap((token) => {
return this.http.delete(
environment.serviceURL + `/api/v1/documents/${documentId}`,
{
headers: { Authorization: 'Bearer ' + token },
}
).pipe(
tap((d) => {
if (this.documentsCache != null) {
const index = this.documentsCache.findIndex((d1) => d1.id === documentId);
if (index !== -1) {
this.documentsCache.splice(index, 1);
}
}
if (this.publicDocumentsCache != null) {
const index = this.publicDocumentsCache.findIndex((d1) => d1.id === documentId);
if (index !== -1) {
this.publicDocumentsCache.splice(index, 1);
}
}
}),
catchError((error) => {
this.errorService.newError('Error deleting the document.', error);
throw error;
})
);
})
);
}
public getDemoData(): Observable<BrowseCloudDocumentWithJobs> {
return this.http.get(
'assets/demo/demo-data.json'
).pipe(
map((t) => Deserialize(t, BrowseCloudDocumentWithJobs) as BrowseCloudDocumentWithJobs),
catchError((error) => {
this.errorService.newError('Error getting demo document.', error);
throw error;
})
);
}
public getDemoFile(fileName: string): Observable<string> {
return this.http.get(
`assets/demo/${fileName}`,
{
responseType: 'text',
}
).pipe(
catchError((error) => {
// No need to throw when the file is simply not available.
// Some files are optional. Let CountingGridModel handle errors.
if (error.status === 404) {
return of(null);
}
this.errorService.newError(`Error getting file ${fileName} for demo job.`, error);
throw error;
})
);
}
public updateJobCache(job: BrowseCloudBatchJob): void {
const jobIndex = this.jobsCache != null ? this.jobsCache.findIndex((j) => j.id === job.id) : -1;
const publicJobIndex = this.publicJobsCache != null ? this.publicJobsCache.findIndex((j) => j.id === job.id) : -1;
if (jobIndex === -1) {
return;
}
this.jobsCache[jobIndex] = job;
if (publicJobIndex === -1) {
return;
}
this.publicJobsCache[publicJobIndex] = job;
}
public userCanModifyDocument(document: BrowseCloudDocument): Observable<boolean> {
if (this.documentsCache != null) {
return of(this.documentsCache.findIndex((doc) => doc.id === document.id) !== -1);
}
return this.authService.acquireToken(environment.auth.serviceScopes)
.pipe(
mergeMap((token) => {
return this.http.get(
environment.serviceURL + '/api/v1/users/me/userIdentityIds',
{
headers: { Authorization: 'Bearer ' + token },
}
).pipe(
map((identityList: string[]) => {
return identityList.findIndex((identity) => identity === document.owner.id ) !== -1
|| document.acl.some((aclItem) => {
return identityList.findIndex((identity) => identity === aclItem.id ) !== -1;
});
}),
catchError((error) => {
this.errorService.newError('Error getting user groups.', error);
throw error;
})
);
})
);
}
public postNewJob(job: BrowseCloudBatchJob): Observable<BrowseCloudBatchJob> {
return this.authService.acquireToken(environment.auth.serviceScopes)
.pipe(
mergeMap((token) => {
return this.http.post(
environment.serviceURL + '/api/v1/jobs',
job,
{
headers: { Authorization: 'Bearer ' + token },
}
).pipe(
map((t: any) =>
Deserialize(t, BrowseCloudBatchJob) as BrowseCloudBatchJob
),
tap((j) => {
if (this.jobsCache != null) {
this.jobsCache.push(j);
}
}),
catchError((error) => {
this.errorService.newError('Error posting new job.', error);
throw error;
})
);
})
);
}
} | the_stack |
import Ajv from "ajv";
import { JSONSchema7 } from "json-schema";
import { z } from "zod";
import { zodToJsonSchema } from "../src/zodToJsonSchema";
// Ajv instance used to assert that every produced schema is itself a
// valid JSON Schema draft-07 document.
const ajv = new Ajv();
// CommonJS require: json-schema-deref-sync is used to resolve $refs so
// referential identity between resolved nodes can be asserted.
const deref = require("json-schema-deref-sync");

// Tests for $ref path construction: recurring schemas, union participants,
// recursive schemas, and the $refStrategy / basePath / definitionPath options.
describe("Pathing", () => {
  // A schema reused in several places should be emitted once and
  // referenced via in-document JSON pointers everywhere else.
  it("should handle recurring properties with paths", () => {
    const addressSchema = z.object({
      street: z.string(),
      number: z.number(),
      city: z.string(),
    });
    const someAddresses = z.object({
      address1: addressSchema,
      address2: addressSchema,
      lotsOfAddresses: z.array(addressSchema),
    });
    const jsonSchema = {
      $schema: "http://json-schema.org/draft-07/schema#",
      type: "object",
      properties: {
        address1: {
          type: "object",
          properties: {
            street: { type: "string" },
            number: { type: "number" },
            city: { type: "string" },
          },
          additionalProperties: false,
          required: ["street", "number", "city"],
        },
        address2: { $ref: "#/properties/address1" },
        lotsOfAddresses: {
          type: "array",
          items: { $ref: "#/properties/address1" },
        },
      },
      additionalProperties: false,
      required: ["address1", "address2", "lotsOfAddresses"],
    };

    const parsedSchema = zodToJsonSchema(someAddresses);
    expect(parsedSchema).toStrictEqual(jsonSchema);
    expect(ajv.validateSchema(parsedSchema!)).toEqual(true);
  });

  // A schema used both inside a union and as a direct property should be
  // referenced via its first occurrence (inside the union's anyOf).
  it("Should properly reference union participants", () => {
    const participant = z.object({ str: z.string() });

    const schema = z.object({
      union: z.union([participant, z.string()]),
      part: participant,
    });

    const expectedJsonSchema = {
      $schema: "http://json-schema.org/draft-07/schema#",
      type: "object",
      properties: {
        union: {
          anyOf: [
            {
              type: "object",
              properties: {
                str: {
                  type: "string",
                },
              },
              additionalProperties: false,
              required: ["str"],
            },
            {
              type: "string",
            },
          ],
        },
        part: {
          $ref: "#/properties/union/anyOf/0",
        },
      },
      additionalProperties: false,
      required: ["union", "part"],
    };

    const parsedSchema = zodToJsonSchema(schema);
    expect(parsedSchema).toStrictEqual(expectedJsonSchema);
    expect(ajv.validateSchema(parsedSchema!)).toEqual(true);

    // After dereferencing, both occurrences must resolve to the same node.
    const resolvedSchema = deref(expectedJsonSchema);
    expect(resolvedSchema.properties.part).toBe(
      resolvedSchema.properties.union.anyOf[0]
    );
  });

  // A z.lazy self-reference should become a $ref back to the schema root.
  it("Should be able to handle recursive schemas", () => {
    type Category = {
      name: string;
      subcategories: Category[];
    };

    // cast to z.ZodSchema<Category>
    // @ts-ignore
    const categorySchema: z.ZodSchema<Category> = z.lazy(() =>
      z.object({
        name: z.string(),
        subcategories: z.array(categorySchema),
      })
    );

    const parsedSchema = zodToJsonSchema(categorySchema);

    const expectedJsonSchema = {
      $schema: "http://json-schema.org/draft-07/schema#",
      type: "object",
      properties: {
        name: {
          type: "string",
        },
        subcategories: {
          type: "array",
          items: {
            $ref: "#/",
          },
        },
      },
      required: ["name", "subcategories"],
      additionalProperties: false,
    };

    expect(parsedSchema).toStrictEqual(expectedJsonSchema);
    expect(ajv.validateSchema(parsedSchema!)).toEqual(true);

    // The recursive items ref must resolve back to the document root.
    const resolvedSchema = deref(parsedSchema);
    expect(resolvedSchema.properties.subcategories.items).toBe(resolvedSchema);
  });

  // Recursion nested inside records/nullables/optionals should still
  // produce a correct $ref to the recurring participant.
  it("Should be able to handle complex & nested recursive schemas", () => {
    type Category = {
      name: string;
      inner: {
        subcategories?: Record<string, Category> | null;
      };
    };

    // cast to z.ZodSchema<Category>
    // @ts-ignore
    const categorySchema: z.ZodSchema<Category> = z.lazy(() =>
      z.object({
        name: z.string(),
        inner: z.object({
          subcategories: z.record(categorySchema).nullable().optional(),
        }),
      })
    );

    const inObjectSchema = z.object({
      category: categorySchema,
    });

    const parsedSchema = zodToJsonSchema(inObjectSchema);

    const expectedJsonSchema = {
      $schema: "http://json-schema.org/draft-07/schema#",
      type: "object",
      additionalProperties: false,
      required: ["category"],
      properties: {
        category: {
          type: "object",
          properties: {
            name: {
              type: "string",
            },
            inner: {
              type: "object",
              additionalProperties: false,
              properties: {
                subcategories: {
                  anyOf: [
                    {
                      type: "object",
                      additionalProperties: {
                        $ref: "#/properties/category",
                      },
                    },
                    {
                      type: "null",
                    },
                  ],
                },
              },
            },
          },
          required: ["name", "inner"],
          additionalProperties: false,
        },
      },
    };

    expect(parsedSchema).toStrictEqual(expectedJsonSchema);
    expect(ajv.validateSchema(parsedSchema!)).toEqual(true);
  });

  // $refStrategy "relative": refs are expressed as relative JSON pointers.
  it("should work with relative references", () => {
    const recurringSchema = z.string();
    const objectSchema = z.object({
      foo: recurringSchema,
      bar: recurringSchema,
    });

    const jsonSchema = zodToJsonSchema(objectSchema, {
      $refStrategy: "relative",
    });

    const exptectedResult: JSONSchema7 = {
      $schema: "http://json-schema.org/draft-07/schema#",
      type: "object",
      properties: {
        foo: {
          type: "string",
        },
        bar: {
          $ref: "1/foo",
        },
      },
      required: ["foo", "bar"],
      additionalProperties: false,
    };

    expect(jsonSchema).toStrictEqual(exptectedResult);
  });

  // basePath option: generated refs are rooted at the supplied path.
  it("should be possible to override the base path", () => {
    const recurringSchema = z.string();
    const objectSchema = z.object({
      foo: recurringSchema,
      bar: recurringSchema,
    });

    const jsonSchema = zodToJsonSchema(objectSchema, {
      basePath: ["#", "lol", "xD"],
    });

    const exptectedResult: JSONSchema7 = {
      $schema: "http://json-schema.org/draft-07/schema#",
      type: "object",
      properties: {
        foo: {
          type: "string",
        },
        bar: {
          $ref: "#/lol/xD/properties/foo",
        },
      },
      required: ["foo", "bar"],
      additionalProperties: false,
    };

    expect(jsonSchema).toStrictEqual(exptectedResult);
  });

  // $refStrategy "none": recurring schemas are inlined instead of referenced.
  it("should be possible to opt out of $ref building", () => {
    const recurringSchema = z.string();
    const objectSchema = z.object({
      foo: recurringSchema,
      bar: recurringSchema,
    });

    const jsonSchema = zodToJsonSchema(objectSchema, {
      $refStrategy: "none",
    });

    const exptectedResult: JSONSchema7 = {
      $schema: "http://json-schema.org/draft-07/schema#",
      type: "object",
      properties: {
        foo: {
          type: "string",
        },
        bar: {
          type: "string",
        },
      },
      required: ["foo", "bar"],
      additionalProperties: false,
    };

    expect(jsonSchema).toStrictEqual(exptectedResult);
  });

  // With refs disabled, a recursive schema cannot be inlined infinitely:
  // the library should warn and fall back to the empty (any) schema.
  it("When opting out of ref building and using recursive schemas, should warn and default to any", () => {
    // Capture console.warn so the warning text can be asserted.
    global.console = { ...global.console, warn: jest.fn() };

    type Category = {
      name: string;
      subcategories: Category[];
    };

    // cast to z.ZodSchema<Category>
    // @ts-ignore
    const categorySchema: z.ZodSchema<Category> = z.lazy(() =>
      z.object({
        name: z.string(),
        subcategories: z.array(categorySchema),
      })
    );

    const parsedSchema = zodToJsonSchema(categorySchema, {
      $refStrategy: "none",
    });

    const expectedJsonSchema = {
      $schema: "http://json-schema.org/draft-07/schema#",
      type: "object",
      properties: {
        name: {
          type: "string",
        },
        subcategories: {
          type: "array",
          items: {},
        },
      },
      required: ["name", "subcategories"],
      additionalProperties: false,
    };

    expect(parsedSchema).toStrictEqual(expectedJsonSchema);
    expect(console.warn).toBeCalledWith(
      "Recursive reference detected at #/properties/subcategories/items! Defaulting to any"
    );
  });

  // Named schema with definitionPath "$defs": refs must point through
  // the chosen definitions container.
  it("should be possible to override get proper references even when picking optional definitions path $defs", () => {
    const recurringSchema = z.string();
    const objectSchema = z.object({
      foo: recurringSchema,
      bar: recurringSchema,
    });

    const jsonSchema = zodToJsonSchema(objectSchema, {
      name: "hello",
      definitionPath: "$defs",
    });

    const exptectedResult = {
      $schema: "http://json-schema.org/draft-07/schema#",
      $ref: "#/$defs/hello",
      $defs: {
        hello: {
          type: "object",
          properties: {
            foo: {
              type: "string",
            },
            bar: {
              $ref: "#/$defs/hello/properties/foo",
            },
          },
          required: ["foo", "bar"],
          additionalProperties: false,
        },
      },
    };

    expect(jsonSchema).toStrictEqual(exptectedResult);
  });

  // Same as above, but with the draft-07 style "definitions" container.
  it("should be possible to override get proper references even when picking optional definitions path definitions", () => {
    const recurringSchema = z.string();
    const objectSchema = z.object({
      foo: recurringSchema,
      bar: recurringSchema,
    });

    const jsonSchema = zodToJsonSchema(objectSchema, {
      name: "hello",
      definitionPath: "definitions",
    });

    const exptectedResult = {
      $schema: "http://json-schema.org/draft-07/schema#",
      $ref: "#/definitions/hello",
      definitions: {
        hello: {
          type: "object",
          properties: {
            foo: {
              type: "string",
            },
            bar: {
              $ref: "#/definitions/hello/properties/foo",
            },
          },
          required: ["foo", "bar"],
          additionalProperties: false,
        },
      },
    };

    expect(jsonSchema).toStrictEqual(exptectedResult);
  });
});
namespace eui {
/**
* An ViewStack navigator container consists of a collection of child
* containers stacked on top of each other, where only one child
* at a time is visible.
* When a different child container is selected, it seems to replace
* the old one because it appears in the same location.
* However, the old child container still exists; it is just invisible.
*
* @event eui.CollectionEvent.COLLECTION_CHANGE Dispatched when the ICollection has been updated in some way.
*
* @version Egret 2.4
* @version eui 1.0
* @platform Web,Native
* @includeExample extension/eui/components/ViewStackExample.ts
* @language en_US
*/
/**
* ViewStack 导航器容器由一组彼此上下堆叠的子容器组成,其中一次只可以显示一个子容器。
* 选择另一个子容器后,它将显示在原来子容器的位置处,所以看起来好像此子容器替换了原来的子容器。
* 但是,原来的子容器仍然存在,只不过它现在处于不可见状态。
*
* @event eui.CollectionEvent.COLLECTION_CHANGE 以某种方式更新 ICollection 后分派。
*
* @version Egret 2.4
* @version eui 1.0
* @platform Web,Native
* @includeExample extension/eui/components/ViewStackExample.ts
* @language zh_CN
*/
export class ViewStack extends Group implements ICollection {
/**
* Constructor.
*
* @version Egret 2.4
* @version eui 1.0
* @platform Web,Native
* @language en_US
*/
/**
* 构造函数。
*
* @version Egret 2.4
* @version eui 1.0
* @platform Web,Native
* @language zh_CN
*/
public constructor() {
    // No initialization beyond the Group base class.
    super();
}
/**
* The layout object for this container.
* This object is responsible for the measurement and layout of
* the visual elements in the container.
*
* @default eui.BasicLayout
*
* @version Egret 2.4
* @version eui 1.0
* @platform Web,Native
* @language en_US
*/
/**
* 此容器的 layout 对象。此对象负责容器中可视元素的测量和布局。
*
* @default eui.BasicLayout
*
* @version Egret 2.4
* @version eui 1.0
* @platform Web,Native
* @language zh_CN
*/
public get layout():LayoutBase {
    // Exposes the layout object held by the Group base class ($layout).
    return this.$layout;
}
/**
 * @private
 * Cached child reference. Note: the selectedChild accessors below derive
 * their value from selectedIndex and do not read this field here.
 */
private _selectedChild:egret.DisplayObject = null;
/**
* A reference to the currently visible child container.
* The default is a reference to the first child.
* If there are no children, this property is <code>null</code>.
*
* @version Egret 2.4
* @version eui 1.0
* @platform Web,Native
* @language en_US
*/
/**
* 对当前可见子容器的引用。默认设置为对第一个子容器的引用。如果没有子项,则此属性为 <code>null</code>。
*
* @version Egret 2.4
* @version eui 1.0
* @platform Web,Native
* @language zh_CN
*/
public get selectedChild():egret.DisplayObject {
    // Derived from the selected index rather than stored directly.
    const current = this.selectedIndex;
    return (current >= 0 && current < this.numChildren)
        ? this.getChildAt(current)
        : null;
}

public set selectedChild(value:egret.DisplayObject) {
    // Only switch when the given display object is actually a child.
    const childIndex = this.getChildIndex(value);
    if (childIndex >= 0 && childIndex < this.numChildren) {
        this.setSelectedIndex(childIndex);
    }
}
/**
 * @private
 * Caches the pending selected index before properties are committed
 * (ListBase.NO_PROPOSED_SELECTION when no change is pending).
 */
private proposedSelectedIndex:number = ListBase.NO_PROPOSED_SELECTION;
/**
 * @private
 * Committed selected index; -1 means no child is selected.
 */
public _selectedIndex:number = -1;
/**
* The zero-based index of the currently visible child container.
* Child indexes are in the range 0, 1, 2, ..., n - 1,
* where <code>n</code> is the number of children.
* The default value is 0, corresponding to the first child.
* If there are no children, the value of this property is <code>-1</code>.
*
* @version Egret 2.4
* @version eui 1.0
* @platform Web,Native
* @language en_US
*/
/**
* 当前可见子容器的从零开始的索引。子索引的范围是 0、1、2、...、n - 1,其中 <code>n</code> 是子项的数目。
* 默认值是 0,对应于第一个子项。如果不存在子容器,则此属性的值为 -1。
*
* @version Egret 2.4
* @version eui 1.0
* @platform Web,Native
* @language zh_CN
*/
public get selectedIndex():number {
return this.proposedSelectedIndex != ListBase.NO_PROPOSED_SELECTION ? this.proposedSelectedIndex : this._selectedIndex;
}
public set selectedIndex(value:number) {
value = +value|0;
this.setSelectedIndex(value);
}
/**
* @private
* 设置选中项索引
*/
private setSelectedIndex(value:number):void {
if (value == this.selectedIndex) {
return;
}
this.proposedSelectedIndex = value;
this.invalidateProperties();
PropertyEvent.dispatchPropertyEvent(this,PropertyEvent.PROPERTY_CHANGE,"selectedIndex");
}
/**
* @private
* 一个子项被添加到容器内,此方法不仅在操作addChild()时会被回调,在操作setChildIndex()或swapChildren时也会回调。
* 当子项索引发生改变时,会先触发$childRemoved()方法,然后触发$childAdded()方法。
*/
$childAdded(child:egret.DisplayObject, index:number):void {
super.$childAdded(child, index);
this.showOrHide(child, false);
let selectedIndex = this.selectedIndex;
if (selectedIndex == -1) {
this.setSelectedIndex(index);
}
else if (index <= this.selectedIndex && this.$stage) {
this.setSelectedIndex(selectedIndex + 1);
}
CollectionEvent.dispatchCollectionEvent(this, CollectionEvent.COLLECTION_CHANGE,
CollectionEventKind.ADD, index, -1, [child.name]);
}
/**
* @private
* 一个子项从容器内移除,此方法不仅在操作removeChild()时会被回调,在操作setChildIndex()或swapChildren时也会回调。
* 当子项索引发生改变时,会先触发$childRemoved()方法,然后触发$childAdded()方法。
*/
$childRemoved(child:egret.DisplayObject, index:number):void {
super.$childRemoved(child, index);
this.showOrHide(child, true);
let selectedIndex = this.selectedIndex;
if (index == selectedIndex) {
if (this.numChildren > 0) {
if (index == 0) {
this.proposedSelectedIndex = 0;
this.invalidateProperties();
}
else
this.setSelectedIndex(0);
}
else
this.setSelectedIndex(-1);
}
else if (index < selectedIndex) {
this.setSelectedIndex(selectedIndex - 1);
}
CollectionEvent.dispatchCollectionEvent(this, CollectionEvent.COLLECTION_CHANGE,
CollectionEventKind.REMOVE, index, -1, [child.name]);
}
/**
* @inheritDoc
*
* @version Egret 2.4
* @version eui 1.0
* @platform Web,Native
*/
protected commitProperties():void {
super.commitProperties();
if (this.proposedSelectedIndex != ListBase.NO_PROPOSED_SELECTION) {
this.commitSelection(this.proposedSelectedIndex);
this.proposedSelectedIndex = ListBase.NO_PROPOSED_SELECTION;
}
}
/**
* @private
*
* @param newIndex
*/
private commitSelection(newIndex:number):void {
if (newIndex >= 0 && newIndex < this.numChildren) {
this._selectedIndex = newIndex;
if (this._selectedChild) {
this.showOrHide(this._selectedChild, false);
}
this._selectedChild = this.getElementAt(this._selectedIndex);
this.showOrHide(this._selectedChild, true);
}
else {
this._selectedChild = null;
this._selectedIndex = -1;
}
this.invalidateSize();
this.invalidateDisplayList();
}
/**
* @private
*
* @param child
* @param visible
*/
private showOrHide(child:egret.DisplayObject, visible:boolean):void {
if (egret.is(child, "eui.UIComponent")) {
(<eui.UIComponent><any>child).includeInLayout = visible;
}
child.visible = visible;
}
/**
* number of children
*
* @version Egret 2.4
* @version eui 1.0
* @platform Web,Native
* @language en_US
*/
/**
* 子项数量
*
* @version Egret 2.4
* @version eui 1.0
* @platform Web,Native
* @language zh_CN
*/
public get length():number {
return this.$children.length;
}
/**
* @inheritDoc
*
* @version Egret 2.4
* @version eui 1.0
* @platform Web,Native
*/
public getItemAt(index:number):any {
let element:egret.DisplayObject = this.$children[index];
return element ? element.name : "";
}
/**
* @inheritDoc
*
* @version Egret 2.4
* @version eui 1.0
* @platform Web,Native
*/
public getItemIndex(item:any):number {
let list = this.$children;
let length = list.length;
for (let i = 0; i < length; i++) {
if (list[i].name == item) {
return i;
}
}
return -1;
}
}
// Expose "selectedIndex" as a bindable property on ViewStack instances.
registerBindable(ViewStack.prototype,"selectedIndex");
} | the_stack |
import { expect } from 'chai';
import mock, * as mockRequire from 'mock-require';
import { SinonSandbox as Sandbox, SinonStub as Stub } from 'sinon';
import * as sinon from 'sinon';
import { normalize, sep } from 'path';
import { cloneDeep } from 'lodash';
import {
getMockFilename,
hydrateHttpMock,
IHttpMock,
IPartialMockRequest,
IPartialMockResponse,
} from '../../src/file';
import {
ICreateRecord,
IHeaders,
ISerializedHttp,
ISerializedRequest,
ISerializedResponse,
} from '../../src/http-serializer';
import * as httpSerializer from '../../src/http-serializer';
import {
DEFAULT_PORT_HTTP,
DEFAULT_PORT_HTTPS,
HEADER_CONTENT_TYPE,
MIME_TYPE_JSON,
} from '../../src/consts';
import { YesNoError } from '../../src/errors';
// Unit tests for src/file: loading/saving recorded HTTP mocks on disk and
// hydrating partial mock definitions into serialized records. fs-extra is
// replaced via mock-require so no real I/O happens.
//
// NOTE(review): the `calledOnce` / `calledWithExactly` chains below require
// the sinon-chai plugin to be registered in the test bootstrap; without it
// those property accesses assert nothing — confirm the setup.
describe('file', () => {
  const sandbox: Sandbox = sinon.createSandbox();
  afterEach(() => {
    sandbox.restore();
    mockRequire.stopAll();
  });
  const mockDirectory: string = '/some/directory';
  const mockFileName: string = `${mockDirectory}/some-file-name.json`;
  const mockRecords: ISerializedHttp[] = [];
  // load/save persist records as pretty-printed JSON under a top-level
  // "records" key.
  const expectedData = JSON.stringify({ records: mockRecords }, null, 2);
  const mockErrorMessage: string = 'some-error';
  const mockError: Error = new Error(mockErrorMessage);
  let file: { [key: string]: any };
  let stubEnsureDir: Stub;
  let stubReadFile: Stub;
  let stubWriteFile: Stub;
  beforeEach(() => {
    stubEnsureDir = sandbox.stub();
    stubReadFile = sandbox.stub();
    stubWriteFile = sandbox.stub();
    // Install the fs-extra stubs, then re-require both fs-extra and the
    // module under test so the latter picks up the mocked dependency.
    mock('fs-extra', {
      ensureDir: stubEnsureDir,
      readFile: stubReadFile,
      writeFile: stubWriteFile,
    });
    mockRequire.reRequire('fs-extra');
    file = mockRequire.reRequire('../../src/file');
  });
  describe('load', () => {
    it('reads the file specified and returns the stored serialized http mocks', async () => {
      stubReadFile.resolves(Buffer.from(expectedData, 'utf8'));
      const results: ISerializedHttp[] = await file.load({ filename: mockFileName });
      expect(results).deep.equals(mockRecords);
      expect(stubReadFile).calledOnce.and.calledWithExactly(mockFileName);
    });
    it('rejects with a typed error if readFile rejects with missing file', async () => {
      // ENOENT is translated into the library's YesNoError type.
      const missingFileError: any = new Error('some message');
      missingFileError.code = 'ENOENT';
      stubReadFile.rejects(missingFileError);
      let error: Error | undefined;
      try {
        await file.load({ filename: mockFileName });
      } catch (e) {
        error = e;
      }
      expect(error).instanceOf(YesNoError);
      expect(stubReadFile).calledOnce.and.calledWithExactly(mockFileName);
    });
    it('rejects with any other rejection from readFile', async () => {
      // Non-ENOENT failures are passed through untranslated.
      stubReadFile.rejects(mockError);
      let error: Error | undefined;
      try {
        await file.load({ filename: mockFileName });
      } catch (e) {
        error = e;
      }
      expect(error && error.message).equals(mockErrorMessage);
      expect(stubReadFile).calledOnce.and.calledWithExactly(mockFileName);
    });
    it('rejects if the data is not JSON', async () => {
      stubReadFile.resolves(Buffer.from('this is not JSON', 'utf8'));
      let error: Error | undefined;
      try {
        await file.load({ filename: mockFileName });
      } catch (e) {
        error = e;
      }
      expect(error).instanceOf(YesNoError);
      expect(stubReadFile).calledOnce.and.calledWithExactly(mockFileName);
    });
    it('rejects if the data does not contain top level records property', async () => {
      stubReadFile.resolves(
        Buffer.from(JSON.stringify({ data: 'this is missing records' }), 'utf8'),
      );
      let error: Error | undefined;
      try {
        await file.load({ filename: mockFileName });
      } catch (e) {
        error = e;
      }
      expect(error).instanceOf(YesNoError);
      expect(stubReadFile).calledOnce.and.calledWithExactly(mockFileName);
    });
  });
  describe('save', () => {
    it('makes the directory and writes the file', async () => {
      stubEnsureDir.resolves();
      stubWriteFile.resolves();
      const results: string = await file.save({ filename: mockFileName, records: mockRecords });
      expect(results).equals(mockFileName);
      expect(stubEnsureDir).calledOnce;
      // normalize() both sides so the assertion is platform-independent.
      expect(normalize(stubEnsureDir.args[0][0])).equals(normalize(mockDirectory));
      expect(stubWriteFile).calledOnce.and.calledWithExactly(mockFileName, expectedData);
    });
    it('provides defaults for some parameters', async () => {
      stubEnsureDir.resolves();
      stubWriteFile.resolves();
      // No `records` supplied: save should default to an empty list.
      const results: string = await file.save({ filename: mockFileName });
      expect(results).equals(mockFileName);
      expect(stubEnsureDir).calledOnce;
      expect(normalize(stubEnsureDir.args[0][0])).equals(normalize(mockDirectory));
      expect(stubWriteFile).calledOnce.and.calledWithExactly(
        mockFileName,
        JSON.stringify({ records: [] }, null, 2),
      );
    });
    it('rejects if making the directory fails', async () => {
      stubEnsureDir.rejects(mockError);
      let error: Error | undefined;
      try {
        await file.save({ filename: mockFileName, records: mockRecords });
      } catch (e) {
        error = e;
      }
      expect(error && error.message).equals(mockErrorMessage);
      expect(stubEnsureDir).calledOnce;
      expect(normalize(stubEnsureDir.args[0][0])).equals(normalize(mockDirectory));
      // The write must not be attempted when ensureDir fails.
      expect(stubWriteFile).not.called;
    });
    it('rejects if writing the file contents fails', async () => {
      stubEnsureDir.resolves();
      stubWriteFile.rejects(mockError);
      let error: Error | undefined;
      try {
        await file.save({ filename: mockFileName, records: mockRecords });
      } catch (e) {
        error = e;
      }
      expect(error && error.message).equals(mockErrorMessage);
      expect(stubEnsureDir).calledOnce;
      expect(normalize(stubEnsureDir.args[0][0])).equals(normalize(mockDirectory));
      expect(stubWriteFile).calledOnce.and.calledWithExactly(mockFileName, expectedData);
    });
  });
  describe('hydrateHttpMock', () => {
    const mockBody: string = 'some-body';
    const mockHeaderName: string = 'some-header-name';
    const mockHeader: string = 'some-header';
    const mockHeaders: IHeaders = {
      [mockHeaderName]: mockHeader,
    };
    const mockHost: string = 'some-host';
    const mockMethod: string = 'GET';
    const mockPath: string = 'some/api/path';
    const mockPort: number = 8080;
    const mockRequest: IPartialMockRequest = {
      body: mockBody,
      headers: mockHeaders,
      host: mockHost,
      method: mockMethod,
      path: mockPath,
      port: mockPort,
      protocol: 'https',
    };
    const mockStatusCode: number = 200;
    const mockResponse: IPartialMockResponse = {
      body: mockBody,
      headers: mockHeaders,
      statusCode: mockStatusCode,
    };
    const mockMock: IHttpMock = {
      request: mockRequest,
      response: mockResponse,
    };
    const mockId: string = 'some-id';
    const mockVersion: string = 'some-version';
    const mockSerializedHttp: ISerializedHttp = {
      __id: mockId,
      __version: mockVersion,
      request: mockRequest as ISerializedRequest,
      response: mockResponse as ISerializedResponse,
    };
    // What hydrateHttpMock is expected to pass to createRecord: the mock
    // request spread over the defaults, so explicit values win.
    const expectedOptions: ICreateRecord = {
      duration: 0,
      request: {
        headers: {},
        path: '/',
        port: DEFAULT_PORT_HTTPS,
        ...mockRequest,
      },
      response: {
        body: mockBody,
        headers: mockHeaders,
        statusCode: mockStatusCode,
      },
    };
    let stubCreateRecord: Stub;
    beforeEach(() => {
      stubCreateRecord = sandbox.stub(httpSerializer, 'createRecord').returns(mockSerializedHttp);
    });
    it('returns a serialized http record', () => {
      const serializedHttp: ISerializedHttp = hydrateHttpMock(mockMock);
      expect(serializedHttp).deep.equals(mockSerializedHttp);
      expect(stubCreateRecord).calledOnce.and.calledWithExactly(expectedOptions);
    });
    it('provides defaults for response headers and body', () => {
      const clonedMock: IHttpMock = cloneDeep(mockMock);
      // by removing headers and body, the default logic will trigger
      clonedMock.response.headers = undefined;
      clonedMock.response.body = undefined;
      const serializedHttp: ISerializedHttp = hydrateHttpMock(clonedMock);
      expect(serializedHttp).deep.equals(mockSerializedHttp);
      const clonedOptions: ICreateRecord = cloneDeep(expectedOptions);
      clonedOptions.response.body = '';
      clonedOptions.response.headers = {};
      expect(stubCreateRecord).calledOnce.and.calledWithExactly(clonedOptions);
    });
    it('supplies a header content type header as needed', () => {
      const clonedMock: IHttpMock = cloneDeep(mockMock);
      // by specifying a body as an object, the headers will be manipulated
      const mockObjectBody = { data: 'some-data' };
      clonedMock.response.body = mockObjectBody;
      const serializedHttp: ISerializedHttp = hydrateHttpMock(clonedMock);
      expect(serializedHttp).deep.equals(mockSerializedHttp);
      const clonedOptions: ICreateRecord = cloneDeep(expectedOptions);
      clonedOptions.response.body = mockObjectBody;
      clonedOptions.response.headers = {
        ...mockHeaders,
        [HEADER_CONTENT_TYPE]: MIME_TYPE_JSON,
      };
      expect(stubCreateRecord).calledOnce.and.calledWithExactly(clonedOptions);
    });
    it('changes ports based on the protocol', () => {
      const clonedMock: IHttpMock = cloneDeep(mockMock);
      // test port switch
      clonedMock.request.protocol = 'http';
      // demonstrate port is supplied a default in the absence of the request port
      delete clonedMock.request.port;
      const serializedHttp: ISerializedHttp = hydrateHttpMock(clonedMock);
      expect(serializedHttp).deep.equals(mockSerializedHttp);
      const clonedOptions: ICreateRecord = cloneDeep(expectedOptions);
      clonedOptions.request.port = DEFAULT_PORT_HTTP;
      clonedOptions.request.protocol = clonedMock.request.protocol;
      expect(stubCreateRecord).calledOnce.and.calledWithExactly(clonedOptions);
    });
  });
  describe('getMockFileName', () => {
    const mockName: string = 'Some Name With Spaces';
    it('computes the mock file name for a mock name', () => {
      // Names are kebab-cased and suffixed with "-yesno.json".
      const results: string = getMockFilename(mockName, mockDirectory);
      expect(results).equals(normalize(`${mockDirectory}${sep}some-name-with-spaces-yesno.json`));
    });
  });
});
import * as _ from "lodash";
import * as uuid from "node-uuid";
import PUL from "../../../lib/updates/PUL";
import * as jerr from "../../../lib/errors";
describe("PUL", () => {
it("Should build a simple PUL", () => {
var pul = new PUL();
pul
.insertIntoObject(uuid.v4(), [], { b: 2 })
.insertIntoArray(uuid.v4(), [], 0, ["a"])
.deleteFromObject(uuid.v4(), [], ["a", "b"])
.replaceInObject(uuid.v4(), [], "a", 1);
});
it("Two or more ReplaceInObject primitives have the same target object and selector.", () => {
expect(() => {
var pul = new PUL();
pul.replaceInObject(uuid.v4(), [], "a", 1);
pul.replaceInObject(uuid.v4(), [], "a", 2);
}).not.toThrow();
expect(() => {
try {
var target = uuid.v4();
var pul = new PUL();
pul.replaceInObject(target, [], "a", 1);
pul.replaceInObject(target, [], "a", 2);
} catch(e) {
expect(e instanceof jerr.JNUP0009).toBe(true);
throw e;
}
}).toThrow();
});
it("Two or more RenameInObject primitives have the same target object and selector.", () => {
expect(() => {
var pul = new PUL();
pul.renameInObject(uuid.v4(), [], "a", "b");
pul.renameInObject(uuid.v4(), [], "a", "b");
}).not.toThrow();
expect(() => {
try {
var target = uuid.v4();
var pul = new PUL();
pul.renameInObject(target, [], "a", "b");
pul.renameInObject(target, [], "a", "c");
} catch(e) {
expect(e instanceof jerr.JNUP0009).toBe(true);
throw e;
}
}).toThrow();
});
it("Two or more ReplaceInArray primitives have the same target object and selector.", () => {
expect(() => {
var pul = new PUL();
pul.replaceInArray(uuid.v4(), [], 1, "b");
pul.replaceInArray(uuid.v4(), [], 1, "a");
}).not.toThrow();
expect(() => {
try {
var target = uuid.v4();
var pul = new PUL();
pul.replaceInArray(target, [], 1, "b");
pul.replaceInArray(target, [], 1, "c");
} catch(e) {
expect(e instanceof jerr.JNUP0009).toBe(true);
throw e;
}
}).toThrow();
});
it("insertIntoObject Normalization", () => {
var target = uuid.v4();
var pul = new PUL();
pul.insertIntoObject(target, [], { a: 1, b: 2});
pul.insertIntoObject(target, [], { c: 3 });
pul.insertIntoObject(target, [], { d: 4 });
expect(pul.udps.insertIntoObject.length).toBe(1);
expect(pul.udps.insertIntoObject[0].pairs["a"]).toBe(1);
expect(pul.udps.insertIntoObject[0].pairs["b"]).toBe(2);
expect(pul.udps.insertIntoObject[0].pairs["c"]).toBe(3);
expect(pul.udps.insertIntoObject[0].pairs["d"]).toBe(4);
expect(() => {
try {
var pul = new PUL();
pul.insertIntoObject(target, [], { a: 1, b: 2 });
pul.insertIntoObject(target, [], { b: 3, c: 3 });
pul.insertIntoObject(target, [], { c: 3, d: 4 });
} catch(e) {
expect(e instanceof jerr.JNUP0005).toBe(true);
throw e;
}
}).toThrow();
});
it("InsertIntoArray Normalization", () => {
var target = uuid.v4();
var pul = new PUL();
pul.insertIntoArray(target, [], 1, ["a"]);
pul.insertIntoArray(target, [], 0, ["a"]);
pul.insertIntoArray(target, [], 0, ["a"]);
expect(pul.udps.insertIntoArray.length).toBe(2);
});
it("DeleteFromObject Normalization", () => {
var target = uuid.v4();
var pul = new PUL();
pul.deleteFromObject(target, [], ["a"]);
pul.deleteFromObject(target, [], ["a"]);
pul.deleteFromObject(target, [], ["b"]);
pul.deleteFromObject(target, [], ["b"]);
expect(pul.udps.deleteFromObject.length).toBe(1);
expect(pul.udps.deleteFromObject[0].keys.length).toBe(2);
expect(pul.udps.deleteFromObject[0].keys.indexOf("a") !== -1).toBe(true);
expect(pul.udps.deleteFromObject[0].keys.indexOf("b") !== -1).toBe(true);
});
it("DeleteFromArray Normalization", () => {
var target = uuid.v4();
var pul = new PUL();
pul.deleteFromArray(target, [], 0);
pul.deleteFromArray(target, [], 0);
pul.deleteFromArray(target, [], 0);
pul.deleteFromArray(target, [], 1);
pul.deleteFromArray(target, [], 1);
expect(pul.udps.deleteFromArray.length).toBe(2);
});
it("ReplaceInArray Normalization", () => {
var target = uuid.v4();
var pul = new PUL();
pul.deleteFromArray(target, [], 0);
pul.replaceInArray(target, [], 0, 1);
expect(pul.normalize().udps.replaceInArray.length).toBe(0);
pul = new PUL();
pul.replaceInArray(target, [], 0, 1);
pul.deleteFromArray(target, [], 0);
expect(pul.normalize().udps.replaceInArray.length).toBe(0);
//The presence of multiple UPs of this type with the same (array,index) target raises an error.
expect(() => {
try {
var pul = new PUL();
pul.replaceInArray(target, [], 0, 1);
pul.replaceInArray(target, [], 0, 1);
} catch(e) {
expect(e instanceof jerr.JNUP0009).toBe(true);
throw e;
}
}).toThrow();
expect(() => {
try {
var pul = new PUL();
pul.deleteFromArray(target, [], 0);
pul.replaceInArray(target, [], 0, 1);
pul.replaceInArray(target, [], 0, 1);
} catch(e) {
expect(e instanceof jerr.JNUP0009).toBe(true);
throw e;
}
}).toThrow();
expect(() => {
try {
var pul = new PUL();
pul.replaceInArray(target, [], 0, 1);
pul.deleteFromArray(target, [], 0);
pul.replaceInArray(target, [], 0, 1);
} catch(e) {
expect(e instanceof jerr.JNUP0009).toBe(true);
throw e;
}
}).toThrow();
});
it("ReplaceInObject Normalization", () => {
var target = uuid.v4();
var pul = new PUL();
pul.deleteFromObject(target, [], ["foo"]);
pul.replaceInObject(target, [], "foo", "bar");
expect(pul.normalize().udps.replaceInObject.length).toBe(0);
pul = new PUL();
pul.replaceInObject(target, [], "foo", "bar");
pul.deleteFromObject(target, [], ["foo"]);
expect(pul.normalize().udps.replaceInObject.length).toBe(0);
//The presence of multiple UPs of this type with the same (array,index) target raises an error.
expect(() => {
try {
var pul = new PUL();
pul.replaceInObject(target, [], "foo", "bar");
pul.replaceInObject(target, [], "foo", "bar");
} catch(e) {
expect(e instanceof jerr.JNUP0009).toBe(true);
throw e;
}
}).toThrow();
expect(() => {
try {
var pul = new PUL();
pul
.deleteFromObject(target, [], ["foo"])
.replaceInObject(target, [], "foo", "bar")
.replaceInObject(target, [], "foo", "bar");
} catch(e) {
expect(e instanceof jerr.JNUP0009).toBe(true);
throw e;
}
}).toThrow();
expect(() => {
try {
var pul = new PUL();
pul
.replaceInObject(target, [], "foo", "bar")
.deleteFromObject(target, [], ["foo"])
.replaceInObject(target, [], "foo", "bar");
} catch(e) {
expect(e instanceof jerr.JNUP0009).toBe(true);
throw e;
}
}).toThrow();
});
it("RenameInObject Normalization", () => {
var target = uuid.v4();
var pul = new PUL();
pul
.deleteFromObject(target, [], ["foo"])
.renameInObject(target, [], "foo", "bar");
expect(pul.normalize().udps.replaceInObject.length).toBe(0);
pul = new PUL();
pul
.renameInObject(target, [], "foo", "bar")
.deleteFromObject(target, [], ["foo"]);
expect(pul.normalize().udps.replaceInObject.length).toBe(0);
//The presence of multiple UPs of this type with the same (array,index) target raises an error.
expect(() => {
try {
var pul = new PUL();
pul
.renameInObject(target, [], "foo", "bar")
.renameInObject(target, [], "foo", "bar");
} catch(e) {
expect(e instanceof jerr.JNUP0009).toBe(true);
throw e;
}
}).toThrow();
expect(() => {
try {
var pul = new PUL();
pul
.deleteFromObject(target, [], ["foo"])
.renameInObject(target, [], "foo", "bar")
.renameInObject(target, [], "foo", "bar");
} catch(e) {
expect(e instanceof jerr.JNUP0009).toBe(true);
throw e;
}
}).toThrow();
expect(() => {
try {
var pul = new PUL();
pul
.renameInObject(target, [], "foo", "bar")
.deleteFromObject(target, [], ["foo"])
.renameInObject(target, [], "foo", "bar");
} catch(e) {
expect(e instanceof jerr.JNUP0009).toBe(true);
throw e;
}
}).toThrow();
});
it("Insert Conflict", () => {
expect(() => {
try {
var target = uuid.v4();
var pul = new PUL();
pul.insert(target, { id: 1 });
pul.insert(target, { id: 2 });
} catch(e) {
expect(e instanceof jerr.JNUP0005).toBe(true);
throw e;
}
}).toThrow();
});
it("Remove Normalization", () => {
var pul = new PUL();
pul.insert("1", { a: 1 });
pul.remove("1");
pul.normalize();
expect(pul.udps.insert.length).toBe(0);
expect(pul.udps.remove.length).toBe(1);
});
it("Normalization Example", () => {
var target = uuid.v4();
var pul = new PUL();
pul.insertIntoArray(target, [], 0, [{ id: 1 }]);
pul.insertIntoArray(target, [], 0, [{ id: 2 }]);
pul.deleteFromArray(target, [], 1);
pul.deleteFromArray(target, [], 2);
pul.deleteFromArray(target, [], 2);
pul.deleteFromArray(target, [], 3);
pul.renameInObject(target, ["0"], "title", "obsolete");
pul.deleteFromObject(target, ["0"], ["title"]);
pul.insertIntoObject(target, ["0"], { a: 1 });
pul.insertIntoObject(target, ["0"], { b: 2 });
pul.normalize();
expect(pul.udps.insertIntoArray.length).toBe(1);
expect(pul.udps.insertIntoArray[0].items.length).toBe(2);
expect(_.isEqual(pul.udps.insertIntoArray[0].items, [{ id: 1 }, { id: 2 }])).toBe(true);
expect(pul.udps.insertIntoObject.length).toBe(1);
expect(_.isEqual(pul.udps.insertIntoObject[0].pairs, {"a":1,"b":2})).toBe(true);
expect(pul.udps.deleteFromArray.length).toBe(3);
var positions = [];
pul.udps.deleteFromArray.forEach((udp) => {
positions.push(udp.position);
});
expect(_.isEqual(positions.sort(), [1, 2, 3].sort()));
expect(pul.udps.deleteFromObject.length).toBe(1);
expect(pul.udps.deleteFromObject[0].keys.length).toBe(1);
expect(pul.udps.deleteFromObject[0].keys["0"]).toBe("title");
});
it("Test PUL parsing and serialization", () => {
var pul = new PUL();
pul
.insertIntoObject(uuid.v4(), [], { a: 1 })
.insertIntoArray(uuid.v4(), [], 0, [1, 2])
.deleteFromObject(uuid.v4(), [], ["a"])
.replaceInObject(uuid.v4(), [], "a", 1)
.deleteFromArray(uuid.v4(), [], 0)
.replaceInArray(uuid.v4(), [], 0, 1)
.renameInObject(uuid.v4(), [], "a", "b")
.insert(uuid.v4(), { hello: "world" })
.remove(uuid.v4());
var serializedPUL = pul.serialize();
var pul1 = new PUL();
pul1.parse(serializedPUL);
var pul2 = new PUL();
pul2.parse(serializedPUL);
expect(pul1.serialize()).toBe(pul2.serialize());
expect(pul1.serialize()).toBe(serializedPUL);
expect(pul2.serialize()).toBe(serializedPUL);
});
}); | the_stack |
* TLSH is provided for use under two licenses: Apache OR BSD.
* Users may opt to use either license depending on the license
* restictions of the systems with which they plan to integrate
* the TLSH code.
*/
/* ==============
* Apache License
* ==============
* Copyright 2013 Trend Micro Incorporated
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/* ===========
* BSD License
* ===========
* Copyright (c) 2013, Trend Micro Incorporated
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. Neither the name of the copyright holder nor the names of its contributors
* may be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/*
* Port of C++ implementation tlsh to javascript.
*
* Construct Tlsh object with methods:
* update
* finale
* fromTlshStr
* reset
* hash
* totalDiff
*
* See tlsh.html for example use.
*/
// https://raw.githubusercontent.com/trendmicro/tlsh/master/js_ext/tlsh.js
// modified for use in phishcatch (eg type definitions)
import { TLSHInstance, TLSHQuartile } from '../types'
// Module-wide switch for verbose console tracing; keep false in production.
const debug = false
///////////////////////////////////////////////////////////////////////////////////
// From tlsh_util.cpp
// Pearson-style substitution table from tlsh_util.cpp: a fixed permutation
// of 0..255 used by b_mapping() to hash byte triplets into bucket indices.
// Laid out 16 values per row for readability.
const v_table = new Uint8Array([
  1, 87, 49, 12, 176, 178, 102, 166, 121, 193, 6, 84, 249, 230, 44, 163,
  14, 197, 213, 181, 161, 85, 218, 80, 64, 239, 24, 226, 236, 142, 38, 200,
  110, 177, 104, 103, 141, 253, 255, 50, 77, 101, 81, 18, 45, 96, 31, 222,
  25, 107, 190, 70, 86, 237, 240, 34, 72, 242, 20, 214, 244, 227, 149, 235,
  97, 234, 57, 22, 60, 250, 82, 175, 208, 5, 127, 199, 111, 62, 135, 248,
  174, 169, 211, 58, 66, 154, 106, 195, 245, 171, 17, 187, 182, 179, 0, 243,
  132, 56, 148, 75, 128, 133, 158, 100, 130, 126, 91, 13, 153, 246, 216, 219,
  119, 68, 223, 78, 83, 88, 201, 99, 122, 11, 92, 32, 136, 114, 52, 10,
  138, 30, 48, 183, 156, 35, 61, 26, 143, 74, 251, 94, 129, 162, 63, 152,
  170, 7, 115, 167, 241, 206, 3, 150, 55, 59, 151, 220, 90, 53, 23, 131,
  125, 173, 15, 238, 79, 95, 89, 16, 105, 137, 225, 224, 217, 160, 37, 123,
  118, 73, 2, 157, 46, 116, 9, 145, 134, 228, 207, 212, 202, 215, 69, 229,
  27, 188, 67, 124, 168, 252, 42, 4, 29, 108, 21, 247, 19, 205, 39, 203,
  233, 40, 186, 147, 198, 192, 155, 33, 164, 191, 98, 204, 165, 180, 117, 76,
  140, 36, 210, 172, 41, 54, 159, 8, 185, 232, 113, 196, 231, 47, 146, 120,
  51, 65, 28, 144, 254, 221, 93, 189, 194, 139, 112, 43, 71, 109, 184, 209,
])
// Pearson hash of the 4-byte sequence (salt, i, j, k) through v_table,
// yielding a bucket index in 0..255.
function b_mapping(salt: number, i: number, j: number, k: number) {
  let acc = 0
  for (const byte of [salt, i, j, k]) {
    acc = v_table[acc ^ byte]
  }
  return acc
}
// Natural logs of 1.5, 1.3 and 1.1 — the bases of the three piecewise
// logarithmic scales below.
const LOG_1_5 = 0.4054651
const LOG_1_3 = 0.26236426
const LOG_1_1 = 0.09531018
// Maps a byte-stream length onto a single "L value" byte using three
// log scales of decreasing resolution (constants from the reference
// implementation), truncated to 8 bits.
function l_capturing(len: number) {
  const logLen = Math.log(len)
  if (len <= 656) {
    return Math.floor(logLen / LOG_1_5) & 0xff
  }
  if (len <= 3199) {
    return Math.floor(logLen / LOG_1_3 - 8.72777) & 0xff
  }
  return Math.floor(logLen / LOG_1_1 - 62.5472) & 0xff
}
// Exchanges the high and low nibbles of a byte (e.g. 0x12 -> 0x21).
function swap_byte(i: number) {
  const lowNibble = i & 0x0f
  const highNibble = (i & 0xf0) >> 4
  return (lowNibble << 4) | highNibble
}
// Renders the first `len` bytes of `data` as uppercase, zero-padded
// hexadecimal (mirrors TLSH.java's to_hex).
//
// Fixed: the original accumulated into a boxed `new String()` wrapper
// object (and returned that object for len === 0); build a primitive
// string instead. The dead `debug && console.log` trace was removed —
// `debug` is a module-level `false` constant, so the branch never ran.
function to_hex(data: Uint8Array, len: number) {
  let s = ''
  for (let i = 0; i < len; i++) {
    // toString(16) drops the leading zero for values < 16; pad to 2 chars.
    s += data[i].toString(16).toUpperCase().padStart(2, '0')
  }
  return s
}
// Decodes a hex string into bytes, two characters per byte
// (mirrors TLSH.java's from_hex). Inverse of to_hex for valid input.
function from_hex(str: string) {
  const bytes = new Uint8Array(str.length / 2) // unsigned char array
  for (let out = 0; out < bytes.length; out++) {
    bytes[out] = parseInt(str.slice(out * 2, out * 2 + 2), 16)
  }
  return bytes
}
// Circular distance between x and y on a ring of size R: the shorter of
// the clockwise and counter-clockwise gaps.
function mod_diff(x: number, y: number, R: number) {
  const lo = Math.min(x, y)
  const hi = Math.max(x, y)
  const inner = hi - lo
  const outer = lo + R - hi
  return Math.min(inner, outer)
}
// Use generateTable() from TLSH.java implementation
// Builds the 256x256 distance table between bucket-count codes (from
// TLSH.java's generateTable): each byte holds four 2-bit pairs, and the
// distance is the sum of per-pair absolute differences, with the maximum
// pair difference (3) penalized as 6.
function generateTable() {
  const size = 256
  const table: Uint8Array[] = []
  for (let row = 0; row < size; row++) {
    table.push(new Uint8Array(size))
  }
  for (let i = 0; i < size; i++) {
    for (let j = 0; j < size; j++) {
      let diff = 0
      // Walk the four 2-bit pairs of i and j from least significant up.
      for (let pair = 0, x = i, y = j; pair < 4; pair++) {
        const d = Math.abs((x % 4) - (y % 4))
        diff += d === 3 ? 6 : d
        x = Math.floor(x / 4)
        y = Math.floor(y / 4)
      }
      table[i][j] = diff
    }
  }
  return table
}
const bit_pairs_diff_table = generateTable()
// Sums the pairwise bit-pair distances of the first `len` entries of two
// digest bodies via bit_pairs_diff_table.
function h_distance(len: number, x: Uint8Array, y: Uint8Array) {
  let total = 0
  for (let idx = 0; idx < len; idx++) {
    debug && console.log('bit_pairs_diff_table[' + x[idx] + '][' + y[idx] + ']=' + bit_pairs_diff_table[x[idx]][y[idx]])
    total += bit_pairs_diff_table[x[idx]][y[idx]]
  }
  debug && console.log('h_distance returning ' + total)
  return total
}
///////////////////////////////////////////////////////////////////////////////////
// from C #defines in tlsh_impl.h and tlsh_impl.cpp
const SLIDING_WND_SIZE = 5
const RNG_SIZE = SLIDING_WND_SIZE
// Wraps an index (which may be as low as -RNG_SIZE) into [0, RNG_SIZE).
function RNG_IDX(i: number) {
  return (i + RNG_SIZE) % RNG_SIZE
}
const TLSH_CHECKSUM_LEN = 1
const BUCKETS = 256
const EFF_BUCKETS = 128
const CODE_SIZE = 32 // 128 * 2 bits = 32 bytes
const TLSH_STRING_LEN = 70 // 2 + 1 + 32 bytes = 70 hexadecimal chars
const RANGE_LVALUE = 256
const RANGE_QRATIO = 16
// Swaps two entries of buf.bucket_copy in place.
function SWAP_UINT(buf: { bucket_copy: Uint32Array }, x: number, y: number) {
  ;[buf.bucket_copy[x], buf.bucket_copy[y]] = [buf.bucket_copy[y], buf.bucket_copy[x]]
}
///////////////////////////////////////////////////////////////////////////////////
// TLSH member and non-member functions - from tlsh_impl.cpp
// Quicksort-style partition of buf.bucket_copy[left..right] (inclusive).
// Uses the middle element's value as pivot, sweeps smaller values to the
// front, and returns the pivot's final index. Mutates the array in place.
function partition(buf: { bucket_copy: Uint32Array }, left: number, right: number) {
  const a = buf.bucket_copy
  if (left === right) {
    return left
  }
  if (left + 1 === right) {
    // Two-element range: order directly.
    if (a[left] > a[right]) {
      const t = a[left]
      a[left] = a[right]
      a[right] = t
    }
    return left
  }
  // Stash the pivot value at the right edge.
  const mid = (left + right) >> 1
  const pivotVal = a[mid]
  a[mid] = a[right]
  a[right] = pivotVal
  // Sweep everything smaller than the pivot toward the front.
  let store = left
  for (let scan = left; scan < right; scan++) {
    if (a[scan] < pivotVal) {
      const t = a[store]
      a[store] = a[scan]
      a[scan] = t
      store++
    }
  }
  // Drop the pivot into its final slot.
  a[right] = a[store]
  a[store] = pivotVal
  return store
}
// Computes the quartile points (q1, q2, q3) of the first EFF_BUCKETS (128)
// bucket counts in tlsh.a_bucket and writes them into `quartiles`.
//
// Works on a scratch copy of the buckets with quickselect-style
// partitioning: it first locates q2 (the median), recording every
// partition boundary visited in short_cut_left/short_cut_right; those
// recorded boundaries then bound the follow-up searches for q1 and q3 so
// already-partitioned sub-ranges are not re-scanned.
function find_quartile(tlsh: TLSHInstance, quartiles: TLSHQuartile) {
  const buf = {
    bucket_copy: new Uint32Array(EFF_BUCKETS),
  }
  // Partition boundaries recorded while hunting for q2, plus stack tops.
  const short_cut_left = new Uint32Array(EFF_BUCKETS)
  const short_cut_right = new Uint32Array(EFF_BUCKETS)
  let spl = 0
  let spr = 0
  // Target ranks (0-based) for 128 buckets: p1 = 31, p2 = 63, p3 = 95.
  const p1 = EFF_BUCKETS / 4 - 1
  const p2 = EFF_BUCKETS / 2 - 1
  const p3 = EFF_BUCKETS - EFF_BUCKETS / 4 - 1
  const end = EFF_BUCKETS - 1
  // Work on a copy: partition() mutates the array.
  for (let i = 0; i <= end; i++) {
    buf.bucket_copy[i] = tlsh.a_bucket[i]
  }
  // Quickselect for q2, logging each visited boundary for reuse below.
  for (let l = 0, r = end; ; ) {
    const ret = partition(buf, l, r)
    if (ret > p2) {
      r = ret - 1
      short_cut_right[spr] = ret
      spr++
    } else if (ret < p2) {
      l = ret + 1
      short_cut_left[spl] = ret
      spl++
    } else {
      quartiles.q2 = buf.bucket_copy[p2]
      break
    }
  }
  // Sentinel boundaries adjacent to the located median.
  short_cut_left[spl] = p2 - 1
  short_cut_right[spr] = p2 + 1
  // Find q1 inside [l, r]: each recorded left boundary narrows the range
  // until one straddles (or equals) rank p1.
  for (let i = 0, l = 0; i <= spl; i++) {
    let r = short_cut_left[i]
    if (r > p1) {
      for (;;) {
        const ret = partition(buf, l, r)
        if (ret > p1) {
          r = ret - 1
        } else if (ret < p1) {
          l = ret + 1
        } else {
          quartiles.q1 = buf.bucket_copy[p1]
          break
        }
      }
      break
    } else if (r < p1) {
      l = r
    } else {
      quartiles.q1 = buf.bucket_copy[p1]
      break
    }
  }
  // Symmetric search for q3 using the recorded right boundaries.
  for (let i = 0, r = end; i <= spr; i++) {
    let l = short_cut_right[i]
    if (l < p3) {
      for (;;) {
        const ret = partition(buf, l, r)
        if (ret > p3) {
          r = ret - 1
        } else if (ret < p3) {
          l = ret + 1
        } else {
          quartiles.q3 = buf.bucket_copy[p3]
          break
        }
      }
      break
    } else if (l > p3) {
      r = l
    } else {
      quartiles.q3 = buf.bucket_copy[p3]
      break
    }
  }
}
///////////////////////////////////////////////////////////////////////////////////
// Definition of tlsh object
// Fresh TLSH state: rolling checksum, 5-byte sliding window, 256 histogram
// buckets, and the fields that finale() fills in (Lvalue, Q, digest code).
const Tlsh = function (this: TLSHInstance) {
  this.data_len = 0
  this.Lvalue = 0
  this.Q = 0
  this.checksum = new Uint8Array(TLSH_CHECKSUM_LEN) // unsigned char array
  this.slide_window = new Uint8Array(SLIDING_WND_SIZE)
  this.a_bucket = new Uint32Array(BUCKETS) // unsigned int array
  this.tmp_code = new Uint8Array(CODE_SIZE)
  this.lsh_code = ''
  this.lsh_code_valid = false
}
// Accessors for the packed Q byte: the low nibble holds the q1 ratio, the
// high nibble the q2 ratio. Mirrors get/setQLo() and get/setQHi() from the
// TLSH.java implementation.
function getQLo(Q: number) {
  return Q & 0x0f
}
function getQHi(Q: number) {
  return (Q >> 4) & 0x0f
}
function setQLo(Q: number, x: number) {
  return (Q & 0xf0) | (x & 0x0f)
}
function setQHi(Q: number, x: number) {
  return ((x & 0x0f) << 4) | (Q & 0x0f)
}
// Allow caller to pass in length in case there are embedded null characters, as there
// are in strings str_1 and str_2 (see simple_test.cpp)
//
// length parameter defaults to str.length
//
// Feeds `length` characters of `str` through the 5-byte sliding window,
// updating the rolling checksum and the 256 bucket counters via six fixed
// triplet hashes (b_mapping with distinct salts).
Tlsh.prototype.update = function (this: TLSHInstance, str: string, length: number) {
  if (!str || !str.length) {
    debug && console.log('No string or string length')
    return
  }
  length = typeof length !== 'undefined' ? length : str.length
  // Convert to one unsigned byte per character.
  const data = []
  for (let i = 0; i < length; i++) {
    const code = str.charCodeAt(i)
    if (code > 255) {
      debug && console.log('Unexpected ' + str[i] + ' has value ' + code + ' which is too large')
      // Char codes outside the 0..255 range are collapsed to a fixed byte.
      data.push(254 & 0xff)
    } else {
      data.push(code & 0xff)
    }
  }
  if (length != data.length) {
    debug &&
      console.log('Unexpected string length:' + length + ' is not equal to value unsigned char length: ' + data.length)
    return
  }
  let j = this.data_len % RNG_SIZE
  let fed_len = this.data_len
  for (let i = 0; i < length; i++, fed_len++, j = RNG_IDX(j + 1)) {
    this.slide_window[j] = data[i]
    debug && console.log('slide_window[' + j + ']=' + this.slide_window[j])
    if (fed_len >= 4) {
      //only calculate when input >= 5 bytes
      const j_1 = RNG_IDX(j - 1)
      const j_2 = RNG_IDX(j - 2)
      const j_3 = RNG_IDX(j - 3)
      const j_4 = RNG_IDX(j - 4)
      for (let k = 0; k < TLSH_CHECKSUM_LEN; k++) {
        if (k == 0) {
          this.checksum[k] = b_mapping(0, this.slide_window[j], this.slide_window[j_1], this.checksum[k])
          debug && console.log('tlsh.checksum[' + k + ']=' + this.checksum[k])
        } else {
          // use calculated 1 byte checksums to expand the total checksum to 3 bytes
          this.checksum[k] = b_mapping(
            this.checksum[k - 1],
            this.slide_window[j],
            this.slide_window[j_1],
            this.checksum[k],
          )
        }
      }
      // Six triplet hashes with distinct salts; each increments one bucket.
      // (The previous revision computed the salt-2 mapping three times in a
      // row before using it once; the redundant duplicate calls are removed.)
      let r
      r = b_mapping(2, this.slide_window[j], this.slide_window[j_1], this.slide_window[j_2])
      this.a_bucket[r]++
      r = b_mapping(3, this.slide_window[j], this.slide_window[j_1], this.slide_window[j_3])
      this.a_bucket[r]++
      r = b_mapping(5, this.slide_window[j], this.slide_window[j_2], this.slide_window[j_3])
      this.a_bucket[r]++
      r = b_mapping(7, this.slide_window[j], this.slide_window[j_2], this.slide_window[j_4])
      this.a_bucket[r]++
      r = b_mapping(11, this.slide_window[j], this.slide_window[j_1], this.slide_window[j_4])
      this.a_bucket[r]++
      r = b_mapping(13, this.slide_window[j], this.slide_window[j_3], this.slide_window[j_4])
      this.a_bucket[r]++
    }
  }
  this.data_len += length
}
// final is a reserved word
// Finishes the hash: optionally feeds a last chunk through update(), derives
// the quartiles of the bucket counts, packs each bucket's quartile class into
// 2 bits of tmp_code, and fills Lvalue / Q. Errors are only logged, never
// thrown; the digest is still produced.
Tlsh.prototype.finale = function (this: TLSHInstance, str: string, length: number) {
  if (str) {
    this.update(str, length)
  }
  // NOTE(review): an earlier comment claimed a 512-byte minimum, but the code
  // enforces 256 — confirm which minimum this build is meant to use.
  if (this.data_len < 256) {
    debug && console.log('ERROR: length too small - ' + this.data_len) // + ")");
  }
  const quartiles = {
    q1: 0,
    q2: 0,
    q3: 0,
  }
  find_quartile(this, quartiles)
  // buckets must be more than 50% non-zero
  let nonzero = 0
  for (let i = 0; i < CODE_SIZE; i++) {
    for (let j = 0; j < 4; j++) {
      if (this.a_bucket[4 * i + j] > 0) {
        nonzero++
      }
    }
  }
  if (nonzero <= (4 * CODE_SIZE) / 2) {
    debug && console.log('ERROR: not enought variation in input - ' + nonzero + ' < ' + (4 * CODE_SIZE) / 2)
  }
  // Each bucket contributes 2 bits (0..3 depending on which quartile its
  // count falls into); four buckets pack into one tmp_code byte.
  for (let i = 0; i < CODE_SIZE; i++) {
    let h = 0
    for (let j = 0; j < 4; j++) {
      const k = this.a_bucket[4 * i + j]
      if (quartiles.q3 < k) {
        h += 3 << (j * 2) // leave the optimization j*2 = j<<1 or j*2 = j+j for compiler
      } else if (quartiles.q2 < k) {
        h += 2 << (j * 2)
      } else if (quartiles.q1 < k) {
        h += 1 << (j * 2)
      }
    }
    this.tmp_code[i] = h
  }
  // L value encodes the input length; Q packs the two quartile ratios.
  this.Lvalue = l_capturing(this.data_len)
  this.Q = setQLo(this.Q, ((quartiles.q1 * 100) / quartiles.q3) % 16)
  this.Q = setQHi(this.Q, ((quartiles.q2 * 100) / quartiles.q3) % 16)
  this.lsh_code_valid = true
}
// Renders the digest as a hex string: the byte-swapped checksum byte(s), the
// swapped L value, the swapped Q byte, then the 32 code bytes emitted in
// reverse order. The result is cached in this.lsh_code and returned.
Tlsh.prototype.hash = function () {
  if (this.lsh_code_valid == false) {
    return 'ERROR IN PROCESSING'
  }
  const swappedChecksum = new Uint8Array(TLSH_CHECKSUM_LEN)
  for (let k = 0; k < TLSH_CHECKSUM_LEN; k++) {
    swappedChecksum[k] = swap_byte(this.checksum[k])
    debug &&
      console.log(
        'After swap_byte for checksum: tmp.checksum:' + swappedChecksum[k] + ', tlsh.checksum:' + this.checksum[k],
      )
  }
  const swappedLvalue = swap_byte(this.Lvalue)
  const swappedQ = swap_byte(this.Q)
  debug && console.log('After swap_byte for Q: tmp.Q:' + swappedQ + ', tlsh.Q:' + this.Q)
  // Code bytes are serialised back-to-front.
  const reversedCode = new Uint8Array(CODE_SIZE)
  for (let i = 0; i < CODE_SIZE; i++) {
    reversedCode[i] = this.tmp_code[CODE_SIZE - 1 - i]
    debug && console.log('tmp.tmp_code[' + i + ']:' + reversedCode[i])
  }
  const single = new Uint8Array(1)
  let code = to_hex(swappedChecksum, TLSH_CHECKSUM_LEN)
  single[0] = swappedLvalue
  code += to_hex(single, 1)
  single[0] = swappedQ
  code += to_hex(single, 1)
  code += to_hex(reversedCode, CODE_SIZE)
  this.lsh_code = code
  return this.lsh_code
}
// Restores the instance to its freshly-constructed state so it can be reused
// for a new input. Fix: lsh_code is reset to the primitive '' instead of
// `new String()` — the boxed String object is a different type from what the
// constructor and hash() assign and compares unexpectedly.
Tlsh.prototype.reset = function () {
  this.checksum = new Uint8Array(TLSH_CHECKSUM_LEN)
  this.slide_window = new Uint8Array(SLIDING_WND_SIZE)
  this.a_bucket = new Uint32Array(BUCKETS)
  this.data_len = 0
  this.tmp_code = new Uint8Array(CODE_SIZE)
  this.Lvalue = 0
  this.Q = 0
  this.lsh_code = ''
  this.lsh_code_valid = false
}
// len_diff defaults to true
// Distance between two digests: an optional length component, the two
// quartile-ratio nibbles of Q, a flat checksum-mismatch penalty, and the
// bit-pair distance over the 32 code bytes. Comparing an instance with
// itself scores 0.
Tlsh.prototype.totalDiff = function (this: TLSHInstance, other: TLSHInstance, len_diff: boolean) {
  if (this == other) {
    return 0
  }
  if (typeof len_diff === 'undefined') {
    len_diff = true
  }
  let score = 0
  if (len_diff) {
    const ldiff = mod_diff(this.Lvalue, other.Lvalue, RANGE_LVALUE)
    if (ldiff == 0) {
      score = 0
    } else if (ldiff == 1) {
      score = 1
    } else {
      score += ldiff * 12
    }
  }
  // Low and high nibbles of Q are compared independently.
  const q1diff = mod_diff(getQLo(this.Q), getQLo(other.Q), RANGE_QRATIO)
  score += q1diff <= 1 ? q1diff : (q1diff - 1) * 12
  const q2diff = mod_diff(getQHi(this.Q), getQHi(other.Q), RANGE_QRATIO)
  score += q2diff <= 1 ? q2diff : (q2diff - 1) * 12
  // Any checksum byte mismatch adds a flat penalty of 1.
  for (let k = 0; k < TLSH_CHECKSUM_LEN; k++) {
    if (this.checksum[k] != other.checksum[k]) {
      score++
      break
    }
  }
  score += h_distance(CODE_SIZE, this.tmp_code, other.tmp_code)
  return score
}
// Parses a 70-character hex TLSH string back into this instance's fields,
// undoing the byte swaps and code-byte reversal performed by hash().
// Invalid input (wrong length or non-hex characters) leaves the instance
// unchanged.
Tlsh.prototype.fromTlshStr = function (this: TLSHInstance, str: string) {
  if (str.length != TLSH_STRING_LEN) {
    debug && console.log('Tlsh.fromTlshStr() - string has wrong length (' + str.length + ' != ' + TLSH_STRING_LEN + ')')
    return
  }
  for (let i = 0; i < TLSH_STRING_LEN; i++) {
    const c = str[i]
    const isHexDigit = (c >= '0' && c <= '9') || (c >= 'A' && c <= 'F') || (c >= 'a' && c <= 'f')
    if (!isHexDigit) {
      debug && console.log('Tlsh.fromTlshStr() - string has invalid (non-hex) characters')
      return
    }
  }
  const raw = from_hex(str)
  // Field order follows lsh_bin; TLSH_CHECKSUM_LEN is 1, so the checksum
  // occupies byte 0, followed by the L value and the Q byte.
  this.checksum[0] = swap_byte(raw[0])
  this.Lvalue = swap_byte(raw[1])
  this.Q = swap_byte(raw[2])
  // Code bytes were serialised back-to-front by hash(); reverse them again.
  for (let j = 0; j < CODE_SIZE; j++) {
    this.tmp_code[j] = raw[3 + CODE_SIZE - 1 - j]
  }
  this.lsh_code_valid = true
}
export default function TlshConstructor(): TLSHInstance {
// eslint-disable-next-line @typescript-eslint/no-unsafe-return
return new (Tlsh as any)()
} | the_stack |
import * as DiscoveryEntryWithMetaInfo from "../../../generated/joynr/types/DiscoveryEntryWithMetaInfo";
import MessagingQos from "../../messaging/MessagingQos";
import MulticastWildcardRegexFactory from "../../messaging/util/MulticastWildcardRegexFactory";
import defaultMessagingSettings from "../../start/settings/defaultMessagingSettings";
import SubscriptionQos from "../../proxy/SubscriptionQos";
import { MulticastPublication } from "../types/MulticastPublication";
import { SubscriptionPublication } from "../types/SubscriptionPublication";
import SubscriptionStop from "../types/SubscriptionStop";
import SubscriptionRequest from "../types/SubscriptionRequest";
import MulticastSubscriptionRequest from "../types/MulticastSubscriptionRequest";
import BroadcastSubscriptionRequest from "../types/BroadcastSubscriptionRequest";
import SubscriptionListener from "./SubscriptionListener";
import * as SubscriptionUtil from "./util/SubscriptionUtil";
import LongTimer from "../../util/LongTimer";
import LoggingManager from "../../system/LoggingManager";
import nanoid from "nanoid";
import * as UtilInternal from "../../util/UtilInternal";
import * as Typing from "../../util/Typing";
import PublicationMissedException from "../../exceptions/PublicationMissedException";
import * as JSONSerializer from "../../util/JSONSerializer";
import util from "util";
import Dispatcher = require("../Dispatcher");
import SubscriptionReply = require("../types/SubscriptionReply");
import BroadcastFilterParameters = require("../../proxy/BroadcastFilterParameters");
import MulticastSubscriptionQos = require("../../proxy/MulticastSubscriptionQos");
import OnChangeSubscriptionQos = require("../../proxy/OnChangeSubscriptionQos");
import OnChangeWithKeepAliveSubscriptionQos = require("../../proxy/OnChangeWithKeepAliveSubscriptionQos");
// Module-scoped logger for all subscription-management diagnostics.
const log = LoggingManager.getLogger("joynr.dispatching.subscription.SubscriptionManager");
/**
 * Settings accepted by SubscriptionManager#registerSubscription
 * (attribute subscriptions).
 */
interface SubscriptionSettings {
    proxyId: string;
    providerDiscoveryEntry: DiscoveryEntryWithMetaInfo;
    attributeType: string;
    attributeName: string;
    qos: SubscriptionQos;
    subscriptionId?: string; // reuse a pre-existing id when provided
    onReceive: (value: any) => void;
    onError?: (e: Error) => void;
    onSubscribed?: (participantId: string) => void;
}
/**
 * Settings accepted by SubscriptionManager#registerBroadcastSubscription
 * (selective broadcast or multicast subscriptions).
 */
interface BroadcastSubscriptionSettings {
    proxyId: string;
    providerDiscoveryEntry: DiscoveryEntryWithMetaInfo;
    broadcastName: string;
    broadcastParameter: { name: string; type: string }[];
    subscriptionQos: SubscriptionQos;
    filterParameters: BroadcastFilterParameters;
    selective?: boolean; // true => selective broadcast, otherwise multicast
    partitions: string[];
    subscriptionId?: string; // reuse a pre-existing id when provided
    onReceive: Function;
    onSubscribed?: Function;
    onError?: Function;
}
class SubscriptionManager {
    // Used to send subscription requests/stops to providers.
    private dispatcher: Dispatcher;
    /**
     * @name SubscriptionManager#registerBroadcastSubscription
     * @function
     * @param parameters
     * @param parameters.proxyId participantId of the sender
     * @param parameters.providerDiscoveryEntry DiscoveryEntry of the receiver
     * @param parameters.broadcastName the name of the broadcast being subscribed to
     * @param parameters.broadcastParameter the parameter meta information of the broadcast being subscribed to
     * @param [parameters.subscriptionQos] the subscriptionQos
     * @param [parameters.filterParameters] filter parameters used to indicate interest in
     *            only a subset of broadcasts that might be sent.
     * @param parameters.selective true if broadcast is selective
     * @param [parameters.partitions] partitions for multicast requests
     * @param parameters.subscriptionId optional parameter subscriptionId to reuse a
     *            pre-existing identifier for this concrete subscription request
     * @param parameters.onReceive is called when a broadcast is received.
     * @param parameters.onError is called when an error occurs with the broadcast
     * @param parameters.onSubscribed the callback to inform once the subscription request has
     *            been delivered successfully
     * @returns a promise object which provides the subscription token upon success and an error
     *          upon failure
     */
    public registerBroadcastSubscription: (parameters: BroadcastSubscriptionSettings) => Promise<string>;
    /**
     * This callback is called when a publication is received
     * @callback SubscriptionManager~onReceive
     * @param {Object} publication received
     */
    /**
     * This callback is called if there was an error with the subscription
     * @callback SubscriptionManager~onError
     * @param {Error} error
     */
    /**
     * @name SubscriptionManager#registerSubscription
     * @function
     * @param settings
     * @param settings.proxyId participantId of the sender
     * @param settings.providerDiscoveryEntry DiscoveryEntry of the receiver
     * @param settings.attributeType the type of the subscribing attribute
     * @param settings.attributeName the attribute name to subscribe to
     * @param [settings.qos] the subscriptionQos
     * @param settings.subscriptionId optional parameter subscriptionId to reuse a
     *            preexisting identifier for this concrete subscription request
     * @param settings.onReceive the callback for received publications.
     * @param settings.onError the callback for missing publication alerts or when an
     *            error occurs.
     * @param settings.onSubscribed the callback to inform once the subscription request has
     *            been delivered successfully
     * @returns an A promise object which provides the subscription token upon success and
     *          an error upon failure
     */
    public registerSubscription: (settings: SubscriptionSettings) => Promise<string>;
    // stores multicastIdPattern -> array of subscriptionIds
    private multicastSubscribers: any = {};
    // flipped to false by shutdown (outside this view); gates isReady()
    private started: boolean = true;
    // Map subscriptionId -> { cb } resolving pending register calls
    private subscriptionReplyCallers: any;
    // stores the object which is returned by setTimeout mapped to the subscriptionId
    private publicationCheckTimerIds: any = {};
    // stores subscriptionId - SubscriptionListener
    private subscriptionListeners: any = {};
    // stores subscriptionId - SubscriptionInfo pairs
    private subscriptionInfos: any = {};
    // translates multicastIds (with wildcards) into matchable regex patterns
    private multicastWildcardRegexFactory: any;
/**
* @constructor
* @param dispatcher
*/
public constructor(dispatcher: Dispatcher) {
this.multicastWildcardRegexFactory = new MulticastWildcardRegexFactory();
this.subscriptionReplyCallers = new Map();
this.registerSubscription = util.promisify(this.registerSubscriptionInternal);
this.registerBroadcastSubscription = util.promisify(this.registerBroadcastSubscriptionInternal);
this.dispatcher = dispatcher;
this.checkPublication = this.checkPublication.bind(this);
}
    /**
     * @returns true while the manager accepts new subscriptions; false once
     *          shutdown has begun.
     */
    private isReady(): boolean {
        return this.started;
    }
    /**
     * @param subscriptionId Id of the subscription to check
     * @returns time of last received publication
     */
    private getLastPublicationTime(subscriptionId: string): number {
        return this.subscriptionInfos[subscriptionId].lastPublicationTime_ms;
    }
    // Records the arrival time (ms since epoch) of the latest publication.
    private setLastPublicationTime(subscriptionId: string, timeMs: number): void {
        // eslint-disable-next-line @typescript-eslint/camelcase
        this.subscriptionInfos[subscriptionId].lastPublicationTime_ms = timeMs;
    }
/**
* @param subscriptionId Id of the subscription to check
* @param delayMs Delay to the next publication check.
* @returns true if subscription is expired, false if end date is not reached.
*/
private subscriptionEnds(subscriptionId: string, delayMs: number): boolean {
if (this.subscriptionInfos[subscriptionId] === undefined) {
log.warn(`subscriptionEnds has been called with unresolved subscriptionId "${subscriptionId}"`);
return true;
}
const expiryDateMs = this.subscriptionInfos[subscriptionId].qos.expiryDateMs;
// log.debug("Checking subscription end for subscriptionId: " + subscriptionId + "
// expiryDateMs: " + expiryDateMs + "
// current time: " + Date.now());
const ends = expiryDateMs <= Date.now() + delayMs;
// if (ends === true) {
// log.info("Subscription end date reached for id: " + subscriptionId);
// }
return ends;
}
    /**
     * Timer callback: raises a PublicationMissedException on the listener when
     * no publication arrived within alertAfterIntervalMs, then re-schedules
     * itself until the subscription's end date is reached.
     * @param subscriptionId Id of the subscription to check
     * @param alertAfterIntervalMs maximum delay between two incoming publications
     */
    private checkPublication(subscriptionId: string, alertAfterIntervalMs: number): void {
        const subscriptionListener = this.subscriptionListeners[subscriptionId];
        const timeSinceLastPublication = Date.now() - this.getLastPublicationTime(subscriptionId);
        if (alertAfterIntervalMs > 0 && timeSinceLastPublication >= alertAfterIntervalMs) {
            // Interval elapsed without a publication: notify the listener.
            if (subscriptionListener.onError) {
                const publicationMissedException = new PublicationMissedException({
                    detailMessage: "alertAfterIntervalMs period exceeded without receiving publication",
                    subscriptionId
                });
                subscriptionListener.onError(publicationMissedException);
            }
        }
        // Next check: a full interval when an alert just fired (or the clock
        // overshot), otherwise only the remainder of the current interval.
        let delayMs;
        if (timeSinceLastPublication > alertAfterIntervalMs) {
            delayMs = alertAfterIntervalMs;
        } else {
            delayMs = alertAfterIntervalMs - timeSinceLastPublication;
        }
        if (!this.subscriptionEnds(subscriptionId, delayMs)) {
            this.publicationCheckTimerIds[subscriptionId] = LongTimer.setTimeout(
                this.checkPublication,
                delayMs,
                subscriptionId,
                alertAfterIntervalMs
            );
        }
    }
private calculateTtl(subscriptionQos: SubscriptionQos): number {
if (subscriptionQos.expiryDateMs === SubscriptionQos.NO_EXPIRY_DATE) {
return defaultMessagingSettings.MAX_MESSAGING_TTL_MS;
}
const ttl = subscriptionQos.expiryDateMs - Date.now();
if (ttl > defaultMessagingSettings.MAX_MESSAGING_TTL_MS) {
return defaultMessagingSettings.MAX_MESSAGING_TTL_MS;
}
return ttl;
}
    /**
     * Shared bookkeeping for attribute and broadcast subscriptions: wraps
     * onReceive so payloads are type-augmented before delivery, stores a
     * SubscriptionListener and a SubscriptionInfo record, and starts the
     * publication-miss check timer when alertAfterIntervalMs is configured.
     */
    private storeSubscriptionRequest(
        settings: BroadcastSubscriptionSettings,
        subscriptionRequest: BroadcastSubscriptionRequest | MulticastSubscriptionRequest
    ): void;
    private storeSubscriptionRequest(settings: SubscriptionSettings, subscriptionRequest: SubscriptionRequest): void;
    private storeSubscriptionRequest(
        settings: BroadcastSubscriptionSettings & SubscriptionSettings,
        subscriptionRequest: SubscriptionRequest & BroadcastSubscriptionRequest & MulticastSubscriptionRequest
    ): void {
        let onReceiveWrapper;
        if (settings.attributeType !== undefined) {
            // Attribute subscription: single value, augmented to attributeType.
            onReceiveWrapper = (response: any[]) => {
                settings.onReceive(Typing.augmentTypes(response[0], settings.attributeType));
            };
        } else {
            // Broadcast subscription: augment each output element against its
            // declared broadcast parameter type, then deliver the whole array.
            onReceiveWrapper = (response: any[]) => {
                for (const responseKey in response) {
                    if (response.hasOwnProperty(responseKey)) {
                        response[responseKey] = Typing.augmentTypes(
                            response[responseKey],
                            settings.broadcastParameter[responseKey].type
                        );
                    }
                }
                settings.onReceive(response);
            };
        }
        this.subscriptionListeners[subscriptionRequest.subscriptionId] = new SubscriptionListener({
            onReceive: onReceiveWrapper,
            onError: settings.onError,
            onSubscribed: settings.onSubscribed
        });
        const subscriptionInfo = UtilInternal.extend(
            {
                proxyId: settings.proxyId,
                providerDiscoveryEntry: settings.providerDiscoveryEntry,
                // eslint-disable-next-line @typescript-eslint/camelcase
                lastPublicationTime_ms: 0
            },
            subscriptionRequest
        );
        this.subscriptionInfos[subscriptionRequest.subscriptionId] = subscriptionInfo;
        // alertAfterIntervalMs is only present on OnChangeWithKeepAliveSubscriptionQos.
        const alertAfterIntervalMs = (subscriptionRequest.qos as OnChangeWithKeepAliveSubscriptionQos)
            .alertAfterIntervalMs;
        if (alertAfterIntervalMs !== undefined && alertAfterIntervalMs > 0) {
            this.publicationCheckTimerIds[subscriptionRequest.subscriptionId] = LongTimer.setTimeout(
                this.checkPublication,
                alertAfterIntervalMs,
                subscriptionRequest.subscriptionId,
                alertAfterIntervalMs
            );
        }
    }
private removeRequestFromMulticastSubscribers(_multicastId: string, subscriptionId: string): void {
for (const multicastIdPattern in this.multicastSubscribers) {
if (this.multicastSubscribers.hasOwnProperty(multicastIdPattern)) {
const subscribers = this.multicastSubscribers[multicastIdPattern];
for (let i = 0; i < subscribers.length; i++) {
if (subscribers[i] === subscriptionId) {
subscribers.splice(i, 1);
if (subscribers.length === 0) {
delete this.multicastSubscribers[multicastIdPattern];
}
}
}
}
}
}
private cleanupSubscription(subscriptionId: string): void {
if (this.publicationCheckTimerIds[subscriptionId] !== undefined) {
LongTimer.clearTimeout(this.publicationCheckTimerIds[subscriptionId]);
delete this.publicationCheckTimerIds[subscriptionId];
}
if (this.subscriptionInfos[subscriptionId] !== undefined) {
const subscriptionInfo = this.subscriptionInfos[subscriptionId];
if (subscriptionInfo.multicastId !== undefined) {
this.removeRequestFromMulticastSubscribers(subscriptionInfo.multicastId, subscriptionId);
}
delete this.subscriptionInfos[subscriptionId];
}
if (this.subscriptionListeners[subscriptionId] !== undefined) {
delete this.subscriptionListeners[subscriptionId];
}
this.subscriptionReplyCallers.delete(subscriptionId);
}
private registerSubscriptionInternal(settings: SubscriptionSettings, cb: Function): void {
if (!this.isReady()) {
cb(new Error("SubscriptionManager is already shut down"));
return;
}
const subscriptionId = settings.subscriptionId || nanoid();
// log.debug("Registering Subscription Id " + subscriptionId);
if (settings.attributeName === undefined) {
cb(
new Error(
`Error: attributeName not provided in call to registerSubscription, settings = ${JSON.stringify(
settings
)}`
)
);
}
if (settings.attributeType === undefined) {
cb(
new Error(
`Error: attributeType not provided in call to registerSubscription, settings = ${JSON.stringify(
settings
)}`
)
);
}
if (settings.onError === undefined) {
log.warn(
`Warning: subscription for attribute "${
settings.attributeName
}" has been done without error callback function. You will not be informed about missed publications. Please specify the "onError" parameter while subscribing!`
);
}
if (settings.onReceive === undefined) {
log.warn(
`Warning: subscription for attribute "${
settings.attributeName
}" has been done without receive callback function. You will not be informed about incoming publications. Please specify the "onReceive" parameter while subscribing!`
);
}
const subscriptionRequest = new SubscriptionRequest({
subscriptionId,
subscribedToName: settings.attributeName,
qos: settings.qos
});
const ttl = this.calculateTtl(subscriptionRequest.qos);
const messagingQos = new MessagingQos({ ttl });
const timeout = LongTimer.setTimeout(() => {
this.cleanupSubscription(subscriptionId);
cb(new Error(`SubscriptionRequest with id ${subscriptionId} failed: tll expired`));
}, ttl);
this.subscriptionReplyCallers.set(subscriptionId, {
cb: (...args: any[]) => {
LongTimer.clearTimeout(timeout);
cb(...args);
}
});
this.storeSubscriptionRequest(settings, subscriptionRequest);
this.dispatcher.sendSubscriptionRequest({
from: settings.proxyId,
toDiscoveryEntry: settings.providerDiscoveryEntry,
messagingQos,
subscriptionRequest
});
}
private addRequestToMulticastSubscribers(multicastId: string, subscriptionId: string): void {
const multicastIdPattern = this.multicastWildcardRegexFactory.createIdPattern(multicastId);
if (this.multicastSubscribers[multicastIdPattern] === undefined) {
this.multicastSubscribers[multicastIdPattern] = [];
}
const subscribers = this.multicastSubscribers[multicastIdPattern];
for (let i = 0; i < subscribers.length; i++) {
if (subscribers[i] === subscriptionId) {
return;
}
}
subscribers.push(subscriptionId);
}
private createBroadcastSubscriptionRequest(
parameters: BroadcastSubscriptionSettings
): BroadcastSubscriptionRequest | MulticastSubscriptionRequest {
let request;
if (parameters.selective) {
request = new BroadcastSubscriptionRequest({
subscriptionId: parameters.subscriptionId || nanoid(),
subscribedToName: parameters.broadcastName,
qos: (parameters.subscriptionQos as any) as OnChangeSubscriptionQos,
filterParameters: parameters.filterParameters
});
} else {
request = new MulticastSubscriptionRequest({
multicastId: SubscriptionUtil.createMulticastId(
parameters.providerDiscoveryEntry.participantId,
parameters.broadcastName,
parameters.partitions
),
subscriptionId: parameters.subscriptionId || nanoid(),
subscribedToName: parameters.broadcastName,
qos: (parameters.subscriptionQos as any) as MulticastSubscriptionQos
});
this.addRequestToMulticastSubscribers(request.multicastId, request.subscriptionId);
}
return request;
}
/**
* @name SubscriptionManager#registerBroadcastSubscription
* @function
* @param parameters
* @param parameters.proxyId participantId of the sender
* @param parameters.providerDiscoveryEntry DiscoveryEntry of the receiver
* @param parameters.broadcastName the name of the broadcast being subscribed to
* @param parameters.broadcastParameter the parameter meta information of the broadcast being subscribed to
* @param [parameters.subscriptionQos] the subscriptionQos
* @param [parameters.filterParameters] filter parameters used to indicate interest in
* only a subset of broadcasts that might be sent.
* @param parameters.selective true if broadcast is selective
* @param [parameters.partitions] partitions for multicast requests
* @param parameters.subscriptionId optional parameter subscriptionId to reuse a
* pre-existing identifier for this concrete subscription request
* @param {SubscriptionManager~onReceive} parameters.onReceive is called when a broadcast is received.
* @param {SubscriptionManager~onError} parameters.onError is called when an error occurs with the broadcast
* @param {SubscriptionManager~onSubscribed} parameters.onSubscribed the callback to inform once the subscription request has
* been delivered successfully
* @param cb
*/
private registerBroadcastSubscriptionInternal(parameters: BroadcastSubscriptionSettings, cb: Function): void {
if (!this.isReady()) {
cb(new Error("SubscriptionManager is already shut down"));
return;
}
const subscriptionRequest = this.createBroadcastSubscriptionRequest(parameters);
const subscriptionId = subscriptionRequest.subscriptionId;
const ttl = this.calculateTtl(subscriptionRequest.qos);
const messagingQos = new MessagingQos({ ttl });
const timeout = LongTimer.setTimeout(() => {
this.cleanupSubscription(subscriptionId);
cb(new Error(`BroadcastSubscriptionRequest with id ${subscriptionId} failed: tll expired`));
}, ttl);
this.subscriptionReplyCallers.set(subscriptionId, {
cb: (...args: any[]) => {
LongTimer.clearTimeout(timeout);
cb(...args);
}
});
this.storeSubscriptionRequest(parameters, subscriptionRequest);
this.dispatcher
.sendBroadcastSubscriptionRequest({
from: parameters.proxyId,
toDiscoveryEntry: parameters.providerDiscoveryEntry,
messagingQos,
subscriptionRequest
})
.then(() => {
const type = Typing.getObjectType(subscriptionRequest);
if (type === "MulticastSubscriptionRequest") {
const subscriptionReplyCaller = this.subscriptionReplyCallers.get(subscriptionId);
const subscriptionListener = this.subscriptionListeners[subscriptionId];
if (subscriptionReplyCaller !== undefined) {
subscriptionReplyCaller.cb(undefined, subscriptionId);
}
if (subscriptionListener !== undefined && subscriptionListener.onSubscribed !== undefined) {
subscriptionListener.onSubscribed(subscriptionId);
}
this.subscriptionReplyCallers.delete(subscriptionId);
this.addRequestToMulticastSubscribers(
(subscriptionRequest as MulticastSubscriptionRequest).multicastId,
subscriptionRequest.subscriptionId
);
}
})
.catch(error => {
this.cleanupSubscription(subscriptionRequest.subscriptionId);
if (parameters.onError) {
parameters.onError(error);
}
// eslint-disable-next-line promise/no-callback-in-promise
cb(error);
});
}
    /**
     * Dispatches an incoming SubscriptionReply: resolves the pending register
     * call and notifies the listener. Error replies additionally tear down all
     * local bookkeeping for the subscription.
     * @param subscriptionReply incoming subscriptionReply
     */
    public handleSubscriptionReply(subscriptionReply: SubscriptionReply): void {
        const subscriptionReplyCaller = this.subscriptionReplyCallers.get(subscriptionReply.subscriptionId);
        const subscriptionListener = this.subscriptionListeners[subscriptionReply.subscriptionId];
        if (subscriptionReplyCaller === undefined && subscriptionListener === undefined) {
            log.error(
                `error handling subscription reply, because subscriptionReplyCaller and subscriptionListener could not be found: ${JSONSerializer.stringify(
                    subscriptionReply
                )}`
            );
            return;
        }
        try {
            if (subscriptionReply.error) {
                // Re-hydrate a serialized joynr exception into its typed form.
                if (!(subscriptionReply.error instanceof Error)) {
                    subscriptionReply.error = Typing.augmentTypes(subscriptionReply.error);
                }
                if (subscriptionReplyCaller !== undefined) {
                    subscriptionReplyCaller.cb(subscriptionReply.error);
                }
                if (subscriptionListener !== undefined && subscriptionListener.onError !== undefined) {
                    subscriptionListener.onError(subscriptionReply.error);
                }
                this.cleanupSubscription(subscriptionReply.subscriptionId);
            } else {
                if (subscriptionReplyCaller !== undefined) {
                    subscriptionReplyCaller.cb(undefined, subscriptionReply.subscriptionId);
                }
                if (subscriptionListener !== undefined && subscriptionListener.onSubscribed !== undefined) {
                    subscriptionListener.onSubscribed(subscriptionReply.subscriptionId);
                }
                // Success: only the one-shot reply caller is removed; the
                // subscription itself stays registered.
                this.subscriptionReplyCallers.delete(subscriptionReply.subscriptionId);
            }
        } catch (e) {
            log.error(
                `exception thrown during handling subscription reply ${JSONSerializer.stringify(subscriptionReply)}:\n${
                    e.stack
                }`
            );
            this.subscriptionReplyCallers.delete(subscriptionReply.subscriptionId);
        }
    }
    /**
     * Delivers an incoming multicast publication to every listener whose
     * multicastId pattern matches. Throws when no matching subscription exists.
     * @param publication incoming multicast publication
     */
    public handleMulticastPublication(publication: MulticastPublication): void {
        let subscribersFound = false;
        for (const multicastIdPattern in this.multicastSubscribers) {
            if (this.multicastSubscribers.hasOwnProperty(multicastIdPattern)) {
                // Patterns are regexes produced by MulticastWildcardRegexFactory.
                if (publication.multicastId.match(new RegExp(multicastIdPattern)) !== null) {
                    const subscribers = this.multicastSubscribers[multicastIdPattern];
                    if (subscribers !== undefined) {
                        subscribersFound = true;
                        for (let i = 0; i < subscribers.length; i++) {
                            const subscriptionListener = this.subscriptionListeners[subscribers[i]];
                            if (publication.error) {
                                if (subscriptionListener.onError) {
                                    subscriptionListener.onError(publication.error);
                                } else {
                                    log.debug(
                                        `subscriptionListener with Id "${
                                            subscribers[i]
                                        }" has no onError callback. Skipping error publication`
                                    );
                                }
                            } else if (publication.response) {
                                if (subscriptionListener.onReceive) {
                                    subscriptionListener.onReceive(publication.response);
                                } else {
                                    log.debug(
                                        `subscriptionListener with Id "${
                                            subscribers[i]
                                        }" has no onReceive callback. Skipping multicast publication`
                                    );
                                }
                            }
                        }
                    }
                }
            }
        }
        if (!subscribersFound) {
            throw new Error(
                `${"Publication cannot be handled, as no subscription with multicastId "}${
                    publication.multicastId
                } is known.`
            );
        }
    }
    /**
     * Delivers an incoming attribute/broadcast publication to its listener and
     * records the arrival time (used by the publication-miss check). Throws
     * when the subscriptionId is unknown.
     * @param publication incoming publication
     */
    public handlePublication(publication: SubscriptionPublication): void {
        if (this.subscriptionInfos[publication.subscriptionId] === undefined) {
            throw new Error(
                `${"Publication cannot be handled, as no subscription with subscriptionId "}${
                    publication.subscriptionId
                } is known.`
            );
        }
        this.setLastPublicationTime(publication.subscriptionId, Date.now());
        const subscriptionListener = this.subscriptionListeners[publication.subscriptionId];
        if (publication.error) {
            if (subscriptionListener.onError) {
                subscriptionListener.onError(publication.error);
            } else {
                log.debug(
                    `subscriptionListener with Id "${
                        publication.subscriptionId
                    }" has no onError callback. Skipping error publication`
                );
            }
        } else if (publication.response) {
            if (subscriptionListener.onReceive) {
                subscriptionListener.onReceive(publication.response);
            } else {
                log.debug(
                    `subscriptionListener with Id "${
                        publication.subscriptionId
                    }" has no onReceive callback. Skipping publication`
                );
            }
        }
    }
/**
 * Stops a subscription by sending a SubscriptionStop message to the
 * provider (the multicast variant when the subscription carries a
 * multicastId) and then cleaning up all local state for it.
 *
 * @param settings
 * @param settings.messagingQos the messaging Qos object for the ttl
 * @param settings.subscriptionId of the subscriptionId to stop
 * @returns A promise object
 */
public unregisterSubscription(settings: { messagingQos: MessagingQos; subscriptionId: string }): Promise<void> {
    if (!this.isReady()) {
        throw new Error("SubscriptionManager is already shut down");
    }
    const subscriptionInfo = this.subscriptionInfos[settings.subscriptionId];
    if (subscriptionInfo === undefined) {
        const errorMessage = `Cannot find subscription with id: ${settings.subscriptionId}`;
        log.error(errorMessage);
        return Promise.reject(new Error(errorMessage));
    }
    const subscriptionStop = new SubscriptionStop({
        subscriptionId: settings.subscriptionId
    });
    // Multicast subscriptions need the dedicated stop message so routing
    // entries for the multicastId are removed as well.
    const promise =
        subscriptionInfo.multicastId !== undefined
            ? this.dispatcher.sendMulticastSubscriptionStop({
                  from: subscriptionInfo.proxyId,
                  toDiscoveryEntry: subscriptionInfo.providerDiscoveryEntry,
                  messagingQos: settings.messagingQos,
                  multicastId: subscriptionInfo.multicastId,
                  subscriptionStop
              })
            : this.dispatcher.sendSubscriptionStop({
                  from: subscriptionInfo.proxyId,
                  toDiscoveryEntry: subscriptionInfo.providerDiscoveryEntry,
                  messagingQos: settings.messagingQos,
                  subscriptionStop
              });
    this.cleanupSubscription(settings.subscriptionId);
    return promise;
}
/** @returns true when at least one multicast subscriber is still registered */
public hasMulticastSubscriptions(): boolean {
    const multicastIds = Object.keys(this.multicastSubscribers);
    return multicastIds.length !== 0;
}
/**
 * @returns true when any subscription-related state is still held:
 * subscription infos, listeners, publication-check timers, pending
 * subscription-reply callers or multicast subscribers.
 */
public hasOpenSubscriptions(): boolean {
    return (
        Object.keys(this.subscriptionInfos).length > 0 ||
        Object.keys(this.subscriptionListeners).length > 0 ||
        Object.keys(this.publicationCheckTimerIds).length > 0 ||
        this.subscriptionReplyCallers.size > 0 ||
        this.hasMulticastSubscriptions()
    );
}
/**
* This method is meant to be called by the runtime before shutdown is called.
* It turns out that there is a necessary shutdown order and SubscriptionManager can't be shutdown first.
*
* @param timeoutMs timeout in ms after which this operation shall timeout. 0 defaults to no timeout.
* @returns - resolves if subscriptionStop message has been sent for each active subscription
* - rejects in case of any issues or timeout occurs
*/
public terminateSubscriptions(timeoutMs: number): Promise<any> {
const logPrefix = "SubscriptionManager::terminateSubscriptions";
log.info(`${logPrefix} ${timeoutMs}`);
const cleanUpPromises = [];
let activeSubscriptionId;
for (activeSubscriptionId in this.subscriptionInfos) {
if (Object.prototype.hasOwnProperty.call(this.subscriptionInfos, activeSubscriptionId)) {
const promise = this.unregisterSubscription({
subscriptionId: activeSubscriptionId,
messagingQos: new MessagingQos({})
});
cleanUpPromises.push(promise);
}
}
const cleanUpPromise = Promise.all(cleanUpPromises);
log.info(`${logPrefix} terminating a total of ${cleanUpPromises.length} subscriptions`);
return timeoutMs === 0 ? cleanUpPromise : UtilInternal.timeoutPromise(cleanUpPromise, timeoutMs);
}
/**
 * Shutdown the subscription manager.
 *
 * Clears every pending publication-check timer, rejects all outstanding
 * subscription-reply callers with an error, and marks the manager as no
 * longer started so later calls observe the shut-down state.
 */
public shutdown(): void {
    for (const subscriptionId in this.publicationCheckTimerIds) {
        // Use the prototype helper (consistent with terminateSubscriptions)
        // instead of calling hasOwnProperty on the dictionary itself, which
        // would break if a key named "hasOwnProperty" were ever stored.
        if (Object.prototype.hasOwnProperty.call(this.publicationCheckTimerIds, subscriptionId)) {
            const timerId = this.publicationCheckTimerIds[subscriptionId];
            if (timerId !== undefined) {
                LongTimer.clearTimeout(timerId);
            }
        }
    }
    this.publicationCheckTimerIds = {};
    // Fail all callers still waiting for a subscription reply.
    for (const subscriptionReplyCaller of this.subscriptionReplyCallers.values()) {
        if (subscriptionReplyCaller) {
            subscriptionReplyCaller.cb(new Error("Subscription Manager is already shut down"));
        }
    }
    this.subscriptionReplyCallers.clear();
    this.started = false;
}
}
export = SubscriptionManager; | the_stack |
import EEObject from "../Base/EEObject";
import IAttributeConverterDeclaration from "../Declaration/IAttributeConverterDeclaration";
import GomlLoader from "../Node/GomlLoader";
import EnumConverter from "../Converters/EnumConverter";
import NumberArrayConverter from "../Converters/NumberArrayConverter";
import ComponentConverter from "../Converters/ComponentConverter";
import NumberConverter from "../Converters/NumberConverter";
import ObjectConverter from "../Converters/ObjectConverter";
import ArrayConverter from "../Converters/ArrayConverter";
import NodeInterface from "../Interface/NodeInterface";
import Utility from "../Base/Utility";
import GomlInterfaceImpl from "../Interface/GomlInterfaceImpl";
import BooleanConverter from "../Converters/BooleanConverter";
import GrimoireComponent from "../Components/GrimoireComponent";
import StringArrayConverter from "../Converters/StringArrayConverter";
import StringConverter from "../Converters/StringConverter";
import Attribute from "../Node/Attribute";
import Constants from "../Base/Constants";
import ITreeInitializedInfo from "../Node/ITreeInitializedInfo";
import GomlNode from "../Node/GomlNode";
import ComponentDeclaration from "../Node/ComponentDeclaration";
import Component from "../Node/Component";
import NodeDeclaration from "../Node/NodeDeclaration";
import NSIdentity from "../Base/NSIdentity";
import Namespace from "../Base/Namespace";
import NSDictionary from "../Base/NSDictionary";
import Ensure from "../Base/Ensure";
import { Name, Nullable, Ctor, ComponentRegistering } from "../Base/Types";
/**
 * Implementation of the global Grimoire interface object (`gr`).
 * Central registry for node, component and attribute-converter declarations,
 * holder of all root GOML trees, and coordinator of plugin registration
 * and loading.
 */
export default class GrimoireInterfaceImpl extends EEObject {
// Node declarations registered via registerNode, keyed by namespaced identity.
public nodeDeclarations: NSDictionary<NodeDeclaration> = new NSDictionary<NodeDeclaration>();
// Attribute converter declarations registered via registerConverter.
public converters: NSDictionary<IAttributeConverterDeclaration> = new NSDictionary<IAttributeConverterDeclaration>();
// Component declarations registered via registerComponent.
public componentDeclarations: NSDictionary<ComponentDeclaration> = new NSDictionary<ComponentDeclaration>();
// Root GOML nodes managed by Grimoire, keyed by node id (see addRootNode).
public rootNodes: { [rootNodeId: string]: GomlNode } = {};
// Plugin load tasks queued by register(), each paired with the namespace it was registered under.
public loadTasks: ({ ns: string, task: () => Promise<void> })[] = [];
// Per-plugin library metadata exposed to user code (version and name plus plugin-defined extras).
public lib: {
[key: string]: {
__VERSION__: string;
__NAME__: string;
[key: string]: any;
}
} = {};
// Every GOML node by id (not only roots).
public nodeDictionary: { [nodeId: string]: GomlNode } = {};
// Every component instance by id.
public componentDictionary: { [componentId: string]: Component } = {};
// Optional user-supplied preferences per library.
public libraryPreference?: { [preference: string]: any };
// When true, console warnings are emitted for naming-convention violations on register.
public debug = true;
/**
 * The object assigned to gr before loading grimoire.js
 * (restored by noConflict()).
 * @type {any}
 */
public noConflictPreserve: any;
// Namespace of the plugin currently being registered; consumed by register().
private _registeringPluginNamespace: string;
// Namespace applied while a plugin's load task runs; reset to the default afterwards.
private _registrationContext: string = Constants.defaultNamespace;
// Handlers invoked once GOML loading has finished (delegates to GomlLoader).
public get initializedEventHandler(): ((scriptTags: HTMLScriptElement[]) => void)[] {
return GomlLoader.initializedEventHandlers;
}
// Whether the initialized handlers were already called (delegates to GomlLoader).
public get callInitializedAlready(): boolean {
return GomlLoader.callInitializedAlready;
}
/**
 * [obsolete] use `Namespace.define` instead.
 * @param {string} ns namespace URI to be used
 * @return a function creating namespaced identities for names in that namespace
 */
public ns(ns: string): (name: string) => NSIdentity {
return (name: string) => Namespace.define(ns).for(name);
}
/**
 * Register the built-in converters, the base GrimoireComponent and the
 * base node declaration. Called at boot and again from clear().
 */
public initialize(): void {
this.registerConverter("String", StringConverter);
this.registerConverter("StringArray", StringArrayConverter);
this.registerConverter("Boolean", BooleanConverter);
this.registerConverter(ArrayConverter);
this.registerConverter("Object", ObjectConverter);
this.registerConverter(EnumConverter);
this.registerConverter("Number", NumberConverter);
this.registerConverter(ComponentConverter);
this.registerConverter("NumberArray", NumberArrayConverter);
this.registerComponent(GrimoireComponent);
this.registerNode("grimoire-node-base", ["GrimoireComponent"]);
}
/**
 * Register a plugin load task. The task is queued together with the
 * namespace previously announced via notifyRegisteringPlugin and is
 * executed later by resolvePlugins().
 * @param loadTask async task that performs the plugin's registrations
 */
public register(loadTask: () => Promise<void>): void {
this.loadTasks.push({ ns: this._registeringPluginNamespace, task: loadTask });
this._registeringPluginNamespace = Constants.defaultNamespace;
}
/**
 * Run all queued plugin load tasks in order, then resolve the dependency
 * graphs of all component and node declarations. A failing task is logged
 * but does not abort the remaining tasks.
 */
public async resolvePlugins(): Promise<void> {
for (let i = 0; i < this.loadTasks.length; i++) {
const obj = this.loadTasks[i];
// Registrations made inside the task are qualified with the plugin's namespace.
this._registrationContext = obj.ns;
try {
await obj.task();
} catch (e) {
console.error(`Error: loadTask of plugin '${obj.ns}' is failed.`);
console.error(e);
}
}
this._registrationContext = Constants.defaultNamespace;
// resolveDependency
this.componentDeclarations.forEach(dec => {
dec.resolveDependency();
});
this.nodeDeclarations.forEach(dec => {
dec.resolveDependency();
});
}
/**
 * register custom component
 * @param {string | NSIdentity} name component name (or taken from obj.componentName when omitted)
 * @param obj component definition object or constructor (must declare 'attributes')
 * @param superComponent optional component to inherit from
 * @return the created ComponentDeclaration
 */
public registerComponent(obj: ComponentRegistering<Object | Ctor<Component>>, superComponent?: Name | Ctor<Component>): ComponentDeclaration;
public registerComponent(name: Name, obj: ComponentRegistering<Object | Ctor<Component>>, superComponent?: Name | Ctor<Component>): ComponentDeclaration;
public registerComponent(arg1: Name | ComponentRegistering<Object | Ctor<Component>>, arg2?: Name | Ctor<Component> | ComponentRegistering<Object | Ctor<Component>>, arg3?: Name | Ctor<Component>): ComponentDeclaration {
// Normalize the two overload shapes into (name, obj, superComponent).
let name: Name;
let obj: ComponentRegistering<Object | Ctor<Component>>;
let superComponent: Name | Ctor<Component> | undefined;
if (typeof arg1 === "string" || arg1 instanceof NSIdentity) {
name = arg1;
obj = arg2 as ComponentRegistering<Object | Ctor<Component>>;
superComponent = arg3;
} else {
obj = arg1 as ComponentRegistering<Object | Ctor<Component>>;
superComponent = arg2 as Name | Ctor<Component>;
if (obj.componentName == null) {
throw new Error(`registering component has not 'componentName': ${obj}`);
}
name = obj.componentName;
}
name = this._ensureTobeNSIdentityOnRegister(name);
// Reject duplicate registrations and non-Component constructors.
if (this.componentDeclarations.get(name)) {
throw new Error(`component ${name.fqn} is already registerd.`);
}
if (typeof obj === "function" && !(obj.prototype instanceof Component)) {
throw new Error(`component constructor ${name.fqn} must be inherits Component`);
}
if (this.debug && !Utility.isCamelCase(name.name)) {
console.warn(`component ${name.name} is registerd. but,it should be 'CamelCase'.`);
}
const attrs = obj.attributes;
if (!attrs) {
throw new Error("component must has 'attributes'");
}
// Every declared attribute must carry an explicit default value.
for (let key in attrs) {
if (attrs[key].default === void 0) {
throw new Error(`default value of attribute ${key} in ${name.fqn} must be not 'undefined'.`);
}
}
const dec = new ComponentDeclaration(name, obj, superComponent);
this.componentDeclarations.set(name, dec);
return dec;
}
/**
 * Register a custom GOML node declaration.
 * @param name tag name of the node (expected to be snake-case)
 * @param requiredComponents components attached to the node by default
 * @param defaults default attribute values
 * @param superNode node declaration to inherit from
 * @param freezeAttributes attribute names frozen after creation
 * @return the created NodeDeclaration
 */
public registerNode(name: Name, requiredComponents: Name[] = [],
defaults?: { [key: string]: any } | NSDictionary<any>,
superNode?: Name, freezeAttributes?: Name[]): NodeDeclaration {
const registerId = this._ensureTobeNSIdentityOnRegister(name);
if (this.nodeDeclarations.get(registerId)) {
throw new Error(`gomlnode ${registerId.fqn} is already registerd.`);
}
if (this.debug && !Utility.isSnakeCase(registerId.name)) {
console.warn(`node ${registerId.name} is registerd. but,it should be 'snake-case'.`);
}
const declaration = new NodeDeclaration(registerId, requiredComponents || [], defaults || {}, superNode, freezeAttributes);
this.nodeDeclarations.set(registerId, declaration);
return declaration;
}
/**
 * Obtain the companion dictionary of the tree owned by the given script tag.
 * Throws when the tag is not a registered GOML root.
 */
public getCompanion(scriptTag: Element): NSDictionary<any> {
const root = this.getRootNode(scriptTag);
if (root) {
return root.companion;
} else {
throw new Error("scriptTag is not goml");
}
}
/**
 * Add specified nodes as root node managed by Grimoire.js
 * This method is typically used for internal.
 * @param tag the script element containing GOML source
 * @param rootNode root node of Goml
 * @return id of the registered root node
 */
public addRootNode(tag: HTMLScriptElement, rootNode: GomlNode): string {
if (!rootNode) {
throw new Error("can not register null to rootNodes.");
}
// Link the script tag to the tree so getRootNode can find it again.
tag.setAttribute("x-rootNodeId", rootNode.id);
this.rootNodes[rootNode.id] = rootNode;
rootNode.companion.set(this.ns(Constants.defaultNamespace)("scriptElement"), tag);
// awake and mount tree.
rootNode.setMounted(true);
rootNode.broadcastMessage("treeInitialized", <ITreeInitializedInfo>{
ownerScriptTag: tag,
id: rootNode.id
});
rootNode.sendInitializedMessage(<ITreeInitializedInfo>{
ownerScriptTag: tag,
id: rootNode.id
});
// send events to catch root node appended
this.emit("root-node-added", {
ownerScriptTag: tag,
rootNode: rootNode
});
return rootNode.id;
}
/**
 * Look up the root GOML node associated with a script tag; returns null
 * when the tag carries no root-node id, and throws when the id is present
 * but unknown.
 */
public getRootNode(scriptTag: Element): Nullable<GomlNode> {
const id = scriptTag.getAttribute("x-rootNodeId");
if (id) {
let ret = this.rootNodes[id];
if (!ret) {
throw new Error(`threr is no rootNode has id ${id}`);
}
return ret;
} else {
return null;
}
}
// Restore whatever occupied window.gr before grimoire.js was loaded.
public noConflict(): void {
(window as any)["gr"] = this.noConflictPreserve;
}
/**
 * Query script tags with a CSS selector and collect the root nodes they
 * own; tags without an associated root node are skipped.
 */
public queryRootNodes(query: string): GomlNode[] {
const scriptTags = document.querySelectorAll(query);
const nodes: GomlNode[] = [];
for (let i = 0; i < scriptTags.length; i++) {
const node = this.getRootNode(scriptTags.item(i));
if (node) {
nodes.push(node);
}
}
return nodes;
}
/**
 * Register an attribute converter, either as (name, convert function) or
 * as a full converter declaration object.
 */
public registerConverter(name: Name, converter: ((val: any, attr: Attribute) => any)): void;
public registerConverter(declaration: IAttributeConverterDeclaration): void;
public registerConverter(arg1: Name | IAttributeConverterDeclaration, converter?: ((val: any, attr: Attribute) => any)): void {
if (converter) {
// Wrap the bare function into a declaration that accepts any value.
this.registerConverter({ name: this._ensureTobeNSIdentityOnRegister(arg1 as any), verify: () => true, convert: converter });
return;
}
const dec = arg1 as IAttributeConverterDeclaration;
this.converters.set(this._ensureTobeNSIdentityOnRegister(dec.name), dec);
}
/**
 * Override an existing node declaration with additional default components
 * and/or default attribute values.
 */
public overrideDeclaration(targetDeclaration: Name, additionalComponents: Name[]): NodeDeclaration;
public overrideDeclaration(targetDeclaration: Name, defaults: { [attrName: string]: any }): NodeDeclaration;
public overrideDeclaration(targetDeclaration: Name, additionalComponents: Name[], defaults: { [attrName: string]: any }): NodeDeclaration;
public overrideDeclaration(targetDeclaration: Name, arg2: Name[] | { [attrName: string]: any }, defaults?: { [attrName: string]: any }): NodeDeclaration {
const dec = this.nodeDeclarations.get(targetDeclaration);
if (!dec) {
throw new Error(`attempt not-exist node declaration : ${Ensure.tobeNSIdentity(targetDeclaration).name}`);
}
if (!dec.resolvedDependency) {
dec.resolveDependency();
}
if (defaults) {
// Three-argument overload: additional components plus default attributes.
const additionalC = arg2 as Name[];
for (let i = 0; i < additionalC.length; i++) {
dec.addDefaultComponent(additionalC[i]);
}
dec.defaultAttributes.pushDictionary(Ensure.tobeNSDictionary(defaults));
} else if (Array.isArray(arg2)) { // only additional components.
for (let i = 0; i < arg2.length; i++) {
dec.addDefaultComponent(arg2[i]);
}
} else {
// Only default attributes were supplied.
dec.defaultAttributes.pushDictionary(Ensure.tobeNSDictionary(arg2));
}
return dec;
}
/**
 * This method is not for users.
 * Just for unit testing.
 *
 * Clear all configuration that GrimoireInterface contain.
 */
public clear(): void {
this.nodeDeclarations.clear();
this.componentDeclarations.clear();
this.converters.clear();
for (let key in this.rootNodes) {
delete this.rootNodes[key];
}
for (let key in this.nodeDictionary) {
delete this.nodeDictionary[key];
}
for (let key in this.componentDictionary) {
delete this.componentDictionary[key];
}
this.loadTasks.splice(0, this.loadTasks.length);
this._registeringPluginNamespace = Constants.defaultNamespace;
// Re-register the built-ins so the interface stays usable after a clear.
this.initialize();
}
// Attach a user-defined method to the gr interface itself; throws on name clash.
public extendGrimoireInterface(name: string, func: Function): void {
if ((<any>this)[name]) {
throw new Error(`gr.${name} can not extend.it is already exist.`);
}
(<any>this)[name] = func.bind(this);
}
// Attach a user-defined method to the GOML interface; throws on name clash.
public extendGomlInterface(name: string, func: Function): void {
if ((GomlInterfaceImpl as any)[name]) {
throw new Error(`gr.${name} can not extend.it is already exist.`);
}
(GomlInterfaceImpl as any)[name] = func.bind(this);
}
// Attach a user-defined method to the node interface; throws on name clash.
public extendNodeInterface(name: string, func: Function): void {
if ((NodeInterface as any)[name]) {
throw new Error(`gr.${name} can not extend.it is already exist.`);
}
(NodeInterface as any)[name] = func.bind(this);
}
/**
 * Notify GrimoireInterface of the plugin namespace about to be registered.
 * The notified namespace is used when resolving the plugin's load task.
 * A "grimoire(js)-" package-name prefix is stripped automatically.
 * @param {string} namespace namespace of the plugin to be registered.
 */
public notifyRegisteringPlugin(namespace: string): void {
let res = /^[Gg]rimoire(?:js|JS)?-(.*)$/.exec(namespace);
if (res) {
namespace = res[1];
}
this._registeringPluginNamespace = namespace;
}
/**
 * Resolve a component reference (constructor, name string, or identity)
 * to its constructor; returns null when it cannot be resolved.
 */
private _ensureNameTobeConstructor(component: Name | Ctor<Component>): Nullable<Ctor<Component>> {
if (!component) {
return null;
}
if (typeof component === "function") {
return component;
} else if (typeof component === "string") {
return this._ensureNameTobeConstructor(Ensure.tobeNSIdentity(component));
} else {
// here NSIdentity.
let c = this.componentDeclarations.get(component);
if (!c) {
return null;
}
return c.ctor;
}
}
/**
 * Normalize a registration name to an NSIdentity. Strings containing "|"
 * are treated as fully-qualified names; plain strings are qualified with
 * the current registration context namespace.
 */
private _ensureTobeNSIdentityOnRegister(name: Name): NSIdentity;
private _ensureTobeNSIdentityOnRegister(name: null | undefined): null;
private _ensureTobeNSIdentityOnRegister(name: Name | null | undefined): Nullable<NSIdentity> {
if (!name) {
return null;
}
if (typeof name === "string") {
if (name.indexOf("|") !== -1) {
return NSIdentity.fromFQN(name);
}
return NSIdentity.fromFQN(Namespace.define(this._registrationContext), name);
} else {
return name;
}
}
}
import 'webmidi';
declare namespace JZZ {
namespace SMPTE {
interface Constructor {
/** Create new SMPTE object
*
* https://jazz-soft.net/doc/JZZ/smpte.html#constructor */
new (...args: any[]): SMPTE;
/** Create new SMPTE object
*
* https://jazz-soft.net/doc/JZZ/smpte.html#constructor */
(...args: any[]): SMPTE;
}
}
interface SMPTE {
/** Convert SMPTE to human-readable string
*
* https://jazz-soft.net/doc/JZZ/smpte.html#tostring */
toString(): string;
/** SMPTE event is Full Frame
*
* https://jazz-soft.net/doc/JZZ/smpte.html#isFullFrame */
isFullFrame(): boolean;
/** Get SMPTE type
*
* https://jazz-soft.net/doc/JZZ/smpte.html#getType */
getType(): number;
/** Get SMPTE hour
*
* https://jazz-soft.net/doc/JZZ/smpte.html#getHour */
getHour(): number;
/** Get SMPTE minute
*
* https://jazz-soft.net/doc/JZZ/smpte.html#getMinute */
getMinute(): number;
/** Get SMPTE second
*
* https://jazz-soft.net/doc/JZZ/smpte.html#getSecond */
getSecond(): number;
/** Get SMPTE frame
*
* https://jazz-soft.net/doc/JZZ/smpte.html#getFrame */
getFrame(): number;
/** Get SMPTE quarter frame
*
* https://jazz-soft.net/doc/JZZ/smpte.html#getQuarter */
getQuarter(): number;
/** Set SMPTE type
*
* https://jazz-soft.net/doc/JZZ/smpte.html#setType */
setType(n: number): SMPTE;
/** Set SMPTE hour
*
* https://jazz-soft.net/doc/JZZ/smpte.html#setHour */
setHour(n: number): SMPTE;
/** Set SMPTE minute
*
* https://jazz-soft.net/doc/JZZ/smpte.html#setMinute */
setMinute(n: number): SMPTE;
/** Set SMPTE second
*
* https://jazz-soft.net/doc/JZZ/smpte.html#setSecond */
setSecond(n: number): SMPTE;
/** Set SMPTE frame
*
* https://jazz-soft.net/doc/JZZ/smpte.html#setFrame */
setFrame(n: number): SMPTE;
/** Set SMPTE quarter frame
*
* https://jazz-soft.net/doc/JZZ/smpte.html#setQuarter */
setQuarter(n: number): SMPTE;
/** Increase SMPTE time by one frame
*
* https://jazz-soft.net/doc/JZZ/smpte.html#incrFrame */
incrFrame(): SMPTE;
/** Decrease SMPTE time by one frame
*
* https://jazz-soft.net/doc/JZZ/smpte.html#decrFrame */
decrFrame(): SMPTE;
/** Increase SMPTE time by quarter frame
*
* https://jazz-soft.net/doc/JZZ/smpte.html#incrQF */
incrQF(): SMPTE;
/** Decrease SMPTE time by quarter frame
*
* https://jazz-soft.net/doc/JZZ/smpte.html#decrQF */
decrQF(): SMPTE;
/** Read MIDI Time Code message
*
* https://jazz-soft.net/doc/JZZ/smpte.html#read */
read(...args: any[]): boolean;
/** Reset SMPTE object
*
* https://jazz-soft.net/doc/JZZ/smpte.html#reset */
reset(...args: any[]): SMPTE;
}
namespace MIDI {
interface Constructor {
/** Create new MIDI message
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#constructor */
new (...args: any[]): MIDI;
/** Create new MIDI message
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#constructor */
(...args: any[]): MIDI;
// Channel-dependent
/** Note On: `[9x nn vv]`; `x`: channel, `nn`: note, `vv`: velocity
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#noteOn */
noteOn(x: number, nn: number | string, vv?: number): MIDI;
/** Note Off: `[8x nn vv]`; `x`: channel, `nn`: note, `vv`: velocity
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#noteOff */
noteOff(x: number, nn: number | string, vv?: number): MIDI;
/** Polyphonic aftetouch: `[Ax nn vv]`; `x`: channel, `nn`: note, `vv`: value
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#aftertouch */
aftertouch(x: number, nn: number | string, vv: number): MIDI;
/** MIDI control: `[Bx nn vv]`; `x`: channel, `nn`: function, `vv`: value
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#control */
control(x: number, nn: number, vv: number): MIDI;
/** Program change: `[Cx nn]`; `x`: channel, `nn`: program
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#program */
program(x: number, nn: number | string): MIDI;
/** Pressure: `[Dx nn]`; `x`: channel, `nn`: pressure
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#pressure */
pressure(x: number, nn: number): MIDI;
/** Pitch bend: `[Ex lsb msb]`; `x`: channel, `msb`/`lsb`: most/least significant 7 bits
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#pitchBend */
pitchBend(x: number, nn: number): MIDI;
/** Bank select MSB: `[Bx 00 nn]`; `x`: channel, `nn`: most significant 7 bits
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#bankMSB */
bankMSB(x: number, nn: number): MIDI;
/** Bank select LSB: `[Bx 20 nn]`; `x`: channel, `nn`: least significant 7 bits
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#bankLSB */
bankLSB(x: number, nn: number): MIDI;
/** Modulation MSB: `[Bx 01 nn]`; `x`: channel, `nn`: most significant 7 bits
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#modMSB */
modMSB(x: number, nn: number): MIDI;
/** Modulation LSB: `[Bx 21 nn]`; `x`: channel, `nn`: least significant 7 bits
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#modLSB */
modLSB(x: number, nn: number): MIDI;
/** Breath controller MSB: `[Bx 02 nn]`; `x`: channel, `nn`: most significant 7 bits
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#breathMSB */
breathMSB(x: number, nn: number): MIDI;
/** Breath controller LSB: `[Bx 22 nn]`; `x`: channel, `nn`: least significant 7 bits
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#breathLSB */
breathLSB(x: number, nn: number): MIDI;
/** Foot controller MSB: `[Bx 04 nn]`; `x`: channel, `nn`: most significant 7 bits
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#footMSB */
footMSB(x: number, nn: number): MIDI;
/** Foot controller LSB: `[Bx 24 nn]`; `x`: channel, `nn`: least significant 7 bits
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#footLSB */
footLSB(x: number, nn: number): MIDI;
/** Portamento MSB: `[Bx 05 nn]`; `x`: channel, `nn`: most significant 7 bits
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#portamentoMSB */
portamentoMSB(x: number, nn: number): MIDI;
/** Portamento LSB: `[Bx 25 nn]`; `x`: channel, `nn`: least significant 7 bits
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#portamentoLSB */
portamentoLSB(x: number, nn: number): MIDI;
/** Volume MSB: `[Bx 07 nn]`; `x`: channel, `nn`: most significant 7 bits
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#volumeMSB */
volumeMSB(x: number, nn: number): MIDI;
/** Volume LSB: `[Bx 27 nn]`; `x`: channel, `nn`: least significant 7 bits
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#volumeLSB */
volumeLSB(x: number, nn: number): MIDI;
/** Balance MSB: `[Bx 08 nn]`; `x`: channel, `nn`: most significant 7 bits
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#balanceMSB */
balanceMSB(x: number, nn: number): MIDI;
/** Balance LSB: `[Bx 28 nn]`; `x`: channel, `nn`: least significant 7 bits
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#balanceLSB */
balanceLSB(x: number, nn: number): MIDI;
/** Pan MSB: `[Bx 0A nn]`; `x`: channel, `nn`: most significant 7 bits
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#panMSB */
panMSB(x: number, nn: number): MIDI;
/** Pan LSB: `[Bx 2A nn]`; `x`: channel, `nn`: least significant 7 bits
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#panLSB */
panLSB(x: number, nn: number): MIDI;
/** Expression MSB: `[Bx 0B nn]`; `x`: channel, `nn`: most significant 7 bits
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#expressionMSB */
expressionMSB(x: number, nn: number): MIDI;
/** Expression LSB: `[Bx 2B nn]`; `x`: channel, `nn`: least significant 7 bits
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#expressionLSB */
expressionLSB(x: number, nn: number): MIDI;
/** Damper on/off: `[Bx 40 nn]`; `x`: channel, `nn`: `bb ? 7f : 00`
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#damper */
damper(x: number, bb: boolean): MIDI;
/** Portamento on/off: `[Bx 41 nn]`; `x`: channel, `nn`: `bb ? 7f : 00`
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#portamento */
portamento(x: number, bb: boolean): MIDI;
/** Sostenuto on/off: `[Bx 42 nn]`; `x`: channel, `nn`: `bb ? 7f : 00`
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#sostenuto */
sostenuto(x: number, bb: boolean): MIDI;
/** Soft on/off: `[Bx 43 nn]`; `x`: channel, `nn`: `bb ? 7f : 00`
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#soft */
soft(x: number, bb: boolean): MIDI;
/** All sound off: `[Bx 78 00]`; `x`: channel
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#allSoundOff */
allSoundOff(x: number): MIDI;
/** All notes off: `[Bx 7B 00]`; `x`: channel
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#allNotesOff */
allNotesOff(x: number): MIDI;
// Channel-independent
/** Song position: `[F2 lsb msb]`; `msb`/`lsb`: most/least significant 7 bits
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#songPosition */
songPosition(nn: number): MIDI;
/** Song select: `[F3 nn]`; `nn`: song number
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#songSelect */
songSelect(nn: number): MIDI;
/** Tune: `[F6]`
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#tune */
tune(): MIDI;
/** Clock: `[F8]`
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#clock */
clock(): MIDI;
/** Start: `[FA]`
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#start */
start(): MIDI;
/** Continue: `[FB]`
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#continue */
continue(): MIDI;
/** Stop: `[FC]`
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#stop */
stop(): MIDI;
/** Active sense signal: `[FE]`
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#active */
active(): MIDI;
/** Reset: `[FF]`
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#reset */
reset(): MIDI;
/** ID Request SysEx: `[F0 7E 7F 06 01 F7]`
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#sxIdRequest */
sxIdRequest(): MIDI;
/** MIDI time code (SMPTE quarter frame): `[F1 xx]`
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#mtc */
mtc(t: SMPTE): MIDI;
/** SMPTE Full Frame SysEx: `[F0 7F 7F 01 01 xx xx xx xx F7]`
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#sxFullFrame */
sxFullFrame(t: SMPTE): MIDI;
// SMF
/** Standard MIDI File meta event: [FFxx len data]
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#smf */
smf(...args: any): MIDI;
/** SMF Sequence Number: [FF00 02 ssss]
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfSeqNumber */
smfSeqNumber(ssss: number): MIDI;
/** SMF Text: [FF01 len text]; used in Karaoke files
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfText */
smfText(str: string): MIDI;
/** SMF Copyright: [FF02 len text]
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfCopyright */
smfCopyright(str: string): MIDI;
/** SMF Sequence Name: [FF03 len text]
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfSeqName */
smfSeqName(str: string): MIDI;
/** SMF Instrument Name: [FF04 len text]
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfInstrName */
smfInstrName(str: string): MIDI;
/** SMF Lyric: [FF05 len text]
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfLyric */
smfLyric(str: string): MIDI;
/** SMF Marker: [FF06 len text]
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfMarker */
smfMarker(str: string): MIDI;
/** SMF Cue Point: [FF07 len text]
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfCuePoint */
smfCuePoint(str: string): MIDI;
/** SMF Program Name: [FF08 len text]
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfProgName */
smfProgName(str: string): MIDI;
/** SMF Device Name: [FF09 len text]
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfDevName */
smfDevName(str: string): MIDI;
/** SMF Channel Prefix: [FF20 01 cc]
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfChannelPrefix */
smfChannelPrefix(cc: number): MIDI;
/** SMF MIDI Port [FF21 01 pp]
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfMidiPort */
smfMidiPort(pp: number): MIDI;
/** SMF End of Track: [FF2F 00]
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfEndOfTrack */
smfEndOfTrack(): MIDI;
/** SMF Tempo: [FF51 03 tttttt]
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfTempo */
smfTempo(tttttt: number): MIDI;
/** SMF Tempo, BMP: [FF51 03 tttttt]
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfBPM */
smfBPM(bpm: number): MIDI;
/** SMF SMPTE offset: [FF54 05 hh mm ss fr ff]
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfSMPTE */
smfSMPTE(smpte: SMPTE | number[]): MIDI;
/** SMF Time Signature: [FF58 04 nn dd cc bb]
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfTimeSignature */
smfTimeSignature(nn: number, dd: number, cc?: number, bb?: number): MIDI;
/** SMF Key Signature: [FF59 02 sf mi]
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfKeySignature */
smfKeySignature(key: string): MIDI;
/** SMF Sequencer-specific Data: [FF7F len data]
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfSequencer */
smfSequencer(data: string): MIDI;
// Other
/** Note MIDI value by name
*
* https://jazz-soft.net/doc/JZZ/midigm.html#noteValue */
noteValue(note: number | string): number;
/** Program MIDI value by name
*
* https://jazz-soft.net/doc/JZZ/midigm.html#programValue */
programValue(prog: number | string): number;
/** Note frequency in HZ; `a5`: frequency of the `A5`, default: `440`
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#freq */
freq(note: number | string, a5?: number): number;
}
}
interface MIDI extends Array<number> {
/** Convert MIDI to human-readable string
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#tostring */
toString(): string;
/** The message is Note On
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#isNoteOn */
isNoteOn(): boolean;
/** The message is Note Off
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#isNoteOff */
isNoteOff(): boolean;
/** The message is a SysEx
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#isSysEx */
isSysEx(): boolean;
/** The message is a full SysEx
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#isFullSysEx */
isFullSysEx(): boolean;
/** The message is a Standard MIDI File meta event
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#isSMF */
isSMF(): boolean;
/** The message is a Tempo meta event
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#isTempo */
isTempo(): boolean;
/** The message is a Time Signature meta event
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#isTimeSignature */
isTimeSignature(): boolean;
/** The message is a Key Signature meta event
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#isKeySignature */
isKeySignature(): boolean;
/** The message is an End of Track meta event
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#isEOT */
isEOT(): boolean;
/** Return the channel number where applicable
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#getChannel */
getChannel(): number;
/** Set the channel number where applicable
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#setChannel */
setChannel(cc: number): MIDI;
/** Return the note value where applicable
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#getNote */
getNote(): number;
/** Set the note where applicable
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#setNote */
setNote(note: number | string): MIDI;
/** Return the velocity where applicable
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#getVelocity */
getVelocity(): number;
/** Set the velocity where applicable
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#setVelocity */
setVelocity(vv: number): MIDI;
/** Return the SysEx channel number where applicable
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#getSysExChannel */
getSysExChannel(): number;
/** Set the SysEx channel number where applicable
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#setSysExChannel */
setSysExChannel(cc: number): MIDI;
/** Get data from SMF meta event
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#getData */
getData(): string;
/** Set data on SMF meta event
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#setData */
setData(data: string): MIDI;
/** Get UTF8 text from SMF meta event
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#getText */
getText(): string;
/** Set UTF8 text on SMF meta event
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#setText */
setText(str: string): MIDI;
/** Get tempo in ms per quarter note from SMF Tempo meta event
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#getTempo */
getTempo(): number;
/** Get tempo as BPM from SMF Tempo meta event
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#getBPM */
getBPM(): number;
/** Get time signature from SMF Time Signature meta event
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#getTimeSignature */
getTimeSignature(): number[];
/** Get key signature from SMF Key Signature meta event
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#getKeySignature */
getKeySignature(): any[];
}
/** Companion namespace for `Stub`: `Stub.Async` is a `Stub` that is also thenable (PromiseLike). */
namespace Stub {
  interface Async extends Stub, PromiseLike<Stub> {}
}
/** Base chainable object returned by JZZ calls: status branching (`and`/`or`) and timed delays (`wait`). */
interface Stub {
  /** Print if OK
   *
   * https://jazz-soft.net/doc/JZZ/common.html#and */
  and(text: string): Stub.Async;
  /** Execute if OK
   *
   * https://jazz-soft.net/doc/JZZ/common.html#and */
  and(func: (self?: Stub) => void): Stub.Async;
  /** Print if not OK
   *
   * https://jazz-soft.net/doc/JZZ/common.html#or */
  or(text: string): Stub.Async;
  /** Execute if not OK
   *
   * https://jazz-soft.net/doc/JZZ/common.html#or */
  or(func: (self?: Stub) => void): Stub.Async;
  /** Wait `ms` milliseconds
   *
   * https://jazz-soft.net/doc/JZZ/common.html#wait */
  wait(ms: number): Stub.Async;
}
/** Companion namespace for `Engine`: `Engine.Async` is an `Engine` that is also thenable (PromiseLike). */
namespace Engine {
  interface Async extends Engine, PromiseLike<Engine> {}
}
/** The JZZ MIDI engine: port enumeration/opening and connection watching; chainable like `Stub`. */
interface Engine {
  // Stub
  /** Print if OK
   *
   * https://jazz-soft.net/doc/JZZ/common.html#and */
  and(text: string): Engine.Async;
  /** Execute if OK
   *
   * https://jazz-soft.net/doc/JZZ/common.html#and */
  and(func: (self?: Stub) => void): Engine.Async;
  /** Print if not OK
   *
   * https://jazz-soft.net/doc/JZZ/common.html#or */
  or(text: string): Engine.Async;
  /** Execute if not OK
   *
   * https://jazz-soft.net/doc/JZZ/common.html#or */
  or(func: (self?: Stub) => void): Engine.Async;
  /** Wait `ms` milliseconds
   *
   * https://jazz-soft.net/doc/JZZ/common.html#wait */
  wait(ms: number): Engine.Async;
  // Engine
  /** Return an `info` object
   *
   * https://jazz-soft.net/doc/JZZ/jzz.html#info */
  info(): any;
  /** Refresh the list of available ports
   *
   * https://jazz-soft.net/doc/JZZ/jzz.html#refresh */
  refresh(): Engine.Async;
  /** Open MIDI-In port
   *
   * https://jazz-soft.net/doc/JZZ/midiin.html#open */
  openMidiIn(arg?: any): Port.Async;
  /** Open MIDI-Out port
   *
   * https://jazz-soft.net/doc/JZZ/midiout.html#open */
  openMidiOut(arg?: any): Port.Async;
  /** Watch MIDI connection changes
   *
   * https://jazz-soft.net/doc/JZZ/jzz.html#onChange */
  onChange(arg?: any): Watcher.Async;
  /** Close MIDI engine
   *
   * https://jazz-soft.net/doc/JZZ/jzz.html#close */
  close(): Stub.Async;
}
/** Companion namespace for `Port`: `Port.Async` is a `Port` that is also thenable (PromiseLike). */
namespace Port {
  interface Async extends Port, PromiseLike<Port> {}
}
/** A MIDI-In/Out port: send/emit messages, select channels, and produce channel voice,
 * system and Standard MIDI File (SMF) meta events; chainable like `Stub`. */
interface Port {
  // Stub
  /** Print if OK
   *
   * https://jazz-soft.net/doc/JZZ/common.html#and */
  and(text: string): Port.Async;
  /** Execute if OK
   *
   * https://jazz-soft.net/doc/JZZ/common.html#and */
  and(func: (self?: Stub) => void): Port.Async;
  /** Print if not OK
   *
   * https://jazz-soft.net/doc/JZZ/common.html#or */
  or(text: string): Port.Async;
  /** Execute if not OK
   *
   * https://jazz-soft.net/doc/JZZ/common.html#or */
  or(func: (self?: Stub) => void): Port.Async;
  /** Wait `ms` milliseconds
   *
   * https://jazz-soft.net/doc/JZZ/common.html#wait */
  wait(ms: number): Port.Async;
  // Port
  /** Return an `info` object
   *
   * https://jazz-soft.net/doc/JZZ/midiin.html#info */
  info(): any;
  /** Return the port name
   *
   * https://jazz-soft.net/doc/JZZ/midiin.html#name */
  name(): string;
  /** Connect MIDI port
   *
   * https://jazz-soft.net/doc/JZZ/midiin.html#connect */
  connect(arg: any): Port.Async;
  /** Disconnect MIDI port
   *
   * https://jazz-soft.net/doc/JZZ/midiin.html#disconnect */
  disconnect(arg?: any): Port.Async;
  /** Send MIDI message
   *
   * https://jazz-soft.net/doc/JZZ/midiout.html#send */
  send(...args: any[]): Port.Async;
  /** Emit MIDI message
   *
   * https://jazz-soft.net/doc/JZZ/midithru.html#emit */
  emit(...args: any[]): Port.Async;
  /** Emit MIDI message
   *
   * https://jazz-soft.net/doc/JZZ/midithru.html#emit */
  _emit(...args: any[]): void;
  /** Close MIDI port
   *
   * https://jazz-soft.net/doc/JZZ/midiin.html#close */
  close(): Stub.Async;
  /** MIDI channel
   *
   * https://jazz-soft.net/doc/JZZ/midiout.html#ch */
  ch(x: number): Channel.Async;
  /** MIDI channel
   *
   * https://jazz-soft.net/doc/JZZ/midiout.html#ch */
  ch(): Port.Async;
  /** MPE channel
   *
   * https://jazz-soft.net/doc/JZZ/midiout.html#mpe */
  mpe(m: number, n: number): MPE.Async;
  /** MPE channel
   *
   * https://jazz-soft.net/doc/JZZ/midiout.html#mpe */
  mpe(): Port.Async;
  /** Play note
   *
   * https://jazz-soft.net/doc/JZZ/midiout.html#note */
  note(x: number, nn: number | string, vv?: number, tt?: number): Port.Async;
  // Channel-dependent
  /** Note On: `[9x nn vv]`; `x`: channel, `nn`: note, `vv`: velocity
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#noteOn */
  noteOn(x: number, nn: number | string, vv?: number): Port.Async;
  /** Note Off: `[8x nn vv]`; `x`: channel, `nn`: note, `vv`: velocity
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#noteOff */
  noteOff(x: number, nn: number | string, vv?: number): Port.Async;
  /** Polyphonic aftertouch: `[Ax nn vv]`; `x`: channel, `nn`: note, `vv`: value
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#aftertouch */
  aftertouch(x: number, nn: number | string, vv: number): Port.Async;
  /** MIDI control: `[Bx nn vv]`; `x`: channel, `nn`: function, `vv`: value
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#control */
  control(x: number, nn: number, vv: number): Port.Async;
  /** Program change: `[Cx nn]`; `x`: channel, `nn`: program
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#program */
  program(x: number, nn: number | string): Port.Async;
  /** Pressure: `[Dx nn]`; `x`: channel, `nn`: pressure
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#pressure */
  pressure(x: number, nn: number): Port.Async;
  /** Pitch bend: `[Ex lsb msb]`; `x`: channel, `msb`/`lsb`: most/least significant 7 bits
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#pitchBend */
  pitchBend(x: number, nn: number): Port.Async;
  /** Bank select MSB: `[Bx 00 nn]`; `x`: channel, `nn`: most significant 7 bits
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#bankMSB */
  bankMSB(x: number, nn: number): Port.Async;
  /** Bank select LSB: `[Bx 20 nn]`; `x`: channel, `nn`: least significant 7 bits
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#bankLSB */
  bankLSB(x: number, nn: number): Port.Async;
  /** Modulation MSB: `[Bx 01 nn]`; `x`: channel, `nn`: most significant 7 bits
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#modMSB */
  modMSB(x: number, nn: number): Port.Async;
  /** Modulation LSB: `[Bx 21 nn]`; `x`: channel, `nn`: least significant 7 bits
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#modLSB */
  modLSB(x: number, nn: number): Port.Async;
  /** Breath controller MSB: `[Bx 02 nn]`; `x`: channel, `nn`: most significant 7 bits
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#breathMSB */
  breathMSB(x: number, nn: number): Port.Async;
  /** Breath controller LSB: `[Bx 22 nn]`; `x`: channel, `nn`: least significant 7 bits
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#breathLSB */
  breathLSB(x: number, nn: number): Port.Async;
  /** Foot controller MSB: `[Bx 04 nn]`; `x`: channel, `nn`: most significant 7 bits
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#footMSB */
  footMSB(x: number, nn: number): Port.Async;
  /** Foot controller LSB: `[Bx 24 nn]`; `x`: channel, `nn`: least significant 7 bits
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#footLSB */
  footLSB(x: number, nn: number): Port.Async;
  /** Portamento MSB: `[Bx 05 nn]`; `x`: channel, `nn`: most significant 7 bits
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#portamentoMSB */
  portamentoMSB(x: number, nn: number): Port.Async;
  /** Portamento LSB: `[Bx 25 nn]`; `x`: channel, `nn`: least significant 7 bits
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#portamentoLSB */
  portamentoLSB(x: number, nn: number): Port.Async;
  /** Volume MSB: `[Bx 07 nn]`; `x`: channel, `nn`: most significant 7 bits
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#volumeMSB */
  volumeMSB(x: number, nn: number): Port.Async;
  /** Volume LSB: `[Bx 27 nn]`; `x`: channel, `nn`: least significant 7 bits
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#volumeLSB */
  volumeLSB(x: number, nn: number): Port.Async;
  /** Balance MSB: `[Bx 08 nn]`; `x`: channel, `nn`: most significant 7 bits
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#balanceMSB */
  balanceMSB(x: number, nn: number): Port.Async;
  /** Balance LSB: `[Bx 28 nn]`; `x`: channel, `nn`: least significant 7 bits
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#balanceLSB */
  balanceLSB(x: number, nn: number): Port.Async;
  /** Pan MSB: `[Bx 0A nn]`; `x`: channel, `nn`: most significant 7 bits
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#panMSB */
  panMSB(x: number, nn: number): Port.Async;
  /** Pan LSB: `[Bx 2A nn]`; `x`: channel, `nn`: least significant 7 bits
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#panLSB */
  panLSB(x: number, nn: number): Port.Async;
  /** Expression MSB: `[Bx 0B nn]`; `x`: channel, `nn`: most significant 7 bits
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#expressionMSB */
  expressionMSB(x: number, nn: number): Port.Async;
  /** Expression LSB: `[Bx 2B nn]`; `x`: channel, `nn`: least significant 7 bits
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#expressionLSB */
  expressionLSB(x: number, nn: number): Port.Async;
  /** Damper on/off: `[Bx 40 nn]`; `x`: channel, `nn`: `bb ? 7f : 00`
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#damper */
  damper(x: number, bb: boolean): Port.Async;
  /** Portamento on/off: `[Bx 41 nn]`; `x`: channel, `nn`: `bb ? 7f : 00`
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#portamento */
  portamento(x: number, bb: boolean): Port.Async;
  /** Sostenuto on/off: `[Bx 42 nn]`; `x`: channel, `nn`: `bb ? 7f : 00`
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#sostenuto */
  sostenuto(x: number, bb: boolean): Port.Async;
  /** Soft on/off: `[Bx 43 nn]`; `x`: channel, `nn`: `bb ? 7f : 00`
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#soft */
  soft(x: number, bb: boolean): Port.Async;
  /** All sound off: `[Bx 78 00]`; `x`: channel
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#allSoundOff */
  allSoundOff(x: number): Port.Async;
  /** All notes off: `[Bx 7B 00]`; `x`: channel
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#allNotesOff */
  allNotesOff(x: number): Port.Async;
  // Channel-independent
  /** Song position: `[F2 lsb msb]`; `msb`/`lsb`: most/least significant 7 bits
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#songPosition */
  songPosition(nn: number): Port.Async;
  /** Song select: `[F3 nn]`; `nn`: song number
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#songSelect */
  songSelect(nn: number): Port.Async;
  /** Tune: `[F6]`
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#tune */
  tune(): Port.Async;
  /** Clock: `[F8]`
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#clock */
  clock(): Port.Async;
  /** Start: `[FA]`
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#start */
  start(): Port.Async;
  /** Continue: `[FB]`
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#continue */
  continue(): Port.Async;
  /** Stop: `[FC]`
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#stop */
  stop(): Port.Async;
  /** Active sense signal: `[FE]`
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#active */
  active(): Port.Async;
  /** Reset: `[FF]`
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#reset */
  reset(): Port.Async;
  /** ID Request SysEx: `[F0 7E 7F 06 01 F7]`
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#sxIdRequest */
  sxIdRequest(): Port.Async;
  /** MIDI time code (SMPTE quarter frame): `[F1 xx]`
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#mtc */
  mtc(t: SMPTE): Port.Async;
  /** SMPTE Full Frame SysEx: `[F0 7F 7F 01 01 xx xx xx xx F7]`
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#sxFullFrame */
  sxFullFrame(t: SMPTE): Port.Async;
  // SMF
  /** Standard MIDI File meta event: [FFxx len data]
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#smf */
  smf(...args: any): Port.Async;
  /** SMF Sequence Number: [FF00 02 ssss]
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfSeqNumber */
  smfSeqNumber(ssss: number): Port.Async;
  /** SMF Text: [FF01 len text]; used in Karaoke files
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfText */
  smfText(str: string): Port.Async;
  /** SMF Copyright: [FF02 len text]
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfCopyright */
  smfCopyright(str: string): Port.Async;
  /** SMF Sequence Name: [FF03 len text]
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfSeqName */
  smfSeqName(str: string): Port.Async;
  /** SMF Instrument Name: [FF04 len text]
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfInstrName */
  smfInstrName(str: string): Port.Async;
  /** SMF Lyric: [FF05 len text]
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfLyric */
  smfLyric(str: string): Port.Async;
  /** SMF Marker: [FF06 len text]
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfMarker */
  smfMarker(str: string): Port.Async;
  /** SMF Cue Point: [FF07 len text]
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfCuePoint */
  smfCuePoint(str: string): Port.Async;
  /** SMF Program Name: [FF08 len text]
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfProgName */
  smfProgName(str: string): Port.Async;
  /** SMF Device Name: [FF09 len text]
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfDevName */
  smfDevName(str: string): Port.Async;
  /** SMF Channel Prefix: [FF20 01 cc]
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfChannelPrefix */
  smfChannelPrefix(cc: number): Port.Async;
  /** SMF MIDI Port [FF21 01 pp]
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfMidiPort */
  smfMidiPort(pp: number): Port.Async;
  /** SMF End of Track: [FF2F 00]
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfEndOfTrack */
  smfEndOfTrack(): Port.Async;
  /** SMF Tempo: [FF51 03 tttttt]
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfTempo */
  smfTempo(tttttt: number): Port.Async;
  /** SMF Tempo, BPM: [FF51 03 tttttt]
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfBPM */
  smfBPM(bpm: number): Port.Async;
  /** SMF SMPTE offset: [FF54 05 hh mm ss fr ff]
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfSMPTE */
  smfSMPTE(smpte: SMPTE | number[]): Port.Async;
  /** SMF Time Signature: [FF58 04 nn dd cc bb]
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfTimeSignature */
  smfTimeSignature(nn: number, dd: number, cc?: number, bb?: number): Port.Async;
  /** SMF Key Signature: [FF59 02 sf mi]
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfKeySignature */
  smfKeySignature(key: string): Port.Async;
  /** SMF Sequencer-specific Data: [FF7F len data]
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfSequencer */
  smfSequencer(data: string): Port.Async;
}
/** Companion namespace for `Channel`: `Channel.Async` is a `Channel` that is also thenable (PromiseLike). */
namespace Channel {
  interface Async extends Channel, PromiseLike<Channel> {}
}
/** A `Port` bound to a specific MIDI channel: same API as `Port` but channel-voice
 * methods omit the channel argument `x`; chainable like `Stub`. */
interface Channel {
  // Stub
  /** Print if OK
   *
   * https://jazz-soft.net/doc/JZZ/common.html#and */
  and(text: string): Channel.Async;
  /** Execute if OK
   *
   * https://jazz-soft.net/doc/JZZ/common.html#and */
  and(func: (self?: Stub) => void): Channel.Async;
  /** Print if not OK
   *
   * https://jazz-soft.net/doc/JZZ/common.html#or */
  or(text: string): Channel.Async;
  /** Execute if not OK
   *
   * https://jazz-soft.net/doc/JZZ/common.html#or */
  or(func: (self?: Stub) => void): Channel.Async;
  /** Wait `ms` milliseconds
   *
   * https://jazz-soft.net/doc/JZZ/common.html#wait */
  wait(ms: number): Channel.Async;
  // Port
  /** Return an `info` object
   *
   * https://jazz-soft.net/doc/JZZ/midiin.html#info */
  info(): any;
  /** Return the port name
   *
   * https://jazz-soft.net/doc/JZZ/midiin.html#name */
  name(): string;
  /** Connect MIDI port
   *
   * https://jazz-soft.net/doc/JZZ/midiin.html#connect */
  connect(arg: any): Channel.Async;
  /** Disconnect MIDI port
   *
   * https://jazz-soft.net/doc/JZZ/midiin.html#disconnect */
  disconnect(arg?: any): Channel.Async;
  /** Send MIDI message
   *
   * https://jazz-soft.net/doc/JZZ/midiout.html#send */
  send(...args: any[]): Channel.Async;
  /** Emit MIDI message
   *
   * https://jazz-soft.net/doc/JZZ/midithru.html#emit */
  emit(...args: any[]): Channel.Async;
  /** Emit MIDI message
   *
   * https://jazz-soft.net/doc/JZZ/midithru.html#emit */
  _emit(...args: any[]): void;
  /** Close MIDI port
   *
   * https://jazz-soft.net/doc/JZZ/midiin.html#close */
  close(): Stub.Async;
  /** MIDI channel
   *
   * https://jazz-soft.net/doc/JZZ/midiout.html#ch */
  ch(x: number): Channel.Async;
  /** MIDI channel
   *
   * https://jazz-soft.net/doc/JZZ/midiout.html#ch */
  ch(): Port.Async;
  /** MPE channel
   *
   * https://jazz-soft.net/doc/JZZ/midiout.html#mpe */
  mpe(m: number, n: number): MPE.Async;
  /** MPE channel
   *
   * https://jazz-soft.net/doc/JZZ/midiout.html#mpe */
  mpe(): Port.Async;
  /** Play note
   *
   * https://jazz-soft.net/doc/JZZ/midiout.html#note */
  note(nn: number | string, vv?: number, tt?: number): Channel.Async;
  // Channel-dependent
  /** Note On: `[9x nn vv]`; `x`: channel, `nn`: note, `vv`: velocity
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#noteOn */
  noteOn(nn: number | string, vv?: number): Channel.Async;
  /** Note Off: `[8x nn vv]`; `x`: channel, `nn`: note, `vv`: velocity
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#noteOff */
  noteOff(nn: number | string, vv?: number): Channel.Async;
  /** Polyphonic aftertouch: `[Ax nn vv]`; `x`: channel, `nn`: note, `vv`: value
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#aftertouch */
  aftertouch(nn: number | string, vv: number): Channel.Async;
  /** MIDI control: `[Bx nn vv]`; `x`: channel, `nn`: function, `vv`: value
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#control */
  control(nn: number, vv: number): Channel.Async;
  /** Program change: `[Cx nn]`; `x`: channel, `nn`: program
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#program */
  program(nn: number | string): Channel.Async;
  /** Pressure: `[Dx nn]`; `x`: channel, `nn`: pressure
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#pressure */
  pressure(nn: number): Channel.Async;
  /** Pitch bend: `[Ex lsb msb]`; `x`: channel, `msb`/`lsb`: most/least significant 7 bits
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#pitchBend */
  pitchBend(nn: number): Channel.Async;
  /** Bank select MSB: `[Bx 00 nn]`; `x`: channel, `nn`: most significant 7 bits
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#bankMSB */
  bankMSB(nn: number): Channel.Async;
  /** Bank select LSB: `[Bx 20 nn]`; `x`: channel, `nn`: least significant 7 bits
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#bankLSB */
  bankLSB(nn: number): Channel.Async;
  /** Modulation MSB: `[Bx 01 nn]`; `x`: channel, `nn`: most significant 7 bits
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#modMSB */
  modMSB(nn: number): Channel.Async;
  /** Modulation LSB: `[Bx 21 nn]`; `x`: channel, `nn`: least significant 7 bits
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#modLSB */
  modLSB(nn: number): Channel.Async;
  /** Breath controller MSB: `[Bx 02 nn]`; `x`: channel, `nn`: most significant 7 bits
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#breathMSB */
  breathMSB(nn: number): Channel.Async;
  /** Breath controller LSB: `[Bx 22 nn]`; `x`: channel, `nn`: least significant 7 bits
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#breathLSB */
  breathLSB(nn: number): Channel.Async;
  /** Foot controller MSB: `[Bx 04 nn]`; `x`: channel, `nn`: most significant 7 bits
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#footMSB */
  footMSB(nn: number): Channel.Async;
  /** Foot controller LSB: `[Bx 24 nn]`; `x`: channel, `nn`: least significant 7 bits
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#footLSB */
  footLSB(nn: number): Channel.Async;
  /** Portamento MSB: `[Bx 05 nn]`; `x`: channel, `nn`: most significant 7 bits
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#portamentoMSB */
  portamentoMSB(nn: number): Channel.Async;
  /** Portamento LSB: `[Bx 25 nn]`; `x`: channel, `nn`: least significant 7 bits
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#portamentoLSB */
  portamentoLSB(nn: number): Channel.Async;
  /** Volume MSB: `[Bx 07 nn]`; `x`: channel, `nn`: most significant 7 bits
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#volumeMSB */
  volumeMSB(nn: number): Channel.Async;
  /** Volume LSB: `[Bx 27 nn]`; `x`: channel, `nn`: least significant 7 bits
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#volumeLSB */
  volumeLSB(nn: number): Channel.Async;
  /** Balance MSB: `[Bx 08 nn]`; `x`: channel, `nn`: most significant 7 bits
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#balanceMSB */
  balanceMSB(nn: number): Channel.Async;
  /** Balance LSB: `[Bx 28 nn]`; `x`: channel, `nn`: least significant 7 bits
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#balanceLSB */
  balanceLSB(nn: number): Channel.Async;
  /** Pan MSB: `[Bx 0A nn]`; `x`: channel, `nn`: most significant 7 bits
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#panMSB */
  panMSB(nn: number): Channel.Async;
  /** Pan LSB: `[Bx 2A nn]`; `x`: channel, `nn`: least significant 7 bits
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#panLSB */
  panLSB(nn: number): Channel.Async;
  /** Expression MSB: `[Bx 0B nn]`; `x`: channel, `nn`: most significant 7 bits
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#expressionMSB */
  expressionMSB(nn: number): Channel.Async;
  /** Expression LSB: `[Bx 2B nn]`; `x`: channel, `nn`: least significant 7 bits
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#expressionLSB */
  expressionLSB(nn: number): Channel.Async;
  /** Damper on/off: `[Bx 40 nn]`; `x`: channel, `nn`: `bb ? 7f : 00`
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#damper */
  damper(bb: boolean): Channel.Async;
  /** Portamento on/off: `[Bx 41 nn]`; `x`: channel, `nn`: `bb ? 7f : 00`
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#portamento */
  portamento(bb: boolean): Channel.Async;
  /** Sostenuto on/off: `[Bx 42 nn]`; `x`: channel, `nn`: `bb ? 7f : 00`
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#sostenuto */
  sostenuto(bb: boolean): Channel.Async;
  /** Soft on/off: `[Bx 43 nn]`; `x`: channel, `nn`: `bb ? 7f : 00`
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#soft */
  soft(bb: boolean): Channel.Async;
  /** All sound off: `[Bx 78 00]`; `x`: channel
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#allSoundOff */
  allSoundOff(): Channel.Async;
  /** All notes off: `[Bx 7B 00]`; `x`: channel
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#allNotesOff */
  allNotesOff(): Channel.Async;
  // Channel-independent
  /** Song position: `[F2 lsb msb]`; `msb`/`lsb`: most/least significant 7 bits
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#songPosition */
  songPosition(nn: number): Channel.Async;
  /** Song select: `[F3 nn]`; `nn`: song number
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#songSelect */
  songSelect(nn: number): Channel.Async;
  /** Tune: `[F6]`
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#tune */
  tune(): Channel.Async;
  /** Clock: `[F8]`
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#clock */
  clock(): Channel.Async;
  /** Start: `[FA]`
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#start */
  start(): Channel.Async;
  /** Continue: `[FB]`
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#continue */
  continue(): Channel.Async;
  /** Stop: `[FC]`
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#stop */
  stop(): Channel.Async;
  /** Active sense signal: `[FE]`
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#active */
  active(): Channel.Async;
  /** Reset: `[FF]`
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#reset */
  reset(): Channel.Async;
  /** ID Request SysEx: `[F0 7E 7F 06 01 F7]`
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#sxIdRequest */
  sxIdRequest(): Channel.Async;
  /** MIDI time code (SMPTE quarter frame): `[F1 xx]`
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#mtc */
  mtc(t: SMPTE): Channel.Async;
  /** SMPTE Full Frame SysEx: `[F0 7F 7F 01 01 xx xx xx xx F7]`
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#sxFullFrame */
  sxFullFrame(t: SMPTE): Channel.Async;
  // SMF
  /** Standard MIDI File meta event: [FFxx len data]
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#smf */
  smf(...args: any): Channel.Async;
  /** SMF Sequence Number: [FF00 02 ssss]
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfSeqNumber */
  smfSeqNumber(ssss: number): Channel.Async;
  /** SMF Text: [FF01 len text]; used in Karaoke files
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfText */
  smfText(str: string): Channel.Async;
  /** SMF Copyright: [FF02 len text]
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfCopyright */
  smfCopyright(str: string): Channel.Async;
  /** SMF Sequence Name: [FF03 len text]
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfSeqName */
  smfSeqName(str: string): Channel.Async;
  /** SMF Instrument Name: [FF04 len text]
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfInstrName */
  smfInstrName(str: string): Channel.Async;
  /** SMF Lyric: [FF05 len text]
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfLyric */
  smfLyric(str: string): Channel.Async;
  /** SMF Marker: [FF06 len text]
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfMarker */
  smfMarker(str: string): Channel.Async;
  /** SMF Cue Point: [FF07 len text]
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfCuePoint */
  smfCuePoint(str: string): Channel.Async;
  /** SMF Program Name: [FF08 len text]
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfProgName */
  smfProgName(str: string): Channel.Async;
  /** SMF Device Name: [FF09 len text]
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfDevName */
  smfDevName(str: string): Channel.Async;
  /** SMF Channel Prefix: [FF20 01 cc]
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfChannelPrefix */
  smfChannelPrefix(cc: number): Channel.Async;
  /** SMF MIDI Port [FF21 01 pp]
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfMidiPort */
  smfMidiPort(pp: number): Channel.Async;
  /** SMF End of Track: [FF2F 00]
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfEndOfTrack */
  smfEndOfTrack(): Channel.Async;
  /** SMF Tempo: [FF51 03 tttttt]
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfTempo */
  smfTempo(tttttt: number): Channel.Async;
  /** SMF Tempo, BPM: [FF51 03 tttttt]
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfBPM */
  smfBPM(bpm: number): Channel.Async;
  /** SMF SMPTE offset: [FF54 05 hh mm ss fr ff]
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfSMPTE */
  smfSMPTE(smpte: SMPTE | number[]): Channel.Async;
  /** SMF Time Signature: [FF58 04 nn dd cc bb]
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfTimeSignature */
  smfTimeSignature(nn: number, dd: number, cc?: number, bb?: number): Channel.Async;
  /** SMF Key Signature: [FF59 02 sf mi]
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfKeySignature */
  smfKeySignature(key: string): Channel.Async;
  /** SMF Sequencer-specific Data: [FF7F len data]
   *
   * https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfSequencer */
  smfSequencer(data: string): Channel.Async;
}
/** Companion namespace for `MPE`: `MPE.Async` is an `MPE` channel that is also thenable (PromiseLike). */
namespace MPE {
  interface Async extends MPE, PromiseLike<MPE> {}
}
interface MPE {
// Stub
/** Print if OK
*
* https://jazz-soft.net/doc/JZZ/common.html#and */
and(text: string): MPE.Async;
/** Execute if OK
*
* https://jazz-soft.net/doc/JZZ/common.html#and */
and(func: (self?: Stub) => void): MPE.Async;
/** Print if not OK
*
* https://jazz-soft.net/doc/JZZ/common.html#or */
or(text: string): MPE.Async;
/** Execute if not OK
*
* https://jazz-soft.net/doc/JZZ/common.html#or */
or(func: (self?: Stub) => void): MPE.Async;
/** Wait `ms` milliseconds
*
* https://jazz-soft.net/doc/JZZ/common.html#wait */
wait(ms: number): MPE.Async;
// Port
/** Return an `info` object
*
* https://jazz-soft.net/doc/JZZ/midiin.html#info */
info(): any;
/** Return the port name
*
* https://jazz-soft.net/doc/JZZ/midiin.html#name */
name(): string;
/** Connect MIDI port
*
* https://jazz-soft.net/doc/JZZ/midiin.html#connect */
connect(arg: any): MPE.Async;
/** Disconnect MIDI port
*
* https://jazz-soft.net/doc/JZZ/midiin.html#disconnect */
disconnect(arg?: any): MPE.Async;
/** Send MIDI message
*
* https://jazz-soft.net/doc/JZZ/midiout.html#send */
send(...args: any[]): MPE.Async;
/** Emit MIDI message
*
* https://jazz-soft.net/doc/JZZ/midithru.html#emit */
emit(...args: any[]): MPE.Async;
/** Emit MIDI message
*
* https://jazz-soft.net/doc/JZZ/midithru.html#emit */
_emit(...args: any[]): void;
/** Close MIDI port
*
* https://jazz-soft.net/doc/JZZ/midiin.html#close */
close(): Stub.Async;
/** MIDI channel
*
* https://jazz-soft.net/doc/JZZ/midiout.html#ch */
ch(x: number): Channel.Async;
/** MIDI channel
*
* https://jazz-soft.net/doc/JZZ/midiout.html#ch */
ch(): Port.Async;
/** MPE channel
*
* https://jazz-soft.net/doc/JZZ/midiout.html#mpe */
mpe(m: number, n: number): MPE.Async;
/** MPE channel
*
* https://jazz-soft.net/doc/JZZ/midiout.html#mpe */
mpe(): Port.Async;
/** Play note
*
* https://jazz-soft.net/doc/JZZ/midiout.html#note */
note(nn: number | string, vv?: number, tt?: number): MPE.Async;
// Channel-dependent
/** Note On: `[9x nn vv]`; `x`: channel, `nn`: note, `vv`: velocity
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#noteOn */
noteOn(nn: number | string, vv?: number): MPE.Async;
/** Note Off: `[8x nn vv]`; `x`: channel, `nn`: note, `vv`: velocity
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#noteOff */
noteOff(nn: number | string, vv?: number): MPE.Async;
/** Polyphonic aftertouch: `[Ax nn vv]`; `x`: channel, `nn`: note, `vv`: value
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#aftertouch */
aftertouch(nn: number | string, vv: number): MPE.Async;
/** MIDI control: `[Bx nn vv]`; `x`: channel, `nn`: function, `vv`: value
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#control */
control(nn: number, vv: number): MPE.Async;
/** Program change: `[Cx nn]`; `x`: channel, `nn`: program
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#program */
program(nn: number | string): MPE.Async;
/** Pressure: `[Dx nn]`; `x`: channel, `nn`: pressure
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#pressure */
pressure(nn: number): MPE.Async;
/** Pitch bend: `[Ex lsb msb]`; `x`: channel, `msb`/`lsb`: most/least significant 7 bits
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#pitchBend */
pitchBend(nn: number): MPE.Async;
/** Bank select MSB: `[Bx 00 nn]`; `x`: channel, `nn`: most significant 7 bits
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#bankMSB */
bankMSB(nn: number): MPE.Async;
/** Bank select LSB: `[Bx 20 nn]`; `x`: channel, `nn`: least significant 7 bits
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#bankLSB */
bankLSB(nn: number): MPE.Async;
/** Modulation MSB: `[Bx 01 nn]`; `x`: channel, `nn`: most significant 7 bits
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#modMSB */
modMSB(nn: number): MPE.Async;
/** Modulation LSB: `[Bx 21 nn]`; `x`: channel, `nn`: least significant 7 bits
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#modLSB */
modLSB(nn: number): MPE.Async;
/** Breath controller MSB: `[Bx 02 nn]`; `x`: channel, `nn`: most significant 7 bits
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#breathMSB */
breathMSB(nn: number): MPE.Async;
/** Breath controller LSB: `[Bx 22 nn]`; `x`: channel, `nn`: least significant 7 bits
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#breathLSB */
breathLSB(nn: number): MPE.Async;
/** Foot controller MSB: `[Bx 04 nn]`; `x`: channel, `nn`: most significant 7 bits
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#footMSB */
footMSB(nn: number): MPE.Async;
/** Foot controller LSB: `[Bx 24 nn]`; `x`: channel, `nn`: least significant 7 bits
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#footLSB */
footLSB(nn: number): MPE.Async;
/** Portamento MSB: `[Bx 05 nn]`; `x`: channel, `nn`: most significant 7 bits
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#portamentoMSB */
portamentoMSB(nn: number): MPE.Async;
/** Portamento LSB: `[Bx 25 nn]`; `x`: channel, `nn`: least significant 7 bits
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#portamentoLSB */
portamentoLSB(nn: number): MPE.Async;
/** Volume MSB: `[Bx 07 nn]`; `x`: channel, `nn`: most significant 7 bits
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#volumeMSB */
volumeMSB(nn: number): MPE.Async;
/** Volume LSB: `[Bx 27 nn]`; `x`: channel, `nn`: least significant 7 bits
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#volumeLSB */
volumeLSB(nn: number): MPE.Async;
/** Balance MSB: `[Bx 08 nn]`; `x`: channel, `nn`: most significant 7 bits
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#balanceMSB */
balanceMSB(nn: number): MPE.Async;
/** Balance LSB: `[Bx 28 nn]`; `x`: channel, `nn`: least significant 7 bits
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#balanceLSB */
balanceLSB(nn: number): MPE.Async;
/** Pan MSB: `[Bx 0A nn]`; `x`: channel, `nn`: most significant 7 bits
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#panMSB */
panMSB(nn: number): MPE.Async;
/** Pan LSB: `[Bx 2A nn]`; `x`: channel, `nn`: least significant 7 bits
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#panLSB */
panLSB(nn: number): MPE.Async;
/** Expression MSB: `[Bx 0B nn]`; `x`: channel, `nn`: most significant 7 bits
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#expressionMSB */
expressionMSB(nn: number): MPE.Async;
/** Expression LSB: `[Bx 2B nn]`; `x`: channel, `nn`: least significant 7 bits
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#expressionLSB */
expressionLSB(nn: number): MPE.Async;
/** Damper on/off: `[Bx 40 nn]`; `x`: channel, `nn`: `bb ? 7f : 00`
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#damper */
damper(bb: boolean): MPE.Async;
/** Portamento on/off: `[Bx 41 nn]`; `x`: channel, `nn`: `bb ? 7f : 00`
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#portamento */
portamento(bb: boolean): MPE.Async;
/** Sostenuto on/off: `[Bx 42 nn]`; `x`: channel, `nn`: `bb ? 7f : 00`
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#sostenuto */
sostenuto(bb: boolean): MPE.Async;
/** Soft on/off: `[Bx 43 nn]`; `x`: channel, `nn`: `bb ? 7f : 00`
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#soft */
soft(bb: boolean): MPE.Async;
/** All sound off: `[Bx 78 00]`; `x`: channel
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#allSoundOff */
allSoundOff(): MPE.Async;
/** All notes off: `[Bx 7B 00]`; `x`: channel
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#allNotesOff */
allNotesOff(): MPE.Async;
// Channel-independent
/** Song position: `[F2 lsb msb]`; `msb`/`lsb`: most/least significant 7 bits
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#songPosition */
songPosition(nn: number): MPE.Async;
/** Song select: `[F3 nn]`; `nn`: song number
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#songSelect */
songSelect(nn: number): MPE.Async;
/** Tune: `[F6]`
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#tune */
tune(): MPE.Async;
/** Clock: `[F8]`
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#clock */
clock(): MPE.Async;
/** Start: `[FA]`
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#start */
start(): MPE.Async;
/** Continue: `[FB]`
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#continue */
continue(): MPE.Async;
/** Stop: `[FC]`
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#stop */
stop(): MPE.Async;
/** Active sense signal: `[FE]`
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#active */
active(): MPE.Async;
/** Reset: `[FF]`
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#reset */
reset(): MPE.Async;
/** ID Request SysEx: `[F0 7E 7F 06 01 F7]`
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#sxIdRequest */
sxIdRequest(): MPE.Async;
/** MIDI time code (SMPTE quarter frame): `[F1 xx]`
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#mtc */
mtc(t: SMPTE): MPE.Async;
/** SMPTE Full Frame SysEx: `[F0 7F 7F 01 01 xx xx xx xx F7]`
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#sxFullFrame */
sxFullFrame(t: SMPTE): MPE.Async;
// SMF
/** Standard MIDI File meta event: [FFxx len data]
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#smf */
smf(...args: any): MPE.Async;
/** SMF Sequence Number: [FF00 02 ssss]
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfSeqNumber */
smfSeqNumber(ssss: number): MPE.Async;
/** SMF Text: [FF01 len text]; used in Karaoke files
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfText */
smfText(str: string): MPE.Async;
/** SMF Copyright: [FF02 len text]
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfCopyright */
smfCopyright(str: string): MPE.Async;
/** SMF Sequence Name: [FF03 len text]
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfSeqName */
smfSeqName(str: string): MPE.Async;
/** SMF Instrument Name: [FF04 len text]
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfInstrName */
smfInstrName(str: string): MPE.Async;
/** SMF Lyric: [FF05 len text]
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfLyric */
smfLyric(str: string): MPE.Async;
/** SMF Marker: [FF06 len text]
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfMarker */
smfMarker(str: string): MPE.Async;
/** SMF Cue Point: [FF07 len text]
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfCuePoint */
smfCuePoint(str: string): MPE.Async;
/** SMF Program Name: [FF08 len text]
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfProgName */
smfProgName(str: string): MPE.Async;
/** SMF Device Name: [FF09 len text]
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfDevName */
smfDevName(str: string): MPE.Async;
/** SMF Channel Prefix: [FF20 01 cc]
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfChannelPrefix */
smfChannelPrefix(cc: number): MPE.Async;
/** SMF MIDI Port [FF21 01 pp]
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfMidiPort */
smfMidiPort(pp: number): MPE.Async;
/** SMF End of Track: [FF2F 00]
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfEndOfTrack */
smfEndOfTrack(): MPE.Async;
/** SMF Tempo: [FF51 03 tttttt]
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfTempo */
smfTempo(tttttt: number): MPE.Async;
/** SMF Tempo, BPM: [FF51 03 tttttt]
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfBPM */
smfBPM(bpm: number): MPE.Async;
/** SMF SMPTE offset: [FF54 05 hh mm ss fr ff]
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfSMPTE */
smfSMPTE(smpte: SMPTE | number[]): MPE.Async;
/** SMF Time Signature: [FF58 04 nn dd cc bb]
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfTimeSignature */
smfTimeSignature(nn: number, dd: number, cc?: number, bb?: number): MPE.Async;
/** SMF Key Signature: [FF59 02 sf mi]
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfKeySignature */
smfKeySignature(key: string): MPE.Async;
/** SMF Sequencer-specific Data: [FF7F len data]
*
* https://jazz-soft.net/doc/JZZ/jzzmidi.html#smfSequencer */
smfSequencer(data: string): MPE.Async;
}
namespace Watcher {
    /** Promise-flavored Watcher: thenable that resolves to the Watcher itself, so calls can be chained or awaited. */
    interface Async extends Watcher, PromiseLike<Watcher> {}
}
/** Engine handle used to watch MIDI port connection/disconnection changes. */
interface Watcher extends Engine {
    /** Add the Watcher handle
     *
     * https://jazz-soft.net/doc/JZZ/jzz.html#connect */
    connect(...args: any[]): Watcher.Async;
    /** Remove the Watcher handle
     *
     * https://jazz-soft.net/doc/JZZ/jzz.html#disconnect */
    disconnect(...args: any[]): Watcher.Async;
}
/** Low-level helpers: virtual MIDI port registration, audio context access,
 * and Base64/UTF8 string codecs (https://jazz-soft.net/doc/JZZ/lib.html). */
interface lib {
    /** Open virtual MIDI-In port
     *
     * https://jazz-soft.net/doc/JZZ/lib.html#openMidiIn */
    openMidiIn(...args: any[]): boolean;
    /** Open virtual MIDI-Out port
     *
     * https://jazz-soft.net/doc/JZZ/lib.html#openMidiOut */
    openMidiOut(...args: any[]): boolean;
    /** Register virtual MIDI-In port
     *
     * https://jazz-soft.net/doc/JZZ/lib.html#registerMidiIn */
    registerMidiIn(...args: any[]): boolean;
    /** Register virtual MIDI-Out port
     *
     * https://jazz-soft.net/doc/JZZ/lib.html#registerMidiOut */
    registerMidiOut(...args: any[]): boolean;
    /** Activate and return window.AudioContext
     *
     * https://jazz-soft.net/doc/JZZ/lib.html#getAudioContext */
    getAudioContext(): any;
    /** Encode string to Base64
     *
     * https://jazz-soft.net/doc/JZZ/lib.html#toBase64 */
    toBase64(txt: string): string;
    /** Decode string from Base64
     *
     * https://jazz-soft.net/doc/JZZ/lib.html#fromBase64 */
    fromBase64(txt: string): string;
    /** Encode string to UTF8
     *
     * https://jazz-soft.net/doc/JZZ/lib.html#toUTF8 */
    toUTF8(txt: string): string;
    /** Decode string from UTF8
     *
     * https://jazz-soft.net/doc/JZZ/lib.html#fromUTF8 */
    fromUTF8(txt: string): string;
}
}
/** The callable JZZ module: invoking it starts the MIDI engine; its
 * properties expose constructors and helpers of the library. */
interface JZZ {
    readonly lib: JZZ.lib;
    /** Start MIDI engine
     *
     * https://jazz-soft.net/doc/JZZ/jzz.html#jzz */
    (arg?: any): JZZ.Engine.Async;
    /** Return an `info` object
     *
     * https://jazz-soft.net/doc/JZZ/jzz.html#info */
    readonly info: () => any;
    /** Create virtual MIDI port
     *
     * https://jazz-soft.net/doc/JZZ/midithru.html#Widget */
    readonly Widget: (...args: any[]) => JZZ.Port;
    /** MIDI message
     *
     * https://jazz-soft.net/doc/JZZ/jzzmidi.html */
    readonly MIDI: JZZ.MIDI.Constructor;
    /** SMPTE message
     *
     * https://jazz-soft.net/doc/JZZ/smpte.html */
    readonly SMPTE: JZZ.SMPTE.Constructor;
    /** Invoke Web MIDI API
     *
     * https://jazz-soft.net/doc/JZZ/webmidi.html */
    readonly requestMIDIAccess: (options?: WebMidi.MIDIOptions) => Promise<WebMidi.MIDIAccess>;
}
/** Module export: the callable JZZ entry point (CommonJS `export =` style). */
declare const jzz: JZZ;
export = jzz;
import {
concat as observableConcat,
empty as observableEmpty,
from as observableFrom,
of as observableOf,
Observable,
Subject,
Subscription,
} from "rxjs";
import {
catchError,
expand,
finalize,
first,
map,
mergeMap,
publishReplay,
refCount,
startWith,
tap,
takeLast,
} from "rxjs/operators";
import { FilterFunction } from "./FilterCreator";
import { FilterExpression } from "./FilterExpression";
import { Graph } from "./Graph";
import { GraphMode } from "./GraphMode";
import { Image } from "./Image";
import { Sequence } from "./Sequence";
import { LngLat } from "../api/interfaces/LngLat";
import { SubscriptionHolder } from "../util/SubscriptionHolder";
import { ProviderCellEvent } from "../api/events/ProviderCellEvent";
import { GraphMapillaryError } from "../error/GraphMapillaryError";
/**
 * @class GraphService
 *
 * @classdesc Represents a service for graph operations.
 */
export class GraphService {
    // Hot, replay(1) stream of the graph; re-emits on every graph change.
    private _graph$: Observable<Graph>;
    private _graphMode: GraphMode;
    private _graphMode$: Observable<GraphMode>;
    private _graphModeSubject$: Subject<GraphMode>;
    // One subject per in-flight cacheImage$ call; completed or errored when
    // the image is cached in the graph (or the request is aborted).
    private _firstGraphSubjects$: Subject<Graph>[];
    private _dataAdded$: Subject<string> = new Subject<string>();
    // In-flight subscriptions tracked so they can be aborted on reset/mode change.
    private _initializeCacheSubscriptions: Subscription[];
    private _sequenceSubscriptions: Subscription[];
    private _spatialSubscriptions: Subscription[];
    private _subscriptions: SubscriptionHolder = new SubscriptionHolder();

    /**
     * Create a new graph service instance.
     *
     * @param {Graph} graph - Graph instance to be operated on.
     */
    constructor(graph: Graph) {
        const subs = this._subscriptions;

        // Replay the latest graph state to every new subscriber.
        this._graph$ = observableConcat(
            observableOf(graph),
            graph.changed$).pipe(
                publishReplay(1),
                refCount());

        // Keep-alive subscription so the replayed stream stays hot.
        subs.push(this._graph$.subscribe(() => { /*noop*/ }));

        this._graphMode = GraphMode.Spatial;
        this._graphModeSubject$ = new Subject<GraphMode>();
        this._graphMode$ = this._graphModeSubject$.pipe(
            startWith(this._graphMode),
            publishReplay(1),
            refCount());

        subs.push(this._graphMode$.subscribe(() => { /*noop*/ }));

        this._firstGraphSubjects$ = [];
        this._initializeCacheSubscriptions = [];
        this._sequenceSubscriptions = [];
        this._spatialSubscriptions = [];

        // Forward provider cell-data creation events (see _onDataAdded).
        graph.api.data.on("datacreate", this._onDataAdded);
    }

    /**
     * Get dataAdded$.
     *
     * @returns {Observable<string>} Observable emitting
     * a cell id every time data has been added to a cell.
     */
    public get dataAdded$(): Observable<string> {
        return this._dataAdded$;
    }

    /**
     * Get filter observable.
     *
     * @description Emits the filter every time it has changed.
     *
     * @returns {Observable<FilterFunction>} Observable
     * emitting the filter function every time it is set.
     */
    public get filter$(): Observable<FilterFunction> {
        return this._graph$.pipe(
            first(),
            mergeMap(
                (graph: Graph): Observable<FilterFunction> => {
                    return graph.filter$;
                }));
    }

    /**
     * Get graph mode observable.
     *
     * @description Emits the current graph mode.
     *
     * @returns {Observable<GraphMode>} Observable
     * emitting the current graph mode when it changes.
     */
    public get graphMode$(): Observable<GraphMode> {
        return this._graphMode$;
    }

    /**
     * Cache full images in a bounding box.
     *
     * @description When called, the full properties of
     * the image are retrieved. The image cache is not initialized
     * for any new images retrieved and the image assets are not
     * retrieved, {@link cacheImage$} needs to be called for caching
     * assets.
     *
     * @param {LngLat} sw - South west corner of bounding box.
     * @param {LngLat} ne - North east corner of bounding box.
     * @return {Observable<Array<Image>>} Observable emitting a single item,
     * the images of the bounding box, when they have all been retrieved.
     * @throws {Error} Propagates any IO image caching errors to the caller.
     */
    public cacheBoundingBox$(sw: LngLat, ne: LngLat): Observable<Image[]> {
        return this._graph$.pipe(
            first(),
            mergeMap(
                (graph: Graph): Observable<Image[]> => {
                    return graph.cacheBoundingBox$(sw, ne);
                }));
    }

    /**
     * Cache full images in a cell.
     *
     * @description When called, the full properties of
     * the image are retrieved. The image cache is not initialized
     * for any new images retrieved and the image assets are not
     * retrieved, {@link cacheImage$} needs to be called for caching
     * assets.
     *
     * @param {string} cellId - Id of the cell.
     * @return {Observable<Array<Image>>} Observable emitting a single item,
     * the images of the cell, when they have all been retrieved.
     * @throws {Error} Propagates any IO image caching errors to the caller.
     */
    public cacheCell$(cellId: string): Observable<Image[]> {
        return this._graph$.pipe(
            first(),
            mergeMap(
                (graph: Graph): Observable<Image[]> => {
                    return graph.cacheCell$(cellId);
                }));
    }

    /**
     * Cache an image in the graph and retrieve it.
     *
     * @description When called, the full properties of
     * the image are retrieved and the image cache is initialized.
     * After that the image assets are cached and the image
     * is emitted to the observable.
     * In parallel to caching the image assets, the sequence and
     * spatial edges of the image are cached. For this, the sequence
     * of the image and the required tiles and spatial images are
     * retrieved. The sequence and spatial edges may be set before
     * or after the image is returned.
     *
     * @param {string} id - Id of the image to cache.
     * @return {Observable<Image>} Observable emitting a single item,
     * the image, when it has been retrieved and its assets are cached.
     * @throws {Error} Propagates any IO image caching errors to the caller.
     */
    public cacheImage$(id: string): Observable<Image> {
        // Subject resolved once the image exists in the graph with an
        // initialized cache; drives asset, sequence and spatial caching below.
        const firstGraphSubject$: Subject<Graph> = new Subject<Graph>();

        this._firstGraphSubjects$.push(firstGraphSubject$);

        const firstGraph$: Observable<Graph> = firstGraphSubject$.pipe(
            publishReplay(1),
            refCount());

        // Stage 1: resolve the image node and cache its assets.
        const image$: Observable<Image> = firstGraph$.pipe(
            map(
                (graph: Graph): Image => {
                    return graph.getNode(id);
                }),
            mergeMap(
                (image: Image): Observable<Image> => {
                    return image.assetsCached ?
                        observableOf(image) :
                        image.cacheAssets$();
                }),
            publishReplay(1),
            refCount());

        // Keep image$ hot so asset caching starts immediately; errors are
        // logged here and surfaced to callers through the returned pipe.
        // NOTE(review): the subscribe(next, error) overload is deprecated in
        // RxJS 7 — consider an observer object when upgrading.
        image$.subscribe(
            undefined,
            (error: Error): void => {
                console.error(`Failed to cache image (${id}).`, error);
            });

        // Stage 2: ensure the node is fully cached (full or fill properties)
        // and its cache initialized, then signal firstGraphSubject$.
        let initializeCacheSubscription: Subscription;
        initializeCacheSubscription = this._graph$.pipe(
            first(),
            mergeMap(
                (graph: Graph): Observable<Graph> => {
                    if (graph.isCachingFull(id) || !graph.hasNode(id)) {
                        return graph.cacheFull$(id);
                    }

                    if (graph.isCachingFill(id) || !graph.getNode(id).complete) {
                        return graph.cacheFill$(id);
                    }

                    return observableOf<Graph>(graph);
                }),
            tap(
                (graph: Graph): void => {
                    if (!graph.hasNode(id)) {
                        throw new GraphMapillaryError(`Failed to cache image (${id})`);
                    }

                    if (!graph.hasInitializedCache(id)) {
                        graph.initializeCache(id);
                    }
                }),
            finalize(
                (): void => {
                    // Guard: finalize may run synchronously before the
                    // subscription variable has been assigned.
                    if (initializeCacheSubscription == null) {
                        return;
                    }

                    this._removeFromArray(initializeCacheSubscription, this._initializeCacheSubscriptions);
                    this._removeFromArray(firstGraphSubject$, this._firstGraphSubjects$);
                }))
            .subscribe(
                (graph: Graph): void => {
                    firstGraphSubject$.next(graph);
                    firstGraphSubject$.complete();
                },
                (error: Error): void => {
                    firstGraphSubject$.error(error);
                });

        // Only track the subscription while it is still in flight.
        if (!initializeCacheSubscription.closed) {
            this._initializeCacheSubscriptions.push(initializeCacheSubscription);
        }

        // Stage 3 (parallel): cache the node sequence and its sequence edges.
        const graphSequence$: Observable<Graph> = firstGraph$.pipe(
            catchError(
                (): Observable<Graph> => {
                    return observableEmpty();
                }),
            mergeMap(
                (graph: Graph): Observable<Graph> => {
                    if (graph.isCachingNodeSequence(id) || !graph.hasNodeSequence(id)) {
                        return graph.cacheNodeSequence$(id);
                    }

                    return observableOf<Graph>(graph);
                }),
            publishReplay(1),
            refCount());

        let sequenceSubscription: Subscription;
        sequenceSubscription = graphSequence$.pipe(
            tap(
                (graph: Graph): void => {
                    if (!graph.getNode(id).sequenceEdges.cached) {
                        graph.cacheSequenceEdges(id);
                    }
                }),
            finalize(
                (): void => {
                    if (sequenceSubscription == null) {
                        return;
                    }

                    this._removeFromArray(sequenceSubscription, this._sequenceSubscriptions);
                }))
            .subscribe(
                (): void => { return; },
                (error: Error): void => {
                    console.error(`Failed to cache sequence edges (${id}).`, error);
                });

        if (!sequenceSubscription.closed) {
            this._sequenceSubscriptions.push(sequenceSubscription);
        }

        // Stage 4 (parallel, spatial mode only): cache tiles, the spatial
        // area and finally the spatial edges of the image.
        if (this._graphMode === GraphMode.Spatial) {
            let spatialSubscription: Subscription;
            spatialSubscription = firstGraph$.pipe(
                catchError(
                    (): Observable<Graph> => {
                        return observableEmpty();
                    }),
                // Repeat tile caching until graph.hasTiles(id) is satisfied.
                expand(
                    (graph: Graph): Observable<Graph> => {
                        if (graph.hasTiles(id)) {
                            return observableEmpty();
                        }

                        return observableFrom(graph.cacheTiles$(id)).pipe(
                            mergeMap(
                                (graph$: Observable<Graph>): Observable<Graph> => {
                                    return graph$.pipe(
                                        mergeMap(
                                            (g: Graph): Observable<Graph> => {
                                                if (g.isCachingTiles(id)) {
                                                    return observableEmpty();
                                                }

                                                return observableOf<Graph>(g);
                                            }),
                                        catchError(
                                            (error: Error): Observable<Graph> => {
                                                console.error(`Failed to cache tile data (${id}).`, error);

                                                return observableEmpty();
                                            }));
                                }));
                    }),
                takeLast(1),
                mergeMap(
                    (graph: Graph): Observable<Graph> => {
                        if (graph.hasSpatialArea(id)) {
                            return observableOf<Graph>(graph);
                        }

                        return observableFrom(graph.cacheSpatialArea$(id)).pipe(
                            mergeMap(
                                (graph$: Observable<Graph>): Observable<Graph> => {
                                    return graph$.pipe(
                                        catchError(
                                            (error: Error): Observable<Graph> => {
                                                console.error(`Failed to cache spatial images (${id}).`, error);

                                                return observableEmpty();
                                            }));
                                }));
                    }),
                takeLast(1),
                mergeMap(
                    (graph: Graph): Observable<Graph> => {
                        return graph.hasNodeSequence(id) ?
                            observableOf<Graph>(graph) :
                            graph.cacheNodeSequence$(id);
                    }),
                tap(
                    (graph: Graph): void => {
                        if (!graph.getNode(id).spatialEdges.cached) {
                            graph.cacheSpatialEdges(id);
                        }
                    }),
                finalize(
                    (): void => {
                        if (spatialSubscription == null) {
                            return;
                        }

                        this._removeFromArray(spatialSubscription, this._spatialSubscriptions);
                    }))
                .subscribe(
                    (): void => { return; },
                    (error: Error): void => {
                        const message =
                            `Failed to cache spatial edges (${id}).`;
                        console.error(message, error);
                    });

            if (!spatialSubscription.closed) {
                this._spatialSubscriptions.push(spatialSubscription);
            }
        }

        // Emit the image once its assets are cached (stage 1 completes).
        return image$.pipe(
            first(
                (image: Image): boolean => {
                    return image.assetsCached;
                }));
    }

    /**
     * Cache a sequence in the graph and retrieve it.
     *
     * @param {string} sequenceId - Sequence id.
     * @returns {Observable<Sequence>} Observable emitting a single item,
     * the sequence, when it has been retrieved and its assets are cached.
     * @throws {Error} Propagates any IO image caching errors to the caller.
     */
    public cacheSequence$(sequenceId: string): Observable<Sequence> {
        return this._graph$.pipe(
            first(),
            mergeMap(
                (graph: Graph): Observable<Graph> => {
                    if (graph.isCachingSequence(sequenceId) || !graph.hasSequence(sequenceId)) {
                        return graph.cacheSequence$(sequenceId);
                    }

                    return observableOf<Graph>(graph);
                }),
            map(
                (graph: Graph): Sequence => {
                    return graph.getSequence(sequenceId);
                }));
    }

    /**
     * Cache a sequence and its images in the graph and retrieve the sequence.
     *
     * @description Caches a sequence and retrieves all images
     * belonging to the sequence. The image assets
     * or edges will not be cached.
     *
     * @param {string} sequenceId - Sequence id.
     * @param {string} referenceImageId - Id of image to use as reference
     * for optimized caching.
     * @returns {Observable<Sequence>} Observable emitting a single item,
     * the sequence, when it has been retrieved, its assets are cached and
     * all images belonging to the sequence has been retrieved.
     * @throws {Error} Propagates any IO image caching errors to the caller.
     */
    public cacheSequenceImages$(sequenceId: string, referenceImageId?: string): Observable<Sequence> {
        return this._graph$.pipe(
            first(),
            mergeMap(
                (graph: Graph): Observable<Graph> => {
                    if (graph.isCachingSequence(sequenceId) || !graph.hasSequence(sequenceId)) {
                        return graph.cacheSequence$(sequenceId);
                    }

                    return observableOf<Graph>(graph);
                }),
            mergeMap(
                (graph: Graph): Observable<Graph> => {
                    if (graph.isCachingSequenceNodes(sequenceId) || !graph.hasSequenceNodes(sequenceId)) {
                        return graph.cacheSequenceNodes$(sequenceId, referenceImageId);
                    }

                    return observableOf<Graph>(graph);
                }),
            map(
                (graph: Graph): Sequence => {
                    return graph.getSequence(sequenceId);
                }));
    }

    /**
     * Dispose the graph service and its children.
     */
    public dispose(): void {
        this._graph$
            .pipe(first())
            .subscribe((graph: Graph) => { graph.unsubscribe(); });
        this._subscriptions.unsubscribe();
    }

    /**
     * Set a spatial edge filter on the graph.
     *
     * @description Resets the spatial edges of all cached images.
     *
     * @param {FilterExpression} filter - Filter expression to be applied.
     * @return {Observable<Graph>} Observable emitting a single item,
     * the graph, when the spatial edges have been reset.
     */
    public setFilter$(filter: FilterExpression): Observable<void> {
        // Abort in-flight spatial caching since its results would be stale.
        this._resetSubscriptions(this._spatialSubscriptions);

        return this._graph$.pipe(
            first(),
            tap(
                (graph: Graph): void => {
                    graph.resetSpatialEdges();
                    graph.setFilter(filter);
                }),
            map(
                (): void => {
                    return undefined;
                }));
    }

    /**
     * Set the graph mode.
     *
     * @description If graph mode is set to spatial, caching
     * is performed with emphasis on spatial edges. If graph
     * mode is set to sequence no tile data is requested and
     * no spatial edges are computed.
     *
     * When setting graph mode to sequence all spatial
     * subscriptions are aborted.
     *
     * @param {GraphMode} mode - Graph mode to set.
     */
    public setGraphMode(mode: GraphMode): void {
        if (this._graphMode === mode) {
            return;
        }

        if (mode === GraphMode.Sequence) {
            this._resetSubscriptions(this._spatialSubscriptions);
        }

        this._graphMode = mode;
        this._graphModeSubject$.next(this._graphMode);
    }

    /**
     * Reset the graph.
     *
     * @description Resets the graph but keeps the images of the
     * supplied ids.
     *
     * @param {Array<string>} keepIds - Ids of images to keep in graph.
     * @return {Observable<Image>} Observable emitting a single item,
     * the graph, when it has been reset.
     */
    public reset$(keepIds: string[]): Observable<void> {
        // Error out pending cacheImage$ requests and abort all caching work.
        this._abortSubjects(this._firstGraphSubjects$);
        this._resetSubscriptions(this._initializeCacheSubscriptions);
        this._resetSubscriptions(this._sequenceSubscriptions);
        this._resetSubscriptions(this._spatialSubscriptions);

        return this._graph$.pipe(
            first(),
            tap(
                (graph: Graph): void => {
                    graph.reset(keepIds);
                }),
            map(
                (): void => {
                    return undefined;
                }));
    }

    /**
     * Uncache the graph.
     *
     * @description Uncaches the graph by removing tiles, images and
     * sequences. Keeps the images of the supplied ids and the tiles
     * related to those images.
     *
     * @param {Array<string>} keepIds - Ids of images to keep in graph.
     * @param {Array<string>} keepCellIds - Ids of cells to keep in graph.
     * @param {string} keepSequenceId - Optional id of sequence
     * for which the belonging images should not be disposed or
     * removed from the graph. These images may still be uncached if
     * not specified in keep ids param.
     * @return {Observable<Graph>} Observable emitting a single item,
     * the graph, when the graph has been uncached.
     */
    public uncache$(
        keepIds: string[],
        keepCellIds: string[],
        keepSequenceId?: string)
        : Observable<void> {
        return this._graph$.pipe(
            first(),
            tap(
                (graph: Graph): void => {
                    graph.uncache(keepIds, keepCellIds, keepSequenceId);
                }),
            map(
                (): void => {
                    return undefined;
                }));
    }

    // Error every pending subject (aborts outstanding cacheImage$ calls)
    // and remove it from the tracking array.
    private _abortSubjects<T>(subjects: Subject<T>[]): void {
        for (const subject of subjects.slice()) {
            this._removeFromArray(subject, subjects);

            subject.error(new Error("Cache image request was aborted."));
        }
    }

    // Provider "datacreate" handler: update affected cells, reset spatial
    // edges so they are recomputed, and notify dataAdded$ listeners.
    private _onDataAdded = (event: ProviderCellEvent): void => {
        this._graph$
            .pipe(
                first(),
                mergeMap(
                    graph => {
                        return graph.updateCells$(event.cellIds).pipe(
                            tap(() => { graph.resetSpatialEdges(); }));
                    }))
            .subscribe(cellId => { this._dataAdded$.next(cellId); });
    };

    // Remove the first occurrence of object from objects, if present.
    private _removeFromArray<T>(object: T, objects: T[]): void {
        const index: number = objects.indexOf(object);
        if (index !== -1) {
            objects.splice(index, 1);
        }
    }

    // Unsubscribe and untrack all subscriptions (iterates over a copy since
    // the array is mutated while looping).
    private _resetSubscriptions(subscriptions: Subscription[]): void {
        for (const subscription of subscriptions.slice()) {
            this._removeFromArray(subscription, subscriptions);

            if (!subscription.closed) {
                subscription.unsubscribe();
            }
        }
    }
}
import * as Immutable from "immutable";
import * as _ from "lodash";
import { Event } from "./event";
import { Key } from "./key";
import { Processor } from "./processor";
import util from "./util";
import { FillMethod, FillOptions } from "./types";
/**
 * A processor that fills missing/invalid values in the `Event` with
 * new values (zero, interpolated or padded).
 *
 * When doing a linear fill, Filler instances should be chained.
 */
export class Fill<T extends Key> extends Processor<T, T> {
    // Options
    private _fieldSpec: string[];
    private _method: FillMethod;
    private _limit: number | null;

    // Internal state
    private _previous: Event<T>;
    private _keyCount;
    private _lastGoodLinear;
    private _linearFillCache;

    /**
     * @param options - Fill options: `fieldSpec` (one or more field paths),
     * `method` (pad, zero or linear; defaults to pad) and `limit` (max
     * number of consecutive fills per column; null means unlimited).
     * @throws {Error} If linear method is combined with multiple field paths.
     */
    constructor(options: FillOptions) {
        super();
        const { fieldSpec, method = FillMethod.Pad, limit = null } = options;

        // Options
        this._fieldSpec = _.isString(fieldSpec) ? [fieldSpec] : fieldSpec;
        this._method = method;
        this._limit = limit;

        this._previous = null; // state for pad to refer to previous event
        this._keyCount = {}; // key count for zero and pad fill
        this._lastGoodLinear = null; // special state for linear fill
        this._linearFillCache = []; // cache of events pending linear fill

        // Special case: when using linear mode, only a single column
        // will be processed per instance!
        if (this._method === FillMethod.Linear && this._fieldSpec.length > 1) {
            throw new Error("Linear fill takes a path to a single field");
        }
    }

    /**
     * Process and fill the values at the paths as appropriate when the fill
     * method is either pad or zero. Returns a new (possibly filled) data map.
     */
    constFill(data: Immutable.Map<string, any>) {
        let newData = data;

        for (const path of this._fieldSpec) {
            const fieldPath = util.fieldAsArray(path);
            const pathKey = fieldPath.join(":");

            // initialize a counter for this column
            if (!_.has(this._keyCount, pathKey)) {
                this._keyCount[pathKey] = 0;
            }

            // this is pointing at a path that does not exist
            if (!newData.hasIn(fieldPath)) {
                continue;
            }

            // Get the next value using the fieldPath
            const val = newData.getIn(fieldPath);

            if (util.isMissing(val)) {
                // Have we hit the limit?
                if (this._limit && this._keyCount[pathKey] >= this._limit) {
                    continue;
                }

                if (this._method === FillMethod.Zero) {
                    // set to zero
                    newData = newData.setIn(fieldPath, 0);
                    this._keyCount[pathKey]++;
                } else if (this._method === FillMethod.Pad) {
                    // set to previous value
                    if (!_.isNull(this._previous)) {
                        const prevVal = this._previous.getData().getIn(fieldPath);

                        if (!util.isMissing(prevVal)) {
                            newData = newData.setIn(fieldPath, prevVal);
                            this._keyCount[pathKey]++;
                        }
                    }
                } else if (this._method === FillMethod.Linear) {
                    // noop
                }
            } else {
                // Valid value resets the consecutive-fill counter.
                this._keyCount[pathKey] = 0;
            }
        }
        return newData;
    }

    /**
     * Check to see if an `Event` has good values when doing
     * linear fill since we need to keep a completely intact
     * event for the values.
     * While we are inspecting the data payload, make a note if
     * any of the paths are pointing at a list. Then it
     * will trigger that filling code later.
     */
    isValidLinearEvent(event: Event<T>) {
        let valid = true;
        const fieldPath = util.fieldAsArray(this._fieldSpec[0]);

        // Detect path that doesn't exist
        // NOTE(review): a non-existent path is reported with a warning but
        // still returns `valid === true` here, so the event passes through
        // as-is — confirm this is the intended contract.
        if (!event.getData().hasIn(fieldPath)) {
            // tslint:disable-next-line
            console.warn(`path does not exist: ${fieldPath}`);
            return valid;
        }

        const val = event.getData().getIn(fieldPath);

        // Detect if missing or not a number
        if (util.isMissing(val) || !_.isNumber(val)) {
            valid = false;
        }
        return valid;
    }

    /**
     * This handles the linear filling. It returns a list of
     * zero or more `Event`'s to be emitted.
     *
     * If an `Event` is valid:
     *  * it has valid values for all of the field paths
     *  * it is cached as "last good" and returned to be emitted.
     * The return value is then a list of one `Event`.
     *
     * If an `Event` has invalid values, it is cached to be
     * processed later and an empty list is returned.
     *
     * Additional invalid `Event`'s will continue to be cached until
     * a new valid value is seen, then the cached events will
     * be filled and returned. That will be a list of indeterminate
     * length.
     */
    linearFill(event: Event<T>): Array<Event<T>> {
        // See if the event is valid and also if it has any
        // list values to be filled.
        const isValidEvent = this.isValidLinearEvent(event);

        const events: Array<Event<T>> = [];
        if (isValidEvent && !this._linearFillCache.length) {
            // Valid event, no cached events, use as last good val
            this._lastGoodLinear = event;
            events.push(event);
        } else if (!isValidEvent && !_.isNull(this._lastGoodLinear)) {
            this._linearFillCache.push(event);

            // Check limit
            if (!_.isNull(this._limit) && this._linearFillCache.length >= this._limit) {
                // Flush the cache now because limit is reached
                this._linearFillCache.forEach(e => {
                    events.push(e);
                });

                // Reset
                this._linearFillCache = [];
                this._lastGoodLinear = null;
            }
        } else if (!isValidEvent && _.isNull(this._lastGoodLinear)) {
            //
            // An invalid event but we have not seen a good
            // event yet so there is nothing to start filling "from"
            // so just return and live with it.
            //
            events.push(event);
        } else if (isValidEvent && this._linearFillCache) {
            // NOTE(review): `this._linearFillCache` is an array and thus
            // always truthy; the empty-cache case is already handled by the
            // first branch, so this effectively means "cache is non-empty".

            // Linear interpolation between last good and this event
            const eventList = [this._lastGoodLinear, ...this._linearFillCache, event];
            const interpolatedEvents = this.interpolateEventList(eventList);

            //
            // The first event in the returned list from interpolatedEvents
            // is our last good event. This event has already been emitted so
            // it is sliced off.
            //
            interpolatedEvents.slice(1).forEach(e => {
                events.push(e);
            });

            // Reset
            this._linearFillCache = [];
            this._lastGoodLinear = event;
        }

        return events;
    }

    /**
     * The fundamental linear interpolation workhorse code. Process
     * a list of `Event`'s and return a new list. Does a pass for
     * every `fieldSpec`.
     *
     * This is abstracted out like this because we probably want
     * to interpolate a list of `Event`'s not tied to a `Collection`.
     * A Pipeline result list, etc etc.
     *
     */
    interpolateEventList(events: Array<Event<T>>): Array<Event<T>> {
        let prevValue;
        let prevTime;

        // new array of interpolated events for each field path
        const newEvents: Array<Event<T>> = [];

        const fieldPath = util.fieldAsArray(this._fieldSpec[0]);

        // setup done, loop through the events
        for (let i = 0; i < events.length; i++) {
            const e = events[i];

            // Can't interpolate first or last event so just save it
            // as is and move on.
            if (i === 0) {
                prevValue = e.get(fieldPath);
                prevTime = e.timestamp().getTime();
                newEvents.push(e);
                continue;
            }

            if (i === events.length - 1) {
                newEvents.push(e);
                continue;
            }

            // Detect non-numeric value
            if (!util.isMissing(e.get(fieldPath)) && !_.isNumber(e.get(fieldPath))) {
                // tslint:disable-next-line
                console.warn(`linear requires numeric values - skipping this field_spec`);
                return events;
            }

            // Found a missing value so start calculating.
            if (util.isMissing(e.get(fieldPath))) {
                // Find the next valid value in the original events
                let ii = i + 1;
                let nextValue = null;
                let nextTime = null;
                while (_.isNull(nextValue) && ii < events.length) {
                    const val = events[ii].get(fieldPath);
                    if (!util.isMissing(val)) {
                        nextValue = val;
                        // exits loop
                        nextTime = events[ii].timestamp().getTime();
                    }
                    ii++;
                }

                // Interpolate a new value to fill
                if (!_.isNull(prevValue) && !_.isNull(nextValue)) {
                    const currentTime = e.timestamp().getTime();
                    if (nextTime === prevTime) {
                        // If times are the same, just avg
                        const newValue = (prevValue + nextValue) / 2;
                        const d = e.getData().setIn(fieldPath, newValue);
                        newEvents.push(e.setData(d));
                    } else {
                        // Linear interpolation by time fraction between
                        // the previous and next valid samples.
                        const f = (currentTime - prevTime) / (nextTime - prevTime);
                        const newValue = prevValue + f * (nextValue - prevValue);
                        const d = e.getData().setIn(fieldPath, newValue);
                        newEvents.push(e.setData(d));
                    }
                } else {
                    newEvents.push(e);
                }
            } else {
                newEvents.push(e);
            }
        }
        return newEvents;
    }

    /**
     * Perform the fill operation on the `Event` and return filled
     * in events
     */
    addEvent(event: Event<T>): Immutable.List<Event<T>> {
        const eventList = new Array<Event<T>>();
        const d = event.getData();
        if (this._method === FillMethod.Zero || this._method === FillMethod.Pad) {
            const dd = this.constFill(d);
            const e = event.setData(dd);
            eventList.push(e);
            // Remember this event so pad fill can source the next gap.
            this._previous = e;
        } else if (this._method === FillMethod.Linear) {
            this.linearFill(event).forEach(e => {
                eventList.push(e);
            });
        }
        return Immutable.List(eventList);
    }
}
import { HttpResponse } from "@azure-rest/core-client";
import { ErrorModelOutput } from "./outputModels";
/** Get true Boolean value on path (200 success response) */
export interface PathsGetBooleanTrue200Response extends HttpResponse {
  status: "200";
  body: Record<string, unknown>;
}
/** Get true Boolean value on path (default/error response, status 500) */
export interface PathsGetBooleanTruedefaultResponse extends HttpResponse {
  status: "500";
  body: ErrorModelOutput;
}
/** Get false Boolean value on path (200 success response) */
export interface PathsGetBooleanFalse200Response extends HttpResponse {
  status: "200";
  body: Record<string, unknown>;
}
/** Get false Boolean value on path (default/error response, status 500) */
export interface PathsGetBooleanFalsedefaultResponse extends HttpResponse {
  status: "500";
  body: ErrorModelOutput;
}
/** Get '1000000' integer value (200 success response) */
export interface PathsGetIntOneMillion200Response extends HttpResponse {
  status: "200";
  body: Record<string, unknown>;
}
/** Get '1000000' integer value (default/error response, status 500) */
export interface PathsGetIntOneMilliondefaultResponse extends HttpResponse {
  status: "500";
  body: ErrorModelOutput;
}
/** Get '-1000000' integer value (200 success response) */
export interface PathsGetIntNegativeOneMillion200Response extends HttpResponse {
  status: "200";
  body: Record<string, unknown>;
}
/** Get '-1000000' integer value (default/error response, status 500) */
export interface PathsGetIntNegativeOneMilliondefaultResponse
  extends HttpResponse {
  status: "500";
  body: ErrorModelOutput;
}
/** Get '10000000000' 64 bit integer value (200 success response) */
export interface PathsGetTenBillion200Response extends HttpResponse {
  status: "200";
  body: Record<string, unknown>;
}
/** Get '10000000000' 64 bit integer value */
export interface PathsGetTenBilliondefaultResponse extends HttpResponse {
status: "500";
body: ErrorModelOutput;
}
/** Get '-10000000000' 64 bit integer value */
export interface PathsGetNegativeTenBillion200Response extends HttpResponse {
status: "200";
body: Record<string, unknown>;
}
/** Get '-10000000000' 64 bit integer value */
export interface PathsGetNegativeTenBilliondefaultResponse
extends HttpResponse {
status: "500";
body: ErrorModelOutput;
}
/** Get '1.034E+20' numeric value */
export interface PathsFloatScientificPositive200Response extends HttpResponse {
status: "200";
body: Record<string, unknown>;
}
/** Get '1.034E+20' numeric value */
export interface PathsFloatScientificPositivedefaultResponse
extends HttpResponse {
status: "500";
body: ErrorModelOutput;
}
/** Get '-1.034E-20' numeric value */
export interface PathsFloatScientificNegative200Response extends HttpResponse {
status: "200";
body: Record<string, unknown>;
}
/** Get '-1.034E-20' numeric value */
export interface PathsFloatScientificNegativedefaultResponse
extends HttpResponse {
status: "500";
body: ErrorModelOutput;
}
/** Get '9999999.999' numeric value */
export interface PathsDoubleDecimalPositive200Response extends HttpResponse {
status: "200";
body: Record<string, unknown>;
}
/** Get '9999999.999' numeric value */
export interface PathsDoubleDecimalPositivedefaultResponse
extends HttpResponse {
status: "500";
body: ErrorModelOutput;
}
/** Get '-9999999.999' numeric value */
export interface PathsDoubleDecimalNegative200Response extends HttpResponse {
status: "200";
body: Record<string, unknown>;
}
/** Get '-9999999.999' numeric value */
export interface PathsDoubleDecimalNegativedefaultResponse
extends HttpResponse {
status: "500";
body: ErrorModelOutput;
}
/** Get '啊齄丂狛狜隣郎隣兀﨩' multi-byte string value */
export interface PathsStringUnicode200Response extends HttpResponse {
status: "200";
body: Record<string, unknown>;
}
/** Get '啊齄丂狛狜隣郎隣兀﨩' multi-byte string value */
export interface PathsStringUnicodedefaultResponse extends HttpResponse {
status: "500";
body: ErrorModelOutput;
}
/** Get 'begin!*'();:@ &=+$,/?#[]end */
export interface PathsStringUrlEncoded200Response extends HttpResponse {
status: "200";
body: Record<string, unknown>;
}
/** Get 'begin!*'();:@ &=+$,/?#[]end */
export interface PathsStringUrlEncodeddefaultResponse extends HttpResponse {
status: "500";
body: ErrorModelOutput;
}
/** https://tools.ietf.org/html/rfc3986#appendix-A 'path' accept any 'pchar' not encoded */
export interface PathsStringUrlNonEncoded200Response extends HttpResponse {
status: "200";
body: Record<string, unknown>;
}
/** https://tools.ietf.org/html/rfc3986#appendix-A 'path' accept any 'pchar' not encoded */
export interface PathsStringUrlNonEncodeddefaultResponse extends HttpResponse {
status: "500";
body: ErrorModelOutput;
}
/** Get '' */
export interface PathsStringEmpty200Response extends HttpResponse {
status: "200";
body: Record<string, unknown>;
}
/** Get '' */
export interface PathsStringEmptydefaultResponse extends HttpResponse {
status: "500";
body: ErrorModelOutput;
}
/** Get null (should throw) */
export interface PathsStringNull400Response extends HttpResponse {
status: "400";
body: Record<string, unknown>;
}
/** Get null (should throw) */
export interface PathsStringNulldefaultResponse extends HttpResponse {
status: "500";
body: ErrorModelOutput;
}
/** Get using uri with 'green color' in path parameter */
export interface PathsEnumValid200Response extends HttpResponse {
status: "200";
body: Record<string, unknown>;
}
/** Get using uri with 'green color' in path parameter */
export interface PathsEnumValiddefaultResponse extends HttpResponse {
status: "500";
body: ErrorModelOutput;
}
/** Get null (should throw on the client before the request is sent on wire) */
export interface PathsEnumNull400Response extends HttpResponse {
status: "400";
body: Record<string, unknown>;
}
/** Get null (should throw on the client before the request is sent on wire) */
export interface PathsEnumNulldefaultResponse extends HttpResponse {
status: "500";
body: ErrorModelOutput;
}
/** Get '啊齄丂狛狜隣郎隣兀﨩' multibyte value as utf-8 encoded byte array */
export interface PathsByteMultiByte200Response extends HttpResponse {
status: "200";
body: Record<string, unknown>;
}
/** Get '啊齄丂狛狜隣郎隣兀﨩' multibyte value as utf-8 encoded byte array */
export interface PathsByteMultiBytedefaultResponse extends HttpResponse {
status: "500";
body: ErrorModelOutput;
}
/** Get '' as byte array */
export interface PathsByteEmpty200Response extends HttpResponse {
status: "200";
body: Record<string, unknown>;
}
/** Get '' as byte array */
export interface PathsByteEmptydefaultResponse extends HttpResponse {
status: "500";
body: ErrorModelOutput;
}
/** Get null as byte array (should throw) */
export interface PathsByteNull400Response extends HttpResponse {
status: "400";
body: Record<string, unknown>;
}
/** Get null as byte array (should throw) */
export interface PathsByteNulldefaultResponse extends HttpResponse {
status: "500";
body: ErrorModelOutput;
}
/** Get '2012-01-01' as date */
export interface PathsDateValid200Response extends HttpResponse {
status: "200";
body: Record<string, unknown>;
}
/** Get '2012-01-01' as date */
export interface PathsDateValiddefaultResponse extends HttpResponse {
status: "500";
body: ErrorModelOutput;
}
/** Get null as date - this should throw or be unusable on the client side, depending on date representation */
export interface PathsDateNull400Response extends HttpResponse {
status: "400";
body: Record<string, unknown>;
}
/** Get null as date - this should throw or be unusable on the client side, depending on date representation */
export interface PathsDateNulldefaultResponse extends HttpResponse {
status: "500";
body: ErrorModelOutput;
}
/** Get '2012-01-01T01:01:01Z' as date-time */
export interface PathsDateTimeValid200Response extends HttpResponse {
status: "200";
body: Record<string, unknown>;
}
/** Get '2012-01-01T01:01:01Z' as date-time */
export interface PathsDateTimeValiddefaultResponse extends HttpResponse {
status: "500";
body: ErrorModelOutput;
}
/** Get null as date-time, should be disallowed or throw depending on representation of date-time */
export interface PathsDateTimeNull400Response extends HttpResponse {
status: "400";
body: Record<string, unknown>;
}
/** Get null as date-time, should be disallowed or throw depending on representation of date-time */
export interface PathsDateTimeNulldefaultResponse extends HttpResponse {
status: "500";
body: ErrorModelOutput;
}
/** Get 'lorem' encoded value as 'bG9yZW0' (base64url) */
export interface PathsBase64Url200Response extends HttpResponse {
status: "200";
body: Record<string, unknown>;
}
/** Get 'lorem' encoded value as 'bG9yZW0' (base64url) */
export interface PathsBase64UrldefaultResponse extends HttpResponse {
status: "500";
body: ErrorModelOutput;
}
/** Get an array of string ['ArrayPath1', 'begin!*'();:@ &=+$,/?#[]end' , null, ''] using the csv-array format */
export interface PathsArrayCsvInPath200Response extends HttpResponse {
status: "200";
body: Record<string, unknown>;
}
/** Get an array of string ['ArrayPath1', 'begin!*'();:@ &=+$,/?#[]end' , null, ''] using the csv-array format */
export interface PathsArrayCsvInPathdefaultResponse extends HttpResponse {
status: "500";
body: ErrorModelOutput;
}
/** Get the date 2016-04-13 encoded value as '1460505600' (Unix time) */
export interface PathsUnixTimeUrl200Response extends HttpResponse {
status: "200";
body: Record<string, unknown>;
}
/** Get the date 2016-04-13 encoded value as '1460505600' (Unix time) */
export interface PathsUnixTimeUrldefaultResponse extends HttpResponse {
status: "500";
body: ErrorModelOutput;
}
/*
 * Response models for the `Queries` operation group (generated REST-level
 * client). Each operation has a `…200Response` success shape and a
 * `…defaultResponse` error shape.
 *
 * NOTE(review): the `…defaultResponse` interfaces pin `status` to "500",
 * but a "default" response in the service spec covers every undeclared
 * status code — confirm against the generator before relying on the
 * literal type.
 */
/** Get true Boolean value on path */
export interface QueriesGetBooleanTrue200Response extends HttpResponse {
  status: "200";
  body: Record<string, unknown>;
}
/** Get true Boolean value on path */
export interface QueriesGetBooleanTruedefaultResponse extends HttpResponse {
  status: "500";
  body: ErrorModelOutput;
}
/** Get false Boolean value on path */
export interface QueriesGetBooleanFalse200Response extends HttpResponse {
  status: "200";
  body: Record<string, unknown>;
}
/** Get false Boolean value on path */
export interface QueriesGetBooleanFalsedefaultResponse extends HttpResponse {
  status: "500";
  body: ErrorModelOutput;
}
/** Get null Boolean value on query (query string should be absent) */
export interface QueriesGetBooleanNull200Response extends HttpResponse {
  status: "200";
  body: Record<string, unknown>;
}
/** Get null Boolean value on query (query string should be absent) */
export interface QueriesGetBooleanNulldefaultResponse extends HttpResponse {
  status: "500";
  body: ErrorModelOutput;
}
/** Get '1000000' integer value */
export interface QueriesGetIntOneMillion200Response extends HttpResponse {
  status: "200";
  body: Record<string, unknown>;
}
/** Get '1000000' integer value */
export interface QueriesGetIntOneMilliondefaultResponse extends HttpResponse {
  status: "500";
  body: ErrorModelOutput;
}
/** Get '-1000000' integer value */
export interface QueriesGetIntNegativeOneMillion200Response
  extends HttpResponse {
  status: "200";
  body: Record<string, unknown>;
}
/** Get '-1000000' integer value */
export interface QueriesGetIntNegativeOneMilliondefaultResponse
  extends HttpResponse {
  status: "500";
  body: ErrorModelOutput;
}
/** Get null integer value (no query parameter) */
export interface QueriesGetIntNull200Response extends HttpResponse {
  status: "200";
  body: Record<string, unknown>;
}
/** Get null integer value (no query parameter) */
export interface QueriesGetIntNulldefaultResponse extends HttpResponse {
  status: "500";
  body: ErrorModelOutput;
}
/** Get '10000000000' 64 bit integer value */
export interface QueriesGetTenBillion200Response extends HttpResponse {
  status: "200";
  body: Record<string, unknown>;
}
/** Get '10000000000' 64 bit integer value */
export interface QueriesGetTenBilliondefaultResponse extends HttpResponse {
  status: "500";
  body: ErrorModelOutput;
}
/** Get '-10000000000' 64 bit integer value */
export interface QueriesGetNegativeTenBillion200Response extends HttpResponse {
  status: "200";
  body: Record<string, unknown>;
}
/** Get '-10000000000' 64 bit integer value */
export interface QueriesGetNegativeTenBilliondefaultResponse
  extends HttpResponse {
  status: "500";
  body: ErrorModelOutput;
}
/** Get null 64 bit integer value (no query param in uri) */
export interface QueriesGetLongNull200Response extends HttpResponse {
  status: "200";
  body: Record<string, unknown>;
}
/** Get null 64 bit integer value (no query param in uri) */
export interface QueriesGetLongNulldefaultResponse extends HttpResponse {
  status: "500";
  body: ErrorModelOutput;
}
/** Get '1.034E+20' numeric value */
export interface QueriesFloatScientificPositive200Response
  extends HttpResponse {
  status: "200";
  body: Record<string, unknown>;
}
/** Get '1.034E+20' numeric value */
export interface QueriesFloatScientificPositivedefaultResponse
  extends HttpResponse {
  status: "500";
  body: ErrorModelOutput;
}
/** Get '-1.034E-20' numeric value */
export interface QueriesFloatScientificNegative200Response
  extends HttpResponse {
  status: "200";
  body: Record<string, unknown>;
}
/** Get '-1.034E-20' numeric value */
export interface QueriesFloatScientificNegativedefaultResponse
  extends HttpResponse {
  status: "500";
  body: ErrorModelOutput;
}
/** Get null numeric value (no query parameter) */
export interface QueriesFloatNull200Response extends HttpResponse {
  status: "200";
  body: Record<string, unknown>;
}
/** Get null numeric value (no query parameter) */
export interface QueriesFloatNulldefaultResponse extends HttpResponse {
  status: "500";
  body: ErrorModelOutput;
}
/** Get '9999999.999' numeric value */
export interface QueriesDoubleDecimalPositive200Response extends HttpResponse {
  status: "200";
  body: Record<string, unknown>;
}
/** Get '9999999.999' numeric value */
export interface QueriesDoubleDecimalPositivedefaultResponse
  extends HttpResponse {
  status: "500";
  body: ErrorModelOutput;
}
/** Get '-9999999.999' numeric value */
export interface QueriesDoubleDecimalNegative200Response extends HttpResponse {
  status: "200";
  body: Record<string, unknown>;
}
/** Get '-9999999.999' numeric value */
export interface QueriesDoubleDecimalNegativedefaultResponse
  extends HttpResponse {
  status: "500";
  body: ErrorModelOutput;
}
/** Get null numeric value (no query parameter) */
export interface QueriesDoubleNull200Response extends HttpResponse {
  status: "200";
  body: Record<string, unknown>;
}
/** Get null numeric value (no query parameter) */
export interface QueriesDoubleNulldefaultResponse extends HttpResponse {
  status: "500";
  body: ErrorModelOutput;
}
/** Get '啊齄丂狛狜隣郎隣兀﨩' multi-byte string value */
export interface QueriesStringUnicode200Response extends HttpResponse {
  status: "200";
  body: Record<string, unknown>;
}
/** Get '啊齄丂狛狜隣郎隣兀﨩' multi-byte string value */
export interface QueriesStringUnicodedefaultResponse extends HttpResponse {
  status: "500";
  body: ErrorModelOutput;
}
/** Get 'begin!*'();:@ &=+$,/?#[]end */
export interface QueriesStringUrlEncoded200Response extends HttpResponse {
  status: "200";
  body: Record<string, unknown>;
}
/** Get 'begin!*'();:@ &=+$,/?#[]end */
export interface QueriesStringUrlEncodeddefaultResponse extends HttpResponse {
  status: "500";
  body: ErrorModelOutput;
}
/** Get '' */
export interface QueriesStringEmpty200Response extends HttpResponse {
  status: "200";
  body: Record<string, unknown>;
}
/** Get '' */
export interface QueriesStringEmptydefaultResponse extends HttpResponse {
  status: "500";
  body: ErrorModelOutput;
}
/** Get null (no query parameter in url) */
export interface QueriesStringNull200Response extends HttpResponse {
  status: "200";
  body: Record<string, unknown>;
}
/** Get null (no query parameter in url) */
export interface QueriesStringNulldefaultResponse extends HttpResponse {
  status: "500";
  body: ErrorModelOutput;
}
/** Get using uri with query parameter 'green color' */
export interface QueriesEnumValid200Response extends HttpResponse {
  status: "200";
  body: Record<string, unknown>;
}
/** Get using uri with query parameter 'green color' */
export interface QueriesEnumValiddefaultResponse extends HttpResponse {
  status: "500";
  body: ErrorModelOutput;
}
/** Get null (no query parameter in url) */
export interface QueriesEnumNull200Response extends HttpResponse {
  status: "200";
  body: Record<string, unknown>;
}
/** Get null (no query parameter in url) */
export interface QueriesEnumNulldefaultResponse extends HttpResponse {
  status: "500";
  body: ErrorModelOutput;
}
/** Get '啊齄丂狛狜隣郎隣兀﨩' multibyte value as utf-8 encoded byte array */
export interface QueriesByteMultiByte200Response extends HttpResponse {
  status: "200";
  body: Record<string, unknown>;
}
/** Get '啊齄丂狛狜隣郎隣兀﨩' multibyte value as utf-8 encoded byte array */
export interface QueriesByteMultiBytedefaultResponse extends HttpResponse {
  status: "500";
  body: ErrorModelOutput;
}
/** Get '' as byte array */
export interface QueriesByteEmpty200Response extends HttpResponse {
  status: "200";
  body: Record<string, unknown>;
}
/** Get '' as byte array */
export interface QueriesByteEmptydefaultResponse extends HttpResponse {
  status: "500";
  body: ErrorModelOutput;
}
/** Get null as byte array (no query parameters in uri) */
export interface QueriesByteNull200Response extends HttpResponse {
  status: "200";
  body: Record<string, unknown>;
}
/** Get null as byte array (no query parameters in uri) */
export interface QueriesByteNulldefaultResponse extends HttpResponse {
  status: "500";
  body: ErrorModelOutput;
}
/** Get '2012-01-01' as date */
export interface QueriesDateValid200Response extends HttpResponse {
  status: "200";
  body: Record<string, unknown>;
}
/** Get '2012-01-01' as date */
export interface QueriesDateValiddefaultResponse extends HttpResponse {
  status: "500";
  body: ErrorModelOutput;
}
/** Get null as date - this should result in no query parameters in uri */
export interface QueriesDateNull200Response extends HttpResponse {
  status: "200";
  body: Record<string, unknown>;
}
/** Get null as date - this should result in no query parameters in uri */
export interface QueriesDateNulldefaultResponse extends HttpResponse {
  status: "500";
  body: ErrorModelOutput;
}
/** Get '2012-01-01T01:01:01Z' as date-time */
export interface QueriesDateTimeValid200Response extends HttpResponse {
  status: "200";
  body: Record<string, unknown>;
}
/** Get '2012-01-01T01:01:01Z' as date-time */
export interface QueriesDateTimeValiddefaultResponse extends HttpResponse {
  status: "500";
  body: ErrorModelOutput;
}
/** Get null as date-time, should result in no query parameters in uri */
export interface QueriesDateTimeNull200Response extends HttpResponse {
  status: "200";
  body: Record<string, unknown>;
}
/** Get null as date-time, should result in no query parameters in uri */
export interface QueriesDateTimeNulldefaultResponse extends HttpResponse {
  status: "500";
  body: ErrorModelOutput;
}
/** Get an array of string ['ArrayQuery1', 'begin!*'();:@ &=+$,/?#[]end' , null, ''] using the csv-array format */
export interface QueriesArrayStringCsvValid200Response extends HttpResponse {
  status: "200";
  body: Record<string, unknown>;
}
/** Get an array of string ['ArrayQuery1', 'begin!*'();:@ &=+$,/?#[]end' , null, ''] using the csv-array format */
export interface QueriesArrayStringCsvValiddefaultResponse
  extends HttpResponse {
  status: "500";
  body: ErrorModelOutput;
}
/** Get a null array of string using the csv-array format */
export interface QueriesArrayStringCsvNull200Response extends HttpResponse {
  status: "200";
  body: Record<string, unknown>;
}
/** Get a null array of string using the csv-array format */
export interface QueriesArrayStringCsvNulldefaultResponse extends HttpResponse {
  status: "500";
  body: ErrorModelOutput;
}
/** Get an empty array [] of string using the csv-array format */
export interface QueriesArrayStringCsvEmpty200Response extends HttpResponse {
  status: "200";
  body: Record<string, unknown>;
}
/** Get an empty array [] of string using the csv-array format */
export interface QueriesArrayStringCsvEmptydefaultResponse
  extends HttpResponse {
  status: "500";
  body: ErrorModelOutput;
}
/** Array query has no defined collection format, should default to csv. Pass in ['hello', 'nihao', 'bonjour'] for the 'arrayQuery' parameter to the service */
export interface QueriesArrayStringNoCollectionFormatEmpty200Response
  extends HttpResponse {
  status: "200";
  body: Record<string, unknown>;
}
/** Array query has no defined collection format, should default to csv. Pass in ['hello', 'nihao', 'bonjour'] for the 'arrayQuery' parameter to the service */
export interface QueriesArrayStringNoCollectionFormatEmptydefaultResponse
  extends HttpResponse {
  status: "500";
  body: ErrorModelOutput;
}
/** Get an array of string ['ArrayQuery1', 'begin!*'();:@ &=+$,/?#[]end' , null, ''] using the ssv-array format */
export interface QueriesArrayStringSsvValid200Response extends HttpResponse {
  status: "200";
  body: Record<string, unknown>;
}
/** Get an array of string ['ArrayQuery1', 'begin!*'();:@ &=+$,/?#[]end' , null, ''] using the ssv-array format */
export interface QueriesArrayStringSsvValiddefaultResponse
  extends HttpResponse {
  status: "500";
  body: ErrorModelOutput;
}
/** Get an array of string ['ArrayQuery1', 'begin!*'();:@ &=+$,/?#[]end' , null, ''] using the tsv-array format */
export interface QueriesArrayStringTsvValid200Response extends HttpResponse {
  status: "200";
  body: Record<string, unknown>;
}
/** Get an array of string ['ArrayQuery1', 'begin!*'();:@ &=+$,/?#[]end' , null, ''] using the tsv-array format */
export interface QueriesArrayStringTsvValiddefaultResponse
  extends HttpResponse {
  status: "500";
  body: ErrorModelOutput;
}
/** Get an array of string ['ArrayQuery1', 'begin!*'();:@ &=+$,/?#[]end' , null, ''] using the pipes-array format */
export interface QueriesArrayStringPipesValid200Response extends HttpResponse {
  status: "200";
  body: Record<string, unknown>;
}
/** Get an array of string ['ArrayQuery1', 'begin!*'();:@ &=+$,/?#[]end' , null, ''] using the pipes-array format */
export interface QueriesArrayStringPipesValiddefaultResponse
  extends HttpResponse {
  status: "500";
  body: ErrorModelOutput;
}
/*
 * Response models for the `PathItems` operation group (generated REST-level
 * client). NOTE(review): the `…defaultResponse` interfaces pin `status` to
 * "500", but a "default" response in the service spec covers every
 * undeclared status code — confirm against the generator before relying on
 * the literal type.
 */
/** send globalStringPath='globalStringPath', pathItemStringPath='pathItemStringPath', localStringPath='localStringPath', globalStringQuery='globalStringQuery', pathItemStringQuery='pathItemStringQuery', localStringQuery='localStringQuery' */
export interface PathItemsGetAllWithValues200Response extends HttpResponse {
  status: "200";
  body: Record<string, unknown>;
}
/** send globalStringPath='globalStringPath', pathItemStringPath='pathItemStringPath', localStringPath='localStringPath', globalStringQuery='globalStringQuery', pathItemStringQuery='pathItemStringQuery', localStringQuery='localStringQuery' */
export interface PathItemsGetAllWithValuesdefaultResponse extends HttpResponse {
  status: "500";
  body: ErrorModelOutput;
}
/** send globalStringPath='globalStringPath', pathItemStringPath='pathItemStringPath', localStringPath='localStringPath', globalStringQuery=null, pathItemStringQuery='pathItemStringQuery', localStringQuery='localStringQuery' */
export interface PathItemsGetGlobalQueryNull200Response extends HttpResponse {
  status: "200";
  body: Record<string, unknown>;
}
/** send globalStringPath='globalStringPath', pathItemStringPath='pathItemStringPath', localStringPath='localStringPath', globalStringQuery=null, pathItemStringQuery='pathItemStringQuery', localStringQuery='localStringQuery' */
export interface PathItemsGetGlobalQueryNulldefaultResponse
  extends HttpResponse {
  status: "500";
  body: ErrorModelOutput;
}
/** send globalStringPath=globalStringPath, pathItemStringPath='pathItemStringPath', localStringPath='localStringPath', globalStringQuery=null, pathItemStringQuery='pathItemStringQuery', localStringQuery=null */
export interface PathItemsGetGlobalAndLocalQueryNull200Response
  extends HttpResponse {
  status: "200";
  body: Record<string, unknown>;
}
/** send globalStringPath=globalStringPath, pathItemStringPath='pathItemStringPath', localStringPath='localStringPath', globalStringQuery=null, pathItemStringQuery='pathItemStringQuery', localStringQuery=null */
export interface PathItemsGetGlobalAndLocalQueryNulldefaultResponse
  extends HttpResponse {
  status: "500";
  body: ErrorModelOutput;
}
/** send globalStringPath='globalStringPath', pathItemStringPath='pathItemStringPath', localStringPath='localStringPath', globalStringQuery='globalStringQuery', pathItemStringQuery=null, localStringQuery=null */
export interface PathItemsGetLocalPathItemQueryNull200Response
  extends HttpResponse {
  status: "200";
  body: Record<string, unknown>;
}
/** send globalStringPath='globalStringPath', pathItemStringPath='pathItemStringPath', localStringPath='localStringPath', globalStringQuery='globalStringQuery', pathItemStringQuery=null, localStringQuery=null */
export interface PathItemsGetLocalPathItemQueryNulldefaultResponse
  extends HttpResponse {
  status: "500";
  body: ErrorModelOutput;
}
import { BaseResource, CloudError, AzureServiceClientOptions } from "@azure/ms-rest-azure-js";
import * as msRest from "@azure/ms-rest-js";
export { BaseResource, CloudError };
/**
 * Payload for Add Users operation on a Lab.
 */
export interface AddUsersPayload {
  /**
   * List of user email addresses to add to the lab.
   */
  emailAddresses: string[];
}
/**
 * Creation parameters for Reference Vm
 */
export interface ReferenceVmCreationParameters {
  /**
   * The username of the virtual machine.
   */
  userName: string;
  /**
   * The password of the virtual machine.
   */
  password: string;
}
/**
 * Represents resource specific settings
 */
export interface ResourceSettingCreationParameters {
  /**
   * The location where the virtual machine will live
   */
  location?: string;
  /**
   * The name of the resource setting
   */
  name?: string;
  /**
   * The resource id of the gallery image used for creating the virtual machine
   */
  galleryImageResourceId: string;
  /**
   * The size of the virtual machine. Possible values include: 'Basic', 'Standard', 'Performance'
   */
  size?: ManagedLabVmSize;
  /**
   * Creation parameters for Reference Vm
   */
  referenceVmCreationParameters: ReferenceVmCreationParameters;
}
/**
 * Settings related to creating an environment setting
 */
export interface EnvironmentSettingCreationParameters {
  /**
   * The resource specific settings
   */
  resourceSettingCreationParameters: ResourceSettingCreationParameters;
}
/**
 * Settings related to creating a lab
 */
export interface LabCreationParameters {
  /**
   * Maximum number of users allowed in the lab.
   */
  maxUsersInLab?: number;
}
/**
 * Properties for creating a managed lab and a default environment setting
 */
export interface CreateLabProperties {
  /**
   * Settings related to creating an environment setting
   */
  environmentSettingCreationParameters?: EnvironmentSettingCreationParameters;
  /**
   * Settings related to creating a lab
   */
  labCreationParameters: LabCreationParameters;
  /**
   * The name of the resource
   */
  name: string;
  /**
   * The location of the resource
   */
  location?: string;
  /**
   * The tags of the resource.
   */
  tags?: { [propertyName: string]: string };
}
/**
 * Represents a VM and the setting Id it was created for.
 */
export interface ResourceSet {
  /**
   * VM resource Id for the environment
   */
  vmResourceId?: string;
  /**
   * resourceSettingId for the environment
   */
  resourceSettingId?: string;
}
/**
 * Network details of the environment
 */
export interface NetworkInterface {
  /**
   * Private IP address of the Compute VM
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly privateIpAddress?: string;
  /**
   * Connection information for Linux
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly sshAuthority?: string;
  /**
   * Connection information for Windows
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly rdpAuthority?: string;
  /**
   * Username of the VM
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly username?: string;
}
/**
 * Details of the status of an operation.
 */
export interface LatestOperationResult {
  /**
   * The current status of the operation.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly status?: string;
  /**
   * Error code on failure.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly errorCode?: string;
  /**
   * The error message.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly errorMessage?: string;
  /**
   * Request URI of the operation.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly requestUri?: string;
  /**
   * The HttpMethod - PUT/POST/DELETE for the operation.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly httpMethod?: string;
  /**
   * The URL to use to check long-running operation status
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly operationUrl?: string;
}
/**
 * An Azure resource.
 */
export interface Resource extends BaseResource {
  /**
   * The identifier of the resource.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly id?: string;
  /**
   * The name of the resource.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly name?: string;
  /**
   * The type of the resource.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly type?: string;
  /**
   * The location of the resource.
   */
  location?: string;
  /**
   * The tags of the resource.
   */
  tags?: { [propertyName: string]: string };
}
/**
 * Represents an environment instance
 */
export interface Environment extends Resource {
  /**
   * The set of a VM and the setting id it was created for.
   * NOTE(review): despite the plural name, the type is a single
   * `ResourceSet`, not an array — confirm against the service swagger.
   */
  resourceSets?: ResourceSet;
  /**
   * The AAD object Id of the user who has claimed the environment
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly claimedByUserObjectId?: string;
  /**
   * The user principal Id of the user who has claimed the environment
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly claimedByUserPrincipalId?: string;
  /**
   * The name or email address of the user who has claimed the environment
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly claimedByUserName?: string;
  /**
   * Is the environment claimed or not
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly isClaimed?: boolean;
  /**
   * Last known power state of the environment
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly lastKnownPowerState?: string;
  /**
   * Network details of the environment
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly networkInterface?: NetworkInterface;
  /**
   * How long the environment has been used by a lab user
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly totalUsage?: string;
  /**
   * When the password was last reset on the environment.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly passwordLastReset?: Date;
  /**
   * The provisioning status of the resource.
   */
  provisioningState?: string;
  /**
   * The unique immutable identifier of a resource (Guid).
   */
  uniqueIdentifier?: string;
  /**
   * The details of the latest operation. ex: status, error
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly latestOperationResult?: LatestOperationResult;
}
/**
 * Details of the backing virtual machine.
 */
export interface VirtualMachineDetails {
  /**
   * Provisioning state of the Dtl VM
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly provisioningState?: string;
  /**
   * Connection information for Windows
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly rdpAuthority?: string;
  /**
   * Connection information for Linux
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly sshAuthority?: string;
  /**
   * Private IP address of the compute VM
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly privateIpAddress?: string;
  /**
   * Compute VM login user name
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly userName?: string;
  /**
   * Last known compute power state captured in DTL
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly lastKnownPowerState?: string;
}
/**
 * This represents the details about a User's environment and its state.
 */
export interface EnvironmentDetails {
  /**
   * Name of the Environment
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly name?: string;
  /**
   * Description of the Environment
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly description?: string;
  /**
   * Resource Id of the environment
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly id?: string;
  /**
   * The provisioning state of the environment. This also includes the LabIsFull and
   * NotYetProvisioned statuses.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly provisioningState?: string;
  /**
   * Details of backing DTL virtual machine with compute and network details.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly virtualMachineDetails?: VirtualMachineDetails;
  /**
   * The details of the latest operation. ex: status, error
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly latestOperationResult?: LatestOperationResult;
  /**
   * Publishing state of the environment setting Possible values are Creating, Created, Failed
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly environmentState?: string;
  /**
   * How long the environment has been used by a lab user
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly totalUsage?: string;
  /**
   * When the password was last reset on the environment.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly passwordLastReset?: Date;
}
/**
 * Represents a VM and the resource setting Id it was created for.
 */
export interface ResourceSetFragment {
  /**
   * VM resource Id for the environment
   */
  vmResourceId?: string;
  /**
   * The resourceSettingId for the environment
   */
  resourceSettingId?: string;
}
/**
 * Represents an environment instance
 */
export interface EnvironmentFragment extends Resource {
  /**
   * The set of a VM and the setting id it was created for.
   * NOTE(review): despite the plural name, this is a single ResourceSetFragment, not an array.
   */
  resourceSets?: ResourceSetFragment;
  /**
   * The provisioning status of the resource.
   */
  provisioningState?: string;
  /**
   * The unique immutable identifier of a resource (Guid).
   */
  uniqueIdentifier?: string;
}
/**
 * Represents payload for any Environment operations like get, start, stop, connect
 */
export interface EnvironmentOperationsPayload {
  /**
   * The resource Id of the environment the operation targets.
   */
  environmentId: string;
}
/**
 * Details about the state of the reference virtual machine.
 */
export interface VmStateDetails {
  /**
   * The RdpAuthority property is a server DNS host name or IP address followed by the service port
   * number for RDP (Remote Desktop Protocol).
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly rdpAuthority?: string;
  /**
   * The SshAuthority property is a server DNS host name or IP address followed by the service port
   * number for SSH.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly sshAuthority?: string;
  /**
   * The current power state of the reference virtual machine.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly powerState?: string;
  /**
   * Last known compute power state captured in DTL
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly lastKnownPowerState?: string;
}
/**
 * Details of a Reference Vm
 */
export interface ReferenceVm {
  /**
   * The username of the virtual machine. Required.
   */
  userName: string;
  /**
   * The password of the virtual machine. This will be set to null in GET resource API
   */
  password?: string;
  /**
   * The state details for the reference virtual machine.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly vmStateDetails?: VmStateDetails;
  /**
   * VM resource Id for the environment
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly vmResourceId?: string;
}
/**
 * Represents resource specific settings
 */
export interface ResourceSettings {
  /**
   * The unique id of the resource setting
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly id?: string;
  /**
   * The resource id of the gallery image used for creating the virtual machine
   */
  galleryImageResourceId?: string;
  /**
   * The name of the image used to create the environment setting
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly imageName?: string;
  /**
   * The size of the virtual machine. Possible values include: 'Basic', 'Standard', 'Performance'
   */
  size?: ManagedLabVmSize;
  /**
   * The translated compute cores of the virtual machine
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly cores?: number;
  /**
   * Details specific to the Reference Vm. Required.
   */
  referenceVm: ReferenceVm;
}
/**
 * Represents settings of an environment, from which environment instances would be created
 */
export interface EnvironmentSetting extends Resource {
  /**
   * Describes the readiness of this environment setting. Possible values include: 'Draft',
   * 'Publishing', 'Published', 'PublishFailed', 'Scaling'
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly publishingState?: PublishingState;
  /**
   * Describes the user's progress in configuring their environment setting. Possible values
   * include: 'NotApplicable', 'Completed'
   */
  configurationState?: ConfigurationState;
  /**
   * Describes the environment and its resource settings
   */
  description?: string;
  /**
   * Brief title describing the environment and its resource settings
   */
  title?: string;
  /**
   * The resource specific settings. Required.
   */
  resourceSettings: ResourceSettings;
  /**
   * Time when the template VM was last changed.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly lastChanged?: Date;
  /**
   * Time when the template VM was last sent for publishing.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly lastPublished?: Date;
  /**
   * The provisioning status of the resource.
   */
  provisioningState?: string;
  /**
   * The unique immutable identifier of a resource (Guid).
   */
  uniqueIdentifier?: string;
  /**
   * The details of the latest operation. ex: status, error
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly latestOperationResult?: LatestOperationResult;
}
/**
 * Details of a Reference Vm.
 * NOTE(review): fragment types appear intended for partial-update (PATCH) payloads — all
 * properties optional; confirm against the client operations that accept them.
 */
export interface ReferenceVmFragment {
  /**
   * The username of the virtual machine
   */
  userName?: string;
  /**
   * The password of the virtual machine. This will be set to null in GET resource API
   */
  password?: string;
}
/**
 * Represents resource specific settings (fragment variant with all properties optional).
 */
export interface ResourceSettingsFragment {
  /**
   * The resource id of the gallery image used for creating the virtual machine
   */
  galleryImageResourceId?: string;
  /**
   * The size of the virtual machine. Possible values include: 'Basic', 'Standard', 'Performance'
   */
  size?: ManagedLabVmSize;
  /**
   * Details specific to Reference Vm
   */
  referenceVm?: ReferenceVmFragment;
}
/**
 * Represents settings of an environment, from which environment instances would be created
 * (fragment variant with all properties optional).
 */
export interface EnvironmentSettingFragment extends Resource {
  /**
   * Describes the user's progress in configuring their environment setting. Possible values
   * include: 'NotApplicable', 'Completed'
   */
  configurationState?: ConfigurationState;
  /**
   * Describes the environment and its resource settings
   */
  description?: string;
  /**
   * Brief title describing the environment and its resource settings
   */
  title?: string;
  /**
   * The resource specific settings
   */
  resourceSettings?: ResourceSettingsFragment;
  /**
   * The provisioning status of the resource.
   */
  provisioningState?: string;
  /**
   * The unique immutable identifier of a resource (Guid).
   */
  uniqueIdentifier?: string;
}
/**
 * Contains detailed information about a compute size.
 */
export interface SizeInfo {
  /**
   * Represents the actual compute size, e.g. Standard_A2_v2.
   */
  computeSize?: string;
  /**
   * The pay-as-you-go price per hour this size will cost. It does not include discounts and may
   * not reflect the actual price the size will cost.
   */
  price?: number;
  /**
   * The number of cores a VM of this size has.
   */
  numberOfCores?: number;
  /**
   * The amount of memory available (in GB).
   */
  memory?: number;
}
/**
 * Represents a size category supported by this Lab Account (small, medium or large).
 */
export interface EnvironmentSize {
  /**
   * The size category. Possible values include: 'Basic', 'Standard', 'Performance'
   */
  name?: ManagedLabVmSize;
  /**
   * Represents a set of compute sizes that can serve this given size type
   */
  vmSizes?: SizeInfo[];
  /**
   * The pay-as-you-go dollar price per hour this size will cost. It does not include discounts and
   * may not reflect the actual price the size will cost. This is the maximum price of all prices
   * within this tier.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly maxPrice?: number;
  /**
   * The number of cores a VM of this size has. This is the minimum number of cores within this
   * tier.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly minNumberOfCores?: number;
  /**
   * The amount of memory available (in GB). This is the minimum amount of memory within this tier.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly minMemory?: number;
}
/**
 * Contains detailed information about a compute size (fragment variant).
 */
export interface SizeInfoFragment {
  /**
   * Represents the actual compute size, e.g. Standard_A2_v2.
   */
  computeSize?: string;
  /**
   * The pay-as-you-go price per hour this size will cost. It does not include discounts and may
   * not reflect the actual price the size will cost.
   */
  price?: number;
  /**
   * The number of cores a VM of this size has.
   */
  numberOfCores?: number;
  /**
   * The amount of memory available (in GB).
   */
  memory?: number;
}
/**
 * Represents a size category supported by this Lab Account (small, medium or large)
 * (fragment variant without the server-populated aggregate properties).
 */
export interface EnvironmentSizeFragment {
  /**
   * The size category. Possible values include: 'Basic', 'Standard', 'Performance'
   */
  name?: ManagedLabVmSize;
  /**
   * Represents a set of compute sizes that can serve this given size type
   */
  vmSizes?: SizeInfoFragment[];
}
/**
 * The reference information for an Azure Marketplace image.
 */
export interface GalleryImageReference {
  /**
   * The offer of the gallery image.
   */
  offer?: string;
  /**
   * The publisher of the gallery image.
   */
  publisher?: string;
  /**
   * The SKU of the gallery image.
   */
  sku?: string;
  /**
   * The OS type of the gallery image.
   */
  osType?: string;
  /**
   * The version of the gallery image.
   */
  version?: string;
}
/**
 * Represents an image from the Azure Marketplace.
 */
export interface GalleryImage extends Resource {
  /**
   * The author of the gallery image.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly author?: string;
  /**
   * The creation date of the gallery image.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly createdDate?: Date;
  /**
   * The description of the gallery image.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly description?: string;
  /**
   * The image reference of the gallery image.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly imageReference?: GalleryImageReference;
  /**
   * The icon of the gallery image.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly icon?: string;
  /**
   * Indicates whether this gallery image is enabled.
   */
  isEnabled?: boolean;
  /**
   * Indicates whether this gallery image has been overridden for this lab account
   */
  isOverride?: boolean;
  /**
   * The third party plan that applies to this image
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly planId?: string;
  /**
   * Indicates if the plan has been authorized for programmatic deployment.
   */
  isPlanAuthorized?: boolean;
  /**
   * The provisioning status of the resource.
   */
  provisioningState?: string;
  /**
   * The unique immutable identifier of a resource (Guid).
   */
  uniqueIdentifier?: string;
  /**
   * The details of the latest operation. ex: status, error
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly latestOperationResult?: LatestOperationResult;
}
/**
 * Represents an image from the Azure Marketplace (fragment variant with writable properties only).
 */
export interface GalleryImageFragment extends Resource {
  /**
   * Indicates whether this gallery image is enabled.
   */
  isEnabled?: boolean;
  /**
   * Indicates whether this gallery image has been overridden for this lab account
   */
  isOverride?: boolean;
  /**
   * Indicates if the plan has been authorized for programmatic deployment.
   */
  isPlanAuthorized?: boolean;
  /**
   * The provisioning status of the resource.
   */
  provisioningState?: string;
  /**
   * The unique immutable identifier of a resource (Guid).
   */
  uniqueIdentifier?: string;
}
/**
 * The reference information for an Azure Marketplace image (fragment variant).
 */
export interface GalleryImageReferenceFragment {
  /**
   * The offer of the gallery image.
   */
  offer?: string;
  /**
   * The publisher of the gallery image.
   */
  publisher?: string;
  /**
   * The SKU of the gallery image.
   */
  sku?: string;
  /**
   * The OS type of the gallery image.
   */
  osType?: string;
  /**
   * The version of the gallery image.
   */
  version?: string;
}
/**
 * Represents the details of an environment, as returned by the getEnvironment action.
 */
export interface GetEnvironmentResponse {
  /**
   * Details of the environment
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly environment?: EnvironmentDetails;
}
/**
 * Represents the PersonalPreferences for the user.
 */
export interface GetPersonalPreferencesResponse {
  /**
   * Id to be used by the cache orchestrator
   */
  id?: string;
  /**
   * Array of favorite lab resource ids
   */
  favoriteLabResourceIds?: string[];
}
/**
 * Represents the availability information for a size category.
 */
export interface SizeAvailability {
  /**
   * The category of the size (Basic, Standard, Performance). Possible values include: 'Basic',
   * 'Standard', 'Performance'
   */
  sizeCategory?: ManagedLabVmSize;
  /**
   * Whether or not this size category is available
   */
  isAvailable?: boolean;
}
/**
 * The availability information of sizes for a single region.
 */
export interface RegionalAvailability {
  /**
   * The corresponding region
   */
  region?: string;
  /**
   * List of all the size information for the region
   */
  sizeAvailabilities?: SizeAvailability[];
}
/**
 * The response model from the GetRegionalAvailability action.
 */
export interface GetRegionalAvailabilityResponse {
  /**
   * Availability information for different size categories per region
   */
  regionalAvailability?: RegionalAvailability[];
}
/**
 * Represents a lab.
 */
export interface Lab extends Resource {
  /**
   * Maximum number of users allowed in the lab.
   */
  maxUsersInLab?: number;
  /**
   * Maximum value MaxUsersInLab can be set to, as specified by the service
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly userQuota?: number;
  /**
   * Invitation code that users can use to join a lab.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly invitationCode?: string;
  /**
   * Object id of the user that created the lab.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly createdByObjectId?: string;
  /**
   * Maximum duration a user can use an environment for in the lab.
   */
  usageQuota?: string;
  /**
   * Lab user access mode (open to all vs. restricted to those listed on the lab). Possible values
   * include: 'Restricted', 'Open'
   */
  userAccessMode?: LabUserAccessMode;
  /**
   * The user principal name of the lab creator.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly createdByUserPrincipalName?: string;
  /**
   * Creation date for the lab
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly createdDate?: Date;
  /**
   * The provisioning status of the resource.
   */
  provisioningState?: string;
  /**
   * The unique immutable identifier of a resource (Guid).
   */
  uniqueIdentifier?: string;
  /**
   * The details of the latest operation. ex: status, error
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly latestOperationResult?: LatestOperationResult;
}
/**
 * Represents the size configuration under the lab account.
 */
export interface SizeConfigurationProperties {
  /**
   * Represents a list of size categories supported by this Lab Account (Small, Medium, Large)
   */
  environmentSizes?: EnvironmentSize[];
}
/**
 * Represents a lab account.
 */
export interface LabAccount extends Resource {
  /**
   * Represents the size configuration under the lab account
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly sizeConfiguration?: SizeConfigurationProperties;
  /**
   * Represents whether region selection is enabled
   */
  enabledRegionSelection?: boolean;
  /**
   * The provisioning status of the resource.
   */
  provisioningState?: string;
  /**
   * The unique immutable identifier of a resource (Guid).
   */
  uniqueIdentifier?: string;
  /**
   * The details of the latest operation. ex: status, error
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly latestOperationResult?: LatestOperationResult;
}
/**
 * Represents a lab account (fragment variant with writable properties only).
 */
export interface LabAccountFragment extends Resource {
  /**
   * Represents whether region selection is enabled
   */
  enabledRegionSelection?: boolean;
  /**
   * The provisioning status of the resource.
   */
  provisioningState?: string;
  /**
   * The unique immutable identifier of a resource (Guid).
   */
  uniqueIdentifier?: string;
}
/**
 * This represents the details about a lab that the User is in, and its state.
 */
export interface LabDetails {
  /**
   * Name of the lab
   */
  name?: string;
  /**
   * The provisioning state of the lab.
   */
  provisioningState?: string;
  /**
   * The resource Id of the lab.
   */
  id?: string;
  /**
   * The maximum duration a user can use a VM in this lab.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly usageQuota?: string;
}
/**
 * Represents a lab (fragment variant with writable properties only).
 */
export interface LabFragment extends Resource {
  /**
   * Maximum number of users allowed in the lab.
   */
  maxUsersInLab?: number;
  /**
   * Maximum duration a user can use an environment for in the lab.
   */
  usageQuota?: string;
  /**
   * Lab user access mode (open to all vs. restricted to those listed on the lab). Possible values
   * include: 'Restricted', 'Open'
   */
  userAccessMode?: LabUserAccessMode;
  /**
   * The provisioning status of the resource.
   */
  provisioningState?: string;
  /**
   * The unique immutable identifier of a resource (Guid).
   */
  uniqueIdentifier?: string;
}
/**
 * Represents the payload to list environments owned by a user.
 */
export interface ListEnvironmentsPayload {
  /**
   * The resource Id of the lab whose environments are listed
   */
  labId?: string;
}
/**
 * Represents the list of environments owned by a user.
 */
export interface ListEnvironmentsResponse {
  /**
   * List of all the environments owned by the user
   */
  environments?: EnvironmentDetails[];
}
/**
 * Lists the labs owned by a user.
 */
export interface ListLabsResponse {
  /**
   * List of all the labs owned by the user
   */
  labs?: LabDetails[];
}
/**
 * Payload to get the status of a batch of operations.
 */
export interface OperationBatchStatusPayload {
  /**
   * The operation urls of the long running operations to query. Required.
   */
  urls: string[];
}
/**
 * Represents the status of an operation that used the batch API.
 */
export interface OperationBatchStatusResponseItem {
  /**
   * The operation url of the long running operation for an environment
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly operationUrl?: string;
  /**
   * The status of the long running operation for an environment
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly status?: string;
}
/**
 * Status details of the long running operations queried as a batch.
 */
export interface OperationBatchStatusResponse {
  /**
   * Gets a collection of items that contain the operation url and status.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly items?: OperationBatchStatusResponseItem[];
}
/**
 * Error details for the operation in case of a failure.
 */
export interface OperationError {
  /**
   * The error code of the operation error.
   */
  code?: string;
  /**
   * The error message of the operation error.
   */
  message?: string;
}
/**
 * The object that describes an operation for display purposes.
 */
export interface OperationMetadataDisplay {
  /**
   * Friendly name of the resource provider
   */
  provider?: string;
  /**
   * Resource type on which the operation is performed.
   */
  resource?: string;
  /**
   * Operation type: read, write, delete, listKeys/action, etc.
   */
  operation?: string;
  /**
   * Friendly description of the operation
   */
  description?: string;
}
/**
 * The REST API operation supported by the ManagedLab ResourceProvider.
 */
export interface OperationMetadata {
  /**
   * Operation name, in the form {provider}/{resource}/{operation}
   */
  name?: string;
  /**
   * The object that describes the operation for display purposes
   */
  display?: OperationMetadataDisplay;
}
/**
 * An Operation Result.
 */
export interface OperationResult {
  /**
   * The operation status.
   */
  status?: string;
  /**
   * Error details for the operation in case of a failure.
   */
  error?: OperationError;
}
/**
 * Payload to get the status of an operation.
 */
export interface OperationStatusPayload {
  /**
   * The operation url of the long running operation to query. Required.
   */
  operationUrl: string;
}
/**
 * Status details of the long running operation for an environment.
 */
export interface OperationStatusResponse {
  /**
   * The status of the long running operation for an environment
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly status?: string;
}
/**
 * Represents the payload for personal-preferences operations (e.g. favoriting a lab).
 */
export interface PersonalPreferencesOperationsPayload {
  /**
   * Resource Id of the lab account
   */
  labAccountResourceId?: string;
  /**
   * Enum indicating if the user is adding or removing a favorite lab. Possible values include:
   * 'Add', 'Remove'
   */
  addRemove?: AddRemove;
  /**
   * Resource Id of the lab to add/remove from the favorites list
   */
  labResourceId?: string;
}
/**
 * Payload for the Publish operation on an EnvironmentSetting.
 */
export interface PublishPayload {
  /**
   * Whether to use the existing VM custom image when publishing.
   */
  useExistingImage?: boolean;
}
/**
 * Represents payload for the Register action.
 */
export interface RegisterPayload {
  /**
   * The registration code of the lab.
   */
  registrationCode?: string;
}
/**
 * Represents the payload for resetting passwords.
 */
export interface ResetPasswordPayload {
  /**
   * The resource Id of the environment. Required.
   */
  environmentId: string;
  /**
   * The username for which the password will be reset.
   */
  username?: string;
  /**
   * The password to assign to the user specified in `username`.
   */
  password?: string;
}
/**
 * Represents the size configuration under the lab account (fragment variant).
 */
export interface SizeConfigurationPropertiesFragment {
  /**
   * Represents a list of size categories supported by this Lab Account (Small, Medium, Large)
   */
  environmentSizes?: EnvironmentSizeFragment[];
}
/**
 * The User registered to a lab.
 */
export interface User extends Resource {
  /**
   * The user email address, as it was specified during registration.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly email?: string;
  /**
   * The user family name, as it was specified during registration.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly familyName?: string;
  /**
   * The user given name, as it was specified during registration.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly givenName?: string;
  /**
   * The user tenant ID, as it was specified during registration.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly tenantId?: string;
  /**
   * How long the user has used their virtual machines in this lab
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly totalUsage?: string;
  /**
   * The provisioning status of the resource.
   */
  provisioningState?: string;
  /**
   * The unique immutable identifier of a resource (Guid).
   */
  uniqueIdentifier?: string;
  /**
   * The details of the latest operation. ex: status, error
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly latestOperationResult?: LatestOperationResult;
}
/**
 * The User registered to a lab (fragment variant with writable properties only).
 */
export interface UserFragment extends Resource {
  /**
   * The provisioning status of the resource.
   */
  provisioningState?: string;
  /**
   * The unique immutable identifier of a resource (Guid).
   */
  uniqueIdentifier?: string;
}
/**
 * Optional parameters for the GlobalUsers getEnvironment operation.
 */
export interface GlobalUsersGetEnvironmentOptionalParams extends msRest.RequestOptionsBase {
  /**
   * Specify the $expand query. Example: 'properties($expand=environment)'
   */
  expand?: string;
}
/**
 * Optional parameters for the LabAccounts listBySubscription operation.
 */
export interface LabAccountsListBySubscriptionOptionalParams extends msRest.RequestOptionsBase {
  /**
   * Specify the $expand query. Example: 'properties($expand=sizeConfiguration)'
   */
  expand?: string;
  /**
   * The filter to apply to the operation.
   */
  filter?: string;
  /**
   * The maximum number of resources to return from the operation.
   */
  top?: number;
  /**
   * The ordering expression for the results, using OData notation.
   */
  orderby?: string;
}
/**
 * Optional parameters for the LabAccounts listByResourceGroup operation.
 */
export interface LabAccountsListByResourceGroupOptionalParams extends msRest.RequestOptionsBase {
  /**
   * Specify the $expand query. Example: 'properties($expand=sizeConfiguration)'
   */
  expand?: string;
  /**
   * The filter to apply to the operation.
   */
  filter?: string;
  /**
   * The maximum number of resources to return from the operation.
   */
  top?: number;
  /**
   * The ordering expression for the results, using OData notation.
   */
  orderby?: string;
}
/**
 * Optional parameters for the LabAccounts get operation.
 */
export interface LabAccountsGetOptionalParams extends msRest.RequestOptionsBase {
  /**
   * Specify the $expand query. Example: 'properties($expand=sizeConfiguration)'
   */
  expand?: string;
}
/**
 * Optional parameters for the GalleryImages list operation.
 */
export interface GalleryImagesListOptionalParams extends msRest.RequestOptionsBase {
  /**
   * Specify the $expand query. Example: 'properties($select=author)'
   */
  expand?: string;
  /**
   * The filter to apply to the operation.
   */
  filter?: string;
  /**
   * The maximum number of resources to return from the operation.
   */
  top?: number;
  /**
   * The ordering expression for the results, using OData notation.
   */
  orderby?: string;
}
/**
 * Optional parameters for the GalleryImages get operation.
 */
export interface GalleryImagesGetOptionalParams extends msRest.RequestOptionsBase {
  /**
   * Specify the $expand query. Example: 'properties($select=author)'
   */
  expand?: string;
}
/**
 * Optional parameters for the Labs list operation.
 */
export interface LabsListOptionalParams extends msRest.RequestOptionsBase {
  /**
   * Specify the $expand query. Example: 'properties($select=maxUsersInLab)'
   */
  expand?: string;
  /**
   * The filter to apply to the operation.
   */
  filter?: string;
  /**
   * The maximum number of resources to return from the operation.
   */
  top?: number;
  /**
   * The ordering expression for the results, using OData notation.
   */
  orderby?: string;
}
/**
 * Optional parameters for the Labs get operation.
 */
export interface LabsGetOptionalParams extends msRest.RequestOptionsBase {
  /**
   * Specify the $expand query. Example: 'properties($select=maxUsersInLab)'
   */
  expand?: string;
}
/**
 * Optional parameters for the EnvironmentSettings list operation.
 */
export interface EnvironmentSettingsListOptionalParams extends msRest.RequestOptionsBase {
  /**
   * Specify the $expand query. Example: 'properties($select=publishingState)'
   */
  expand?: string;
  /**
   * The filter to apply to the operation.
   */
  filter?: string;
  /**
   * The maximum number of resources to return from the operation.
   */
  top?: number;
  /**
   * The ordering expression for the results, using OData notation.
   */
  orderby?: string;
}
/**
 * Optional parameters for the EnvironmentSettings get operation.
 */
export interface EnvironmentSettingsGetOptionalParams extends msRest.RequestOptionsBase {
  /**
   * Specify the $expand query. Example: 'properties($select=publishingState)'
   */
  expand?: string;
}
/**
 * Optional parameters for the Environments list operation.
 */
export interface EnvironmentsListOptionalParams extends msRest.RequestOptionsBase {
  /**
   * Specify the $expand query. Example: 'properties($expand=networkInterface)'
   */
  expand?: string;
  /**
   * The filter to apply to the operation.
   */
  filter?: string;
  /**
   * The maximum number of resources to return from the operation.
   */
  top?: number;
  /**
   * The ordering expression for the results, using OData notation.
   */
  orderby?: string;
}
/**
 * Optional parameters for the Environments get operation.
 */
export interface EnvironmentsGetOptionalParams extends msRest.RequestOptionsBase {
  /**
   * Specify the $expand query. Example: 'properties($expand=networkInterface)'
   */
  expand?: string;
}
/**
 * Optional parameters for the Users list operation.
 */
export interface UsersListOptionalParams extends msRest.RequestOptionsBase {
  /**
   * Specify the $expand query. Example: 'properties($select=email)'
   */
  expand?: string;
  /**
   * The filter to apply to the operation.
   */
  filter?: string;
  /**
   * The maximum number of resources to return from the operation.
   */
  top?: number;
  /**
   * The ordering expression for the results, using OData notation.
   */
  orderby?: string;
}
/**
 * Optional parameters for the Users get operation.
 */
export interface UsersGetOptionalParams extends msRest.RequestOptionsBase {
  /**
   * Specify the $expand query. Example: 'properties($select=email)'
   */
  expand?: string;
}
/**
 * Client-construction options for ManagedLabsClient.
 */
export interface ManagedLabsClientOptions extends AzureServiceClientOptions {
  /** Base URI for requests — presumably overrides the default service endpoint; confirm. */
  baseUri?: string;
}
/**
 * @interface
 * Result of the request to list REST API operations. The result itself is an array of
 * OperationMetadata with a continuation link attached.
 * @extends Array<OperationMetadata>
 */
export interface ProviderOperationResult extends Array<OperationMetadata> {
  /**
   * URL to get the next set of operation list results if there are any.
   * **NOTE: This property will not be serialized. It can only be populated by the server.**
   */
  readonly nextLink?: string;
}
/**
 * @interface
 * The response of a list operation: an array of LabAccount with a continuation link.
 * @extends Array<LabAccount>
 */
export interface ResponseWithContinuationLabAccount extends Array<LabAccount> {
  /**
   * Link for the next set of results.
   */
  nextLink?: string;
}
/**
 * @interface
 * The response of a list operation: an array of GalleryImage with a continuation link.
 * @extends Array<GalleryImage>
 */
export interface ResponseWithContinuationGalleryImage extends Array<GalleryImage> {
  /**
   * Link for the next set of results.
   */
  nextLink?: string;
}
/**
 * @interface
 * The response of a list operation: an array of Lab with a continuation link.
 * @extends Array<Lab>
 */
export interface ResponseWithContinuationLab extends Array<Lab> {
  /**
   * Link for the next set of results.
   */
  nextLink?: string;
}
/**
 * @interface
 * The response of a list operation: an array of EnvironmentSetting with a continuation link.
 * @extends Array<EnvironmentSetting>
 */
export interface ResponseWithContinuationEnvironmentSetting extends Array<EnvironmentSetting> {
  /**
   * Link for the next set of results.
   */
  nextLink?: string;
}
/**
 * @interface
 * The response of a list operation: an array of Environment with a continuation link.
 * @extends Array<Environment>
 */
export interface ResponseWithContinuationEnvironment extends Array<Environment> {
  /**
   * Link for the next set of results.
   */
  nextLink?: string;
}
/**
 * @interface
 * The response of a list operation: an array of User with a continuation link.
 * @extends Array<User>
 */
export interface ResponseWithContinuationUser extends Array<User> {
  /**
   * Link for the next set of results.
   */
  nextLink?: string;
}
/**
 * Defines values for ManagedLabVmSize.
 * Possible values include: 'Basic', 'Standard', 'Performance'
 * Modeled as a string literal union rather than a TypeScript enum.
 * @readonly
 * @enum {string}
 */
export type ManagedLabVmSize = 'Basic' | 'Standard' | 'Performance';
/**
* Defines values for PublishingState.
* Possible values include: 'Draft', 'Publishing', 'Published', 'PublishFailed', 'Scaling'
* @readonly
* @enum {string}
*/
export type PublishingState = 'Draft' | 'Publishing' | 'Published' | 'PublishFailed' | 'Scaling';
/**
* Defines values for ConfigurationState.
* Possible values include: 'NotApplicable', 'Completed'
* @readonly
* @enum {string}
*/
export type ConfigurationState = 'NotApplicable' | 'Completed';
/**
* Defines values for LabUserAccessMode.
* Possible values include: 'Restricted', 'Open'
* @readonly
* @enum {string}
*/
export type LabUserAccessMode = 'Restricted' | 'Open';
/**
* Defines values for AddRemove.
* Possible values include: 'Add', 'Remove'
* @readonly
* @enum {string}
*/
export type AddRemove = 'Add' | 'Remove';
/**
* Contains response data for the list operation.
*/
export type ProviderOperationsListResponse = ProviderOperationResult & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: ProviderOperationResult;
};
};
/**
* Contains response data for the listNext operation.
*/
export type ProviderOperationsListNextResponse = ProviderOperationResult & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: ProviderOperationResult;
};
};
/**
* Contains response data for the getEnvironment operation.
*/
export type GlobalUsersGetEnvironmentResponse = GetEnvironmentResponse & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: GetEnvironmentResponse;
};
};
/**
* Contains response data for the getOperationBatchStatus operation.
*/
export type GlobalUsersGetOperationBatchStatusResponse = OperationBatchStatusResponse & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: OperationBatchStatusResponse;
};
};
/**
* Contains response data for the getOperationStatus operation.
*/
export type GlobalUsersGetOperationStatusResponse = OperationStatusResponse & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: OperationStatusResponse;
};
};
/**
* Contains response data for the getPersonalPreferences operation.
*/
export type GlobalUsersGetPersonalPreferencesResponse = GetPersonalPreferencesResponse & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: GetPersonalPreferencesResponse;
};
};
/**
* Contains response data for the listEnvironments operation.
*/
export type GlobalUsersListEnvironmentsResponse = ListEnvironmentsResponse & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: ListEnvironmentsResponse;
};
};
/**
* Contains response data for the listLabs operation.
*/
export type GlobalUsersListLabsResponse = ListLabsResponse & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: ListLabsResponse;
};
};
/**
* Contains response data for the listBySubscription operation.
*/
export type LabAccountsListBySubscriptionResponse = ResponseWithContinuationLabAccount & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: ResponseWithContinuationLabAccount;
};
};
/**
* Contains response data for the listByResourceGroup operation.
*/
export type LabAccountsListByResourceGroupResponse = ResponseWithContinuationLabAccount & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: ResponseWithContinuationLabAccount;
};
};
/**
* Contains response data for the get operation.
*/
export type LabAccountsGetResponse = LabAccount & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: LabAccount;
};
};
/**
* Contains response data for the createOrUpdate operation.
*/
export type LabAccountsCreateOrUpdateResponse = LabAccount & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: LabAccount;
};
};
/**
* Contains response data for the update operation.
*/
export type LabAccountsUpdateResponse = LabAccount & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: LabAccount;
};
};
/**
* Contains response data for the getRegionalAvailability operation.
*/
export type LabAccountsGetRegionalAvailabilityResponse = GetRegionalAvailabilityResponse & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: GetRegionalAvailabilityResponse;
};
};
/**
* Contains response data for the listBySubscriptionNext operation.
*/
export type LabAccountsListBySubscriptionNextResponse = ResponseWithContinuationLabAccount & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: ResponseWithContinuationLabAccount;
};
};
/**
* Contains response data for the listByResourceGroupNext operation.
*/
export type LabAccountsListByResourceGroupNextResponse = ResponseWithContinuationLabAccount & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: ResponseWithContinuationLabAccount;
};
};
/**
* Contains response data for the get operation.
*/
export type OperationsGetResponse = OperationResult & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: OperationResult;
};
};
/**
* Contains response data for the list operation.
*/
export type GalleryImagesListResponse = ResponseWithContinuationGalleryImage & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: ResponseWithContinuationGalleryImage;
};
};
/**
* Contains response data for the get operation.
*/
export type GalleryImagesGetResponse = GalleryImage & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: GalleryImage;
};
};
/**
* Contains response data for the createOrUpdate operation.
*/
export type GalleryImagesCreateOrUpdateResponse = GalleryImage & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: GalleryImage;
};
};
/**
* Contains response data for the update operation.
*/
export type GalleryImagesUpdateResponse = GalleryImage & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: GalleryImage;
};
};
/**
* Contains response data for the listNext operation.
*/
export type GalleryImagesListNextResponse = ResponseWithContinuationGalleryImage & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: ResponseWithContinuationGalleryImage;
};
};
/**
* Contains response data for the list operation.
*/
export type LabsListResponse = ResponseWithContinuationLab & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: ResponseWithContinuationLab;
};
};
/**
* Contains response data for the get operation.
*/
export type LabsGetResponse = Lab & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: Lab;
};
};
/**
* Contains response data for the createOrUpdate operation.
*/
export type LabsCreateOrUpdateResponse = Lab & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: Lab;
};
};
/**
* Contains response data for the update operation.
*/
export type LabsUpdateResponse = Lab & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: Lab;
};
};
/**
* Contains response data for the listNext operation.
*/
export type LabsListNextResponse = ResponseWithContinuationLab & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: ResponseWithContinuationLab;
};
};
/**
* Contains response data for the list operation.
*/
export type EnvironmentSettingsListResponse = ResponseWithContinuationEnvironmentSetting & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: ResponseWithContinuationEnvironmentSetting;
};
};
/**
* Contains response data for the get operation.
*/
export type EnvironmentSettingsGetResponse = EnvironmentSetting & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: EnvironmentSetting;
};
};
/**
* Contains response data for the createOrUpdate operation.
*/
export type EnvironmentSettingsCreateOrUpdateResponse = EnvironmentSetting & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: EnvironmentSetting;
};
};
/**
* Contains response data for the update operation.
*/
export type EnvironmentSettingsUpdateResponse = EnvironmentSetting & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: EnvironmentSetting;
};
};
/**
* Contains response data for the beginCreateOrUpdate operation.
*/
export type EnvironmentSettingsBeginCreateOrUpdateResponse = EnvironmentSetting & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: EnvironmentSetting;
};
};
/**
* Contains response data for the listNext operation.
*/
export type EnvironmentSettingsListNextResponse = ResponseWithContinuationEnvironmentSetting & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: ResponseWithContinuationEnvironmentSetting;
};
};
/**
* Contains response data for the list operation.
*/
export type EnvironmentsListResponse = ResponseWithContinuationEnvironment & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: ResponseWithContinuationEnvironment;
};
};
/**
* Contains response data for the get operation.
*/
export type EnvironmentsGetResponse = Environment & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: Environment;
};
};
/**
* Contains response data for the createOrUpdate operation.
*/
export type EnvironmentsCreateOrUpdateResponse = Environment & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: Environment;
};
};
/**
* Contains response data for the update operation.
*/
export type EnvironmentsUpdateResponse = Environment & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: Environment;
};
};
/**
* Contains response data for the listNext operation.
*/
export type EnvironmentsListNextResponse = ResponseWithContinuationEnvironment & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: ResponseWithContinuationEnvironment;
};
};
/**
* Contains response data for the list operation.
*/
export type UsersListResponse = ResponseWithContinuationUser & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: ResponseWithContinuationUser;
};
};
/**
* Contains response data for the get operation.
*/
export type UsersGetResponse = User & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: User;
};
};
/**
* Contains response data for the createOrUpdate operation.
*/
export type UsersCreateOrUpdateResponse = User & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: User;
};
};
/**
* Contains response data for the update operation.
*/
export type UsersUpdateResponse = User & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: User;
};
};
/**
* Contains response data for the listNext operation.
*/
export type UsersListNextResponse = ResponseWithContinuationUser & {
/**
* The underlying HTTP response.
*/
_response: msRest.HttpResponse & {
/**
* The response body as text (string format)
*/
bodyAsText: string;
/**
* The response body as parsed JSON or XML
*/
parsedBody: ResponseWithContinuationUser;
};
}; | the_stack |
import {HttpClient} from "@angular/common/http";
import {Compiler, Component, Inject, Injector, Input, NgModuleFactory, NgModuleFactoryLoader, OnChanges, OnDestroy, OnInit, SimpleChanges, Type} from "@angular/core";
import {FormGroup} from "@angular/forms";
import {Observable} from "rxjs/Observable";
import {ArrayObservable} from "rxjs/observable/ArrayObservable";
import {empty} from "rxjs/observable/empty";
import {fromPromise} from "rxjs/observable/fromPromise";
import {catchError} from "rxjs/operators/catchError";
import {concatMap} from "rxjs/operators/concatMap";
import {filter} from "rxjs/operators/filter";
import {find} from "rxjs/operators/find";
import {map} from "rxjs/operators/map";
import {single} from "rxjs/operators/single";
import {Subscription} from "rxjs/Subscription";
import {EmptyError} from "rxjs/util/EmptyError";
import {ProcessorRef} from "../../../../../../lib";
import {UiComponentsService} from "../../../../services/UiComponentsService";
import {ProcessorTemplate} from "./processor-template";
// SystemJS module loader is provided globally by the page; no typings are bundled.
declare const SystemJS: any;
/**
 * Rendering states for the processor field component.
 */
enum State {
ERROR = "ERROR", // loading/compiling the custom template failed; `error` holds details
FORM = "FORM", // no custom template matched; the generic form is used
LOADING = "LOADING", // initial state; template lookup/compilation in progress
TEMPLATE = "TEMPLATE", // custom template component resolved and ready to render
EMPTY = "EMPTY" // no processor input was provided
}
@Component({
selector: "feed-details-processor-field",
templateUrl: "./feed-details-processor-field.component.html"
})
export class FeedDetailsProcessorFieldComponent implements OnInit, OnChanges, OnDestroy {
@Input()
processor: ProcessorRef;
@Input()
readonly: boolean;
childInjector: Injector;
childModule: NgModuleFactory<any>;
childType: Type<any>;
error: string;
/**
* Map of the forms associated for each processor
* @type {{}}
*/
forms : {[key:string]: FormGroup} = {};
state = State.LOADING;
statusSubscription: Subscription;
private isSystemJsSetup = false;
constructor(private http: HttpClient, private injector: Injector, private moduleFactoryLoader: NgModuleFactoryLoader,
@Inject("UiComponentsService") private uiComponentsService: UiComponentsService, private _compiler: Compiler) {
}
ngOnInit(){
if(this.processor == undefined){
this.state = State.EMPTY;
}
}
ngOnDestroy(): void {
if (this.statusSubscription != null) {
this.statusSubscription.unsubscribe();
}
}
/**
* Ensure the current processor is in the form map
* @private
*/
private _ensureProcessorForm(){
if(this.forms[this.processor.id] == undefined){
this.forms[this.processor.id] = new FormGroup({});
}
}
/**
* Get the form for the current processor
* @return {}
*/
getProcessorForm(){
this._ensureProcessorForm();
return this.forms[this.processor.id];
}
ngOnChanges(changes: SimpleChanges): void {
if (changes.processor || (changes.readonly && changes.readonly.currentValue == false)) {
// Unsubscribe from form status changes
if (this.statusSubscription != null) {
this.statusSubscription.unsubscribe();
}
let processor = changes.processor != undefined ? changes.processor.currentValue : this.processor;
let previousProcessorValue = changes.processor != undefined ? changes.processor.previousValue: undefined;
if(processor != undefined) {
if ((changes.processor && changes.processor.currentValue) || (changes.readonly && changes.readonly.currentValue == false)) {
// Ensure form state matches readonly state
this.statusSubscription = this.processor.form.statusChanges.subscribe(status => {
if (this.readonly === true && status !== "DISABLED") {
this.processor.form.disable();
}
});
// Fetch template and update state
this.getProcessorTemplate().pipe(
single(),
catchError(err => {
if (err instanceof EmptyError) {
this.state = State.FORM;
processor.control = this.getProcessorForm();
return empty();
} else {
throw err;
}
})
).subscribe(null, (err: any) => {
console.error(err);
this.error = err;
this.state = State.ERROR;
});
} else {
this.childType = null;
this.state = State.LOADING;
if (previousProcessorValue) {
previousProcessorValue.control = null;
}
}
}
}
// Ensure form state matches readonly state
if (changes.readonly && this.processor != null) {
if (this.readonly) {
this.processor.form.disable();
this.getProcessorForm().disable();
} else {
this.processor.form.enable();
this.getProcessorForm().enable();
}
}
}
private getProcessorTemplate(): Observable<void> {
return fromPromise(this.uiComponentsService.getProcessorTemplates()).pipe(
concatMap((templates: ProcessorTemplate[]) => ArrayObservable.create(templates)),
find(template => {
if (template.module != null && template.module != "" && (template.processorDisplayName == null || template.processorDisplayName === this.processor.name)) {
const match = template.processorTypes.find(type => type === this.processor.type);
return typeof match !== "undefined";
} else {
return false;
}
}),
filter(template => typeof template !== "undefined"),
concatMap(template => {
// const template = args[0];
// const kyloModule = args[1];
if (!this.isSystemJsSetup) {
this.setupSystemJs();
this.isSystemJsSetup = true;
}
const split = template.module.split('#');
const module = split[0];
const exportName = split[1];
return SystemJS.import("js/" + module)
.then((module: any) => module[exportName])
.then((type: any) => this.checkNotEmpty(type, module, exportName))
.then((type: any) => this._compiler.compileModuleAsync(type));
// .then((x: any) => [x, kyloModule]);
}),
concatMap(template => SystemJS.import("@kylo/feed").then(kyloModule => [template, kyloModule])),
map((imports: any) => {
const moduleFactory = imports[0];
const kyloModule = imports[1];
// Find processor control
const module = moduleFactory.create(this.injector);
const processorControl = module.injector.get(kyloModule.ProcessorControl as any).find((control:any) => control.supportsProcessorType(this.processor.type));
if (typeof processorControl === "undefined" || processorControl == null) {
throw new Error("Missing ProcessorControl provider for processor type: " + this.processor.type);
}
// Load component and update state
this.childInjector = Injector.create([{provide: kyloModule.ProcessorRef, useValue: this.processor}], module.injector);
this.childModule = moduleFactory;
this.childType = processorControl.component;
this.state = State.TEMPLATE;
return null;
})
);
}
private checkNotEmpty(value: any, modulePath: string, exportName: string): any {
if (!value) {
throw new Error(`Cannot find '${exportName}' in '${modulePath}'`);
}
return value;
}
private setupSystemJs() {
SystemJS.config({
defaultJSExtensions: true,
});
SystemJS.registerDynamic('angular', [], true, function(_require: any, _exports: any, _module: any) {
_module.exports = require('../../../../../../bower_components/angular/angular.min');
});
SystemJS.registerDynamic('@angular/core', [], true, function(_require: any, _exports: any, _module: any) {
_module.exports = require('@angular/core');
});
SystemJS.registerDynamic('@angular/material/dialog', [], true, function(_require: any, _exports: any, _module: any) {
_module.exports = require('@angular/material/dialog');
});
SystemJS.registerDynamic('@angular/material', [], true, function(_require: any, _exports: any, _module: any) {
_module.exports = require('@angular/material');
});
SystemJS.registerDynamic('@angular/material/toolbar', [], true, function(_require: any, _exports: any, _module: any) {
_module.exports = require('@angular/material/toolbar');
});
SystemJS.registerDynamic('@angular/material/divider', [], true, function(_require: any, _exports: any, _module: any) {
_module.exports = require('@angular/material/divider');
});
SystemJS.registerDynamic('@angular/material/checkbox', [], true, function(_require: any, _exports: any, _module: any) {
_module.exports = require('@angular/material/checkbox');
});
SystemJS.registerDynamic('@angular/material/core', [], true, function(_require: any, _exports: any, _module: any) {
_module.exports = require('@angular/material/core');
});
SystemJS.registerDynamic('@angular/material/form-field', [], true, function(_require: any, _exports: any, _module: any) {
_module.exports = require('@angular/material/form-field');
});
SystemJS.registerDynamic('@angular/material/card', [], true, function(_require: any, _exports: any, _module: any) {
_module.exports = require('@angular/material/card');
});
SystemJS.registerDynamic('@angular/material/list', [], true, function(_require: any, _exports: any, _module: any) {
_module.exports = require('@angular/material/list');
});
SystemJS.registerDynamic('@angular/material/tabs', [], true, function(_require: any, _exports: any, _module: any) {
_module.exports = require('@angular/material/tabs');
});
SystemJS.registerDynamic('@angular/common/http', [], true, function(_require: any, _exports: any, _module: any) {
_module.exports = require('@angular/common/http');
});
SystemJS.registerDynamic('@angular/forms', [], true, function(_require: any, _exports: any, _module: any) {
_module.exports = require('@angular/forms');
});
SystemJS.registerDynamic('@angular/material/autocomplete', [], true, function(_require: any, _exports: any, _module: any) {
_module.exports = require('@angular/material/autocomplete');
});
SystemJS.registerDynamic('rxjs/operators', [], true, function(_require: any, _exports: any, _module: any) {
_module.exports = require('../../../../../../node_modules/rxjs/operators');
});
SystemJS.registerDynamic('rxjs/Observable', [], true, function(_require: any, _exports: any, _module: any) {
_module.exports = require('../../../../../../node_modules/rxjs/Observable');
});
SystemJS.registerDynamic('rxjs/Subscription', [], true, function(_require: any, _exports: any, _module: any) {
_module.exports = require('../../../../../../node_modules/rxjs/Subscription');
});
SystemJS.registerDynamic('rxjs/add/operator/do', [], true, function(_require: any, _exports: any, _module: any) {
_module.exports = require('../../../../../../node_modules/rxjs/add/operator/do');
});
SystemJS.registerDynamic('rxjs/add/operator/debounceTime', [], true, function(_require: any, _exports: any, _module: any) {
_module.exports = require('../../../../../../node_modules/rxjs/add/operator/debounceTime');
});
SystemJS.registerDynamic('rxjs/add/observable/merge', [], true, function(_require: any, _exports: any, _module: any) {
_module.exports = require('../../../../../../node_modules/rxjs/add/observable/merge');
});
SystemJS.registerDynamic('rxjs/observable/of', [], true, function(_require: any, _exports: any, _module: any) {
_module.exports = require('../../../../../../node_modules/rxjs/observable/of');
});
SystemJS.registerDynamic('rxjs/operators/catchError', [], true, function(_require: any, _exports: any, _module: any) {
_module.exports = require('../../../../../../node_modules/rxjs/operators/catchError')
});
SystemJS.registerDynamic('rxjs/operators/debounceTime', [], true, function(_require: any, _exports: any, _module: any) {
_module.exports = require('../../../../../../node_modules/rxjs/operators/debounceTime')
});
SystemJS.registerDynamic('rxjs/operators/distinctUntilChanged', [], true, function(_require: any, _exports: any, _module: any) {
_module.exports = require('../../../../../../node_modules/rxjs/operators/distinctUntilChanged')
});
SystemJS.registerDynamic('rxjs/operators/filter', [], true, function(_require: any, _exports: any, _module: any) {
_module.exports = require('../../../../../../node_modules/rxjs/operators/filter')
});
SystemJS.registerDynamic('rxjs/operators/map', [], true, function(_require: any, _exports: any, _module: any) {
_module.exports = require('../../../../../../node_modules/rxjs/operators/map')
});
SystemJS.registerDynamic('rxjs/operators/skip', [], true, function(_require: any, _exports: any, _module: any) {
_module.exports = require('../../../../../../node_modules/rxjs/operators/skip')
});
SystemJS.registerDynamic('rxjs/operators/switchMap', [], true, function(_require: any, _exports: any, _module: any) {
_module.exports = require('../../../../../../node_modules/rxjs/operators/switchMap')
});
SystemJS.registerDynamic('underscore', [], true, function(_require: any, _exports: any, _module: any) {
_module.exports = require('../../../../../../bower_components/underscore/underscore-min')
});
SystemJS.registerDynamic('@angular/common', [], true, function(_require: any, _exports: any, _module: any) {
_module.exports = require('@angular/common');
});
SystemJS.registerDynamic('@angular/flex-layout', [], true, function(_require: any, _exports: any, _module: any) {
_module.exports = require('@angular/flex-layout');
});
SystemJS.registerDynamic('@angular/material/button', [], true, function(_require: any, _exports: any, _module: any) {
_module.exports = require('@angular/material/button');
});
SystemJS.registerDynamic('@angular/material/icon', [], true, function(_require: any, _exports: any, _module: any) {
_module.exports = require('@angular/material/icon');
});
SystemJS.registerDynamic('@angular/material/input', [], true, function(_require: any, _exports: any, _module: any) {
_module.exports = require('@angular/material/input');
});
SystemJS.registerDynamic('@angular/material/progress-spinner', [], true, function(_require: any, _exports: any, _module: any) {
_module.exports = require('@angular/material/progress-spinner');
});
SystemJS.registerDynamic('@angular/material/radio', [], true, function(_require: any, _exports: any, _module: any) {
_module.exports = require('@angular/material/radio');
});
SystemJS.registerDynamic('@angular/material/select', [], true, function(_require: any, _exports: any, _module: any) {
_module.exports = require('@angular/material/select');
});
SystemJS.registerDynamic('@covalent/core/common', [], true, function(_require: any, _exports: any, _module: any) {
_module.exports = require('@covalent/core/common');
});
SystemJS.registerDynamic('@ngx-translate/core', [], true, function(_require: any, _exports: any, _module: any) {
_module.exports = require('@ngx-translate/core');
});
SystemJS.registerDynamic('@covalent/core/chips', [], true, function(_require: any, _exports: any, _module: any) {
_module.exports = require('@covalent/core/chips');
});
SystemJS.registerDynamic('@covalent/core/dialogs', [], true, function(_require: any, _exports: any, _module: any) {
_module.exports = require('@covalent/core/dialogs');
});
SystemJS.registerDynamic('@kylo/feed', [], true, function(_require: any, _exports: any, _module: any) {
_module.exports = require('../../../../../../../../../../target/classes/static/lib/bundles/kylo-feed.umd.min.js');
});
}
} | the_stack |
import * as spec from '@jsii/spec';
import { Assembly } from '@jsii/spec';
import * as fs from 'fs';
import * as path from 'path';
import * as ts from 'typescript';
import { ProjectInfo } from '../project-info';
import { symbolIdentifier } from '../symbol-id';
// Identifiers baked into the generated deprecation-warnings module.
/** File name of the generated module that holds the deprecation-warning code. */
export const WARNINGSCODE_FILE_NAME = '.warnings.jsii.js';
// Name of the generated function that emits a warning (presumably called by the wrappers — confirm in emitted code).
const WARNING_FUNCTION_NAME = 'print';
// Parameter name of each generated per-type check function (the value being validated).
const PARAMETER_NAME = 'p';
// Namespace object under which the generated check functions are grouped.
const NAMESPACE = 'jsiiDeprecationWarnings';
// Local alias used when requiring the module that declares an enum, e.g. `ns.MyEnum.MEMBER`.
const LOCAL_ENUM_NAMESPACE = 'ns';
// Name of the Set tracking objects already being checked, to prevent infinite recursion on cyclic values.
const VISITED_OBJECTS_SET_NAME = 'visitedObjects';
// Name of the generated Error subclass for deprecation violations (role not visible in this chunk — verify in emitted code).
const DEPRECATION_ERROR = 'DeprecationError';
// Name of the generated property-descriptor lookup helper (role not visible in this chunk — verify in emitted code).
const GET_PROPERTY_DESCRIPTOR = 'getPropertyDescriptor';
/**
 * Generates the `.warnings.jsii.js` companion file for an assembly and
 * prepares TypeScript `before`-emit transformers that splice calls to the
 * generated warning functions into compiled code.
 *
 * Usage: call {@link process} once per assembly, then feed
 * {@link customTransformers} to the TypeScript emit pipeline.
 */
export class DeprecationWarningsInjector {
  private transformers: ts.CustomTransformers = {
    before: [],
  };

  public constructor(private readonly typeChecker: ts.TypeChecker) {}

  /**
   * Builds one warning function per type in the assembly (all written to the
   * warnings file in the project root) and installs a transformer that will
   * inject calls to them.
   */
  public process(assembly: Assembly, projectInfo: ProjectInfo) {
    const projectRoot = projectInfo.projectRoot;
    const functionDeclarations: ts.FunctionDeclaration[] = [];
    const types = assembly.types ?? {};
    for (const type of Object.values(types)) {
      const statements: ts.Statement[] = [];
      // Tracks whether any warning was generated; if not, the function body
      // is emitted empty (see createFunctionBlock call below).
      let isEmpty = true;
      // This will add the parameter to the set of visited objects, to prevent infinite recursion
      statements.push(
        ts.createExpressionStatement(
          ts.createCall(
            ts.createPropertyAccess(
              ts.createIdentifier(VISITED_OBJECTS_SET_NAME),
              'add',
            ),
            undefined,
            [ts.createIdentifier(PARAMETER_NAME)],
          ),
        ),
      );
      // Warnings run inside try, so the finally-block below always removes
      // the parameter from the visited set again, even if print() throws.
      const tryStatements = [];
      if (spec.isDeprecated(type) && spec.isEnumType(type)) {
        // The type is deprecated
        tryStatements.push(
          createWarningFunctionCall(type.fqn, type.docs?.deprecated),
        );
        isEmpty = false;
      }
      if (spec.isEnumType(type) && type.locationInModule?.filename) {
        // Enums: require() the defining module so the generated code can
        // compare the parameter against the actual enum member values.
        tryStatements.push(
          createEnumRequireStatement(type.locationInModule?.filename),
        );
        // Bail out early when the enum has duplicate values (see
        // createDuplicateEnumValuesCheck for the rationale).
        tryStatements.push(createDuplicateEnumValuesCheck(type));
        for (const member of Object.values(type.members ?? [])) {
          if (spec.isDeprecated(member)) {
            // The enum member is deprecated
            // Generated condition: p === ns.<EnumName>.<MemberName>
            const condition = ts.createBinary(
              ts.createIdentifier(PARAMETER_NAME),
              ts.SyntaxKind.EqualsEqualsEqualsToken,
              ts.createPropertyAccess(
                ts.createPropertyAccess(
                  ts.createIdentifier(LOCAL_ENUM_NAMESPACE),
                  type.name,
                ),
                member.name,
              ),
            );
            tryStatements.push(
              createWarningFunctionCall(
                `${type.fqn}#${member.name}`,
                member.docs?.deprecated,
                condition,
              ),
            );
            isEmpty = false;
          }
        }
      } else if (spec.isInterfaceType(type) && type.datatype) {
        // Data interfaces (structs): warn per deprecated property, including
        // properties inherited from super-interfaces.
        const { statementsByProp, excludedProps } = processInterfaceType(
          type,
          types,
          assembly,
          projectInfo,
          undefined,
          undefined,
        );
        for (const [name, statement] of statementsByProp.entries()) {
          // excludedProps holds names re-declared non-deprecated somewhere.
          if (!excludedProps.has(name)) {
            tryStatements.push(statement);
            isEmpty = false;
          }
        }
      }
      // try { ...warnings... } finally { visitedObjects.delete(p) }
      statements.push(
        ts.createTry(
          ts.createBlock(tryStatements),
          undefined,
          ts.createBlock([
            ts.createExpressionStatement(
              ts.createCall(
                ts.createPropertyAccess(
                  ts.createIdentifier(VISITED_OBJECTS_SET_NAME),
                  'delete',
                ),
                undefined,
                [ts.createIdentifier(PARAMETER_NAME)],
              ),
            ),
          ]),
        ),
      );
      const paramValue = ts.createParameter(
        undefined,
        undefined,
        undefined,
        PARAMETER_NAME,
      );
      // e.g. "my_module_MyType" — FQN with non-word characters replaced.
      const functionName = fnName(type.fqn);
      const functionDeclaration = ts.createFunctionDeclaration(
        undefined,
        undefined,
        undefined,
        functionName,
        undefined,
        [paramValue],
        undefined,
        createFunctionBlock(isEmpty ? [] : statements),
      );
      functionDeclarations.push(functionDeclaration);
    }
    this.transformers = {
      before: [
        (context) => {
          const transformer = new Transformer(
            this.typeChecker,
            context,
            projectRoot,
            this.buildTypeIndex(assembly),
            assembly,
          );
          return transformer.transform.bind(transformer);
        },
      ],
    };
    generateWarningsFile(projectRoot, functionDeclarations);
  }

  /** Transformers prepared by the last {@link process} call. */
  public get customTransformers(): ts.CustomTransformers {
    return this.transformers;
  }

  /** Maps symbolId -> type for every assembly type that carries a symbolId. */
  private buildTypeIndex(assembly: Assembly): Map<string, spec.Type> {
    const result = new Map<string, spec.Type>();
    for (const type of Object.values(assembly.types ?? {})) {
      const symbolId = type.symbolId;
      if (symbolId) {
        result.set(symbolId, type);
      }
    }
    return result;
  }
}
/**
 * Collects, per property of a data-interface (struct) type, the statement to
 * emit for it: a deprecation warning for deprecated properties, and a nested
 * type-handler call (keyed `<name>_`) for properties whose type has its own
 * warning function. Recurses into super-interfaces, threading the two
 * accumulators through.
 *
 * @param statementsByProp accumulator: property name -> generated statement
 * @param excludedProps    accumulator: names that must NOT warn, because some
 *                         declaration of the property is not deprecated
 * @returns both accumulators
 */
function processInterfaceType(
  type: spec.InterfaceType,
  types: { [p: string]: spec.Type },
  assembly: Assembly,
  projectInfo: ProjectInfo,
  statementsByProp: Map<string, ts.Statement> = new Map<string, ts.Statement>(),
  excludedProps: Set<string> = new Set<string>(),
) {
  for (const prop of Object.values(type.properties ?? {})) {
    const fqn = `${type.fqn}#${prop.name}`;
    if (spec.isDeprecated(prop) || spec.isDeprecated(type)) {
      // If the property individually is deprecated, or the entire type is deprecated
      const deprecatedDocs = prop.docs?.deprecated ?? type.docs?.deprecated;
      // Warn only when the property is actually present: `"name" in p`.
      const statement = createWarningFunctionCall(
        fqn,
        deprecatedDocs,
        ts.createBinary(
          ts.createStringLiteral(prop.name),
          ts.SyntaxKind.InKeyword,
          ts.createIdentifier(PARAMETER_NAME),
        ),
        undefined,
      );
      statementsByProp.set(prop.name, statement);
    } else {
      /* If a prop is not deprecated, we don't want to generate a warning for it,
         even if another property with the same name is deprecated in another
         super-interface. */
      excludedProps.add(prop.name);
    }
    // Nested handling: if the property's type (or collection element type, or
    // first union member) has a warning function, call it on p.<prop>.
    if (
      spec.isNamedTypeReference(prop.type) &&
      Object.keys(types).includes(prop.type.fqn)
    ) {
      const functionName = importedFunctionName(
        prop.type.fqn,
        assembly,
        projectInfo,
      );
      if (functionName) {
        const statement = createTypeHandlerCall(
          functionName,
          `${PARAMETER_NAME}.${prop.name}`,
        );
        // `_` suffix keeps the handler entry from colliding with the
        // warning entry for the same property name.
        statementsByProp.set(`${prop.name}_`, statement);
      }
    } else if (
      spec.isCollectionTypeReference(prop.type) &&
      spec.isNamedTypeReference(prop.type.collection.elementtype)
    ) {
      const functionName = importedFunctionName(
        prop.type.collection.elementtype.fqn,
        assembly,
        projectInfo,
      );
      if (functionName) {
        const statement = createTypeHandlerCall(
          functionName,
          `${PARAMETER_NAME}.${prop.name}`,
        );
        statementsByProp.set(`${prop.name}_`, statement);
      }
    } else if (
      // NOTE(review): only the FIRST member of a union type is inspected
      // here — confirm whether other members should be handled too.
      spec.isUnionTypeReference(prop.type) &&
      spec.isNamedTypeReference(prop.type.union.types[0]) &&
      Object.keys(types).includes(prop.type.union.types[0].fqn)
    ) {
      const functionName = importedFunctionName(
        prop.type.union.types[0].fqn,
        assembly,
        projectInfo,
      );
      if (functionName) {
        const statement = createTypeHandlerCall(
          functionName,
          `${PARAMETER_NAME}.${prop.name}`,
        );
        statementsByProp.set(`${prop.name}_`, statement);
      }
    }
  }
  // We also generate calls to all the supertypes
  for (const interfaceName of type.interfaces ?? []) {
    const assemblies = projectInfo.dependencyClosure.concat(assembly);
    const superType = findType(interfaceName, assemblies);
    if (superType.type) {
      processInterfaceType(
        superType.type as spec.InterfaceType,
        types,
        assembly,
        projectInfo,
        statementsByProp,
        excludedProps,
      );
    }
  }
  return { statementsByProp, excludedProps };
}
/**
 * Derives a valid JS identifier from a jsii FQN by replacing every non-word
 * character (anything outside [A-Za-z0-9_]) with an underscore.
 *
 * Note: the original pattern was `[^\w\d]`; `\d` is a subset of `\w`, so it
 * was redundant — `\W` is equivalent.
 */
function fnName(fqn: string): string {
  return fqn.replace(/\W/g, '_');
}
/**
 * Wraps the given statements into a warning-function body, prefixed by an
 * early return when the parameter is nullish (`p == null` — loose equality
 * deliberately catches both null and undefined). An empty statement list
 * yields an empty body.
 */
function createFunctionBlock(statements: ts.Statement[]): ts.Block {
  if (statements.length === 0) {
    return ts.createBlock([], true);
  }
  const nullishGuard = ts.createIf(
    ts.createBinary(
      ts.createIdentifier(PARAMETER_NAME),
      ts.SyntaxKind.EqualsEqualsToken,
      ts.createNull(),
    ),
    ts.createReturn(),
  );
  return ts.createBlock([nullishGuard, ...statements], true);
}
/**
 * Creates a `print(fqn, message)` statement (optionally namespaced as
 * `jsiiDeprecationWarnings.print(...)`), wrapped in an `if (condition)` when
 * a condition expression is supplied.
 */
function createWarningFunctionCall(
  fqn: string,
  message = '',
  condition?: ts.Expression,
  includeNamespace = false,
): ts.Statement {
  const qualifiedName = includeNamespace
    ? `${NAMESPACE}.${WARNING_FUNCTION_NAME}`
    : WARNING_FUNCTION_NAME;
  const callArguments = [ts.createLiteral(fqn), ts.createLiteral(message)];
  const warningCall = ts.createCall(
    ts.createIdentifier(qualifiedName),
    undefined,
    callArguments,
  );
  const statement = ts.createExpressionStatement(warningCall);
  if (condition) {
    return ts.createIf(condition, statement);
  }
  return statement;
}
/**
 * Writes the `.warnings.jsii.js` file into the project root: the shared
 * runtime helpers (print, getPropertyDescriptor, DeprecationError, the
 * visited-objects Set) plus every generated per-type warning function, all
 * exported via module.exports.
 */
function generateWarningsFile(
  projectRoot: string,
  functionDeclarations: ts.FunctionDeclaration[],
) {
  const names = [...functionDeclarations]
    .map((d) => d.name?.text)
    .filter(Boolean);
  const exportedSymbols = [
    WARNING_FUNCTION_NAME,
    GET_PROPERTY_DESCRIPTOR,
    DEPRECATION_ERROR,
    ...names,
  ].join(',');
  // Runtime source emitted verbatim into the warnings file. Behavior is
  // driven by the JSII_DEPRECATED env var: 'fail' throws, 'warn' (default)
  // logs, 'quiet' does nothing.
  const functionText = `function ${WARNING_FUNCTION_NAME}(name, deprecationMessage) {
  const deprecated = process.env.JSII_DEPRECATED;
  const deprecationMode = ['warn', 'fail', 'quiet'].includes(deprecated) ? deprecated : 'warn';
  const message = \`\${name} is deprecated.\\n  \${deprecationMessage.trim()}\\n  This API will be removed in the next major release.\`;
  switch (deprecationMode) {
    case "fail":
      throw new ${DEPRECATION_ERROR}(message);
    case "warn":
      console.warn("[WARNING]", message);
      break;
  }
}

function ${GET_PROPERTY_DESCRIPTOR}(obj, prop) {
  const descriptor = Object.getOwnPropertyDescriptor(obj, prop);
  if (descriptor) {
    return descriptor;
  }
  const proto = Object.getPrototypeOf(obj);
  const prototypeDescriptor = proto && getPropertyDescriptor(proto, prop);
  if (prototypeDescriptor) {
    return prototypeDescriptor;
  }
  return {};
}

const ${VISITED_OBJECTS_SET_NAME} = new Set();

class ${DEPRECATION_ERROR} extends Error {
  constructor(...args) {
    super(...args);
    Object.defineProperty(this, 'name', {
      configurable: false,
      enumerable: true,
      value: '${DEPRECATION_ERROR}',
      writable: false,
    });
  }
}

module.exports = {${exportedSymbols}}
`;
  const printer = ts.createPrinter({ newLine: ts.NewLineKind.LineFeed });
  // Parse the helper text into a SourceFile so the printer can re-emit it
  // alongside the generated declarations with consistent formatting.
  const resultFile = ts.createSourceFile(
    path.join(projectRoot, WARNINGSCODE_FILE_NAME),
    functionText,
    ts.ScriptTarget.Latest,
    false,
    ts.ScriptKind.JS,
  );
  const declarations = functionDeclarations.map((declaration) =>
    printer.printNode(ts.EmitHint.Unspecified, declaration, resultFile),
  );
  const content = declarations.concat(printer.printFile(resultFile)).join('\n');
  fs.writeFileSync(path.join(projectRoot, WARNINGSCODE_FILE_NAME), content);
}
/**
 * AST transformer that injects deprecation-warning calls into methods,
 * accessors, and constructors of classes known to the assembly, and adds a
 * `require()` of the warnings file to any source file it touched.
 */
class Transformer {
  // Set while visiting a file; gates the require() injection in transform().
  private warningCallsWereInjected = false;

  public constructor(
    private readonly typeChecker: ts.TypeChecker,
    private readonly context: ts.TransformationContext,
    private readonly projectRoot: string,
    private readonly typeIndex: Map<string, spec.Type>,
    private readonly assembly: Assembly,
  ) {}

  /**
   * Entry point handed to the TS emit pipeline. Visits the whole file; if any
   * warnings were injected, prepends a `const jsiiDeprecationWarnings =
   * require(...)` statement pointing at the warnings file.
   */
  public transform<T extends ts.Node>(node: T): T {
    this.warningCallsWereInjected = false;
    const result = this.visitEachChild(node);
    if (ts.isSourceFile(result) && this.warningCallsWereInjected) {
      const importDir = path.relative(
        path.dirname(result.fileName),
        this.projectRoot,
      );
      // Files outside the project root need a relative path; files at the
      // root can use the plain './<file>' form.
      const importPath = importDir.startsWith('..')
        ? unixPath(path.join(importDir, WARNINGSCODE_FILE_NAME))
        : `./${WARNINGSCODE_FILE_NAME}`;
      return ts.updateSourceFileNode(result, [
        createRequireStatement(NAMESPACE, importPath),
        ...result.statements,
      ]) as any;
    }
    return result;
  }

  private visitEachChild<T extends ts.Node>(node: T): T {
    return ts.visitEachChild(node, this.visitor.bind(this), this.context);
  }

  /**
   * Rewrites method/accessor/constructor bodies: the generated warning
   * statements (wrapped in try/rethrow, see wrapWithRethrow) are inserted
   * before the original body statements. For accessors, the "caller" passed
   * to wrapWithRethrow is the getter/setter function itself, obtained via
   * the getPropertyDescriptor helper; for methods it is `this.<name>`;
   * for constructors it is the class itself.
   */
  private visitor<T extends ts.Node>(node: T): ts.VisitResult<T> {
    if (ts.isMethodDeclaration(node) && node.body != null) {
      const statements = this.getStatementsForDeclaration(node);
      this.warningCallsWereInjected =
        this.warningCallsWereInjected || statements.length > 0;
      return ts.updateMethod(
        node,
        node.decorators,
        node.modifiers,
        node.asteriskToken,
        node.name,
        node.questionToken,
        node.typeParameters,
        node.parameters,
        node.type,
        ts.updateBlock(node.body, [
          ...wrapWithRethrow(
            statements,
            ts.createPropertyAccess(
              ts.createThis(),
              node.name.getText(node.getSourceFile()),
            ),
          ),
          ...node.body.statements,
        ]),
      ) as any;
    } else if (ts.isGetAccessorDeclaration(node) && node.body != null) {
      const statements = this.getStatementsForDeclaration(node);
      this.warningCallsWereInjected =
        this.warningCallsWereInjected || statements.length > 0;
      return ts.updateGetAccessor(
        node,
        node.decorators,
        node.modifiers,
        node.name,
        node.parameters,
        node.type,
        ts.updateBlock(node.body, [
          ...wrapWithRethrow(
            statements,
            // jsiiDeprecationWarnings.getPropertyDescriptor(this, '<name>').get
            ts.createPropertyAccess(
              ts.createCall(
                ts.createPropertyAccess(
                  ts.createIdentifier(NAMESPACE),
                  GET_PROPERTY_DESCRIPTOR,
                ),
                undefined,
                [
                  ts.createThis(),
                  ts.createLiteral(node.name.getText(node.getSourceFile())),
                ],
              ),
              'get',
            ),
          ),
          ...node.body.statements,
        ]),
      ) as any;
    } else if (ts.isSetAccessorDeclaration(node) && node.body != null) {
      const statements = this.getStatementsForDeclaration(node);
      this.warningCallsWereInjected =
        this.warningCallsWereInjected || statements.length > 0;
      return ts.updateSetAccessor(
        node,
        node.decorators,
        node.modifiers,
        node.name,
        node.parameters,
        ts.updateBlock(node.body, [
          ...wrapWithRethrow(
            statements,
            ts.createPropertyAccess(
              ts.createCall(
                ts.createPropertyAccess(
                  ts.createIdentifier(NAMESPACE),
                  GET_PROPERTY_DESCRIPTOR,
                ),
                undefined,
                [
                  ts.createThis(),
                  ts.createLiteral(node.name.getText(node.getSourceFile())),
                ],
              ),
              'set',
            ),
          ),
          ...node.body.statements,
        ]),
      ) as any;
    } else if (ts.isConstructorDeclaration(node) && node.body != null) {
      const statements = this.getStatementsForDeclaration(node);
      this.warningCallsWereInjected =
        this.warningCallsWereInjected || statements.length > 0;
      return ts.updateConstructor(
        node,
        node.decorators,
        node.modifiers,
        node.parameters,
        ts.updateBlock(
          node.body,
          // insertStatements places the warnings after a super() call, if any.
          insertStatements(
            node.body,
            wrapWithRethrow(statements, node.parent.name!),
          ),
        ),
      ) as any;
    }
    return this.visitEachChild(node);
  }

  /**
   * Resolves the declaration's enclosing class in the type index (by symbol
   * id) and returns the warning statements for the matching initializer,
   * method, or property — or an empty list when nothing matches.
   */
  private getStatementsForDeclaration(
    node:
      | ts.MethodDeclaration
      | ts.GetAccessorDeclaration
      | ts.SetAccessorDeclaration
      | ts.ConstructorDeclaration,
  ): ts.Statement[] {
    const klass = node.parent;
    const classSymbolId = symbolIdentifier(
      this.typeChecker,
      this.typeChecker.getTypeAtLocation(klass).symbol,
    );
    if (classSymbolId && this.typeIndex.has(classSymbolId)) {
      const classType = this.typeIndex.get(classSymbolId)! as spec.ClassType;
      if (ts.isConstructorDeclaration(node)) {
        const initializer = classType?.initializer;
        if (initializer) {
          return this.getStatements(classType, initializer);
        }
      }
      const methods = classType?.methods ?? [];
      const method = methods.find(
        (method) => method.name === node.name?.getText(),
      );
      if (method) {
        return this.getStatements(classType, method);
      }
      const properties = classType?.properties ?? [];
      const property = properties.find(
        (property) => property.name === node.name?.getText(),
      );
      if (property) {
        return createWarningStatementForElement(property, classType);
      }
    }
    return [];
  }

  /**
   * Warning statements for a callable: a warning for the callable itself (if
   * deprecated) plus, for each parameter whose type lives in this assembly, a
   * call to that type's warning function on the parameter value.
   */
  private getStatements(
    classType: spec.ClassType,
    method: spec.Method | spec.Initializer,
  ) {
    const statements = createWarningStatementForElement(method, classType);
    for (const parameter of Object.values(method.parameters ?? {})) {
      const parameterType =
        this.assembly.types && spec.isNamedTypeReference(parameter.type)
          ? this.assembly.types[parameter.type.fqn]
          : undefined;
      if (parameterType) {
        const functionName = `${NAMESPACE}.${fnName(parameterType.fqn)}`;
        statements.push(
          ts.createExpressionStatement(
            ts.createCall(ts.createIdentifier(functionName), undefined, [
              ts.createIdentifier(parameter.name),
            ]),
          ),
        );
      }
    }
    return statements;
  }
}
/**
 * Warning statement(s) for a class member: a single namespaced `print` call
 * when the member is deprecated (FQN is `<class>#<member>`, falling back to
 * the class FQN for unnamed callables such as initializers), otherwise an
 * empty list.
 */
function createWarningStatementForElement(
  element: spec.Callable | spec.Property,
  classType: spec.ClassType,
): ts.Statement[] {
  if (!spec.isDeprecated(element)) {
    return [];
  }
  const elementName = (element as spec.Method | spec.Property).name;
  const fqn = elementName ? `${classType.fqn}#${elementName}` : classType.fqn;
  // Fall back to the class-level deprecation message when the member has none.
  const message = element.docs?.deprecated ?? classType.docs?.deprecated;
  return [createWarningFunctionCall(fqn, message, undefined, true)];
}
/**
 * Inserts a list of statements in the correct position inside a block of
 * statements: right after a leading `super(...)` call when one is present,
 * otherwise at the very beginning of the block.
 */
function insertStatements(block: ts.Block, newStatements: ts.Statement[]) {
  const combined = [...block.statements];
  const firstStatement = combined[0];
  // Index 1 places the new statements just after super(); 0 prepends them.
  let insertionIndex = 0;
  if (firstStatement != null) {
    firstStatement.forEachChild((child) => {
      const isSuperCall =
        ts.isCallExpression(child) &&
        child.expression.kind === ts.SyntaxKind.SuperKeyword;
      if (isSuperCall) {
        insertionIndex = 1;
      }
    });
  }
  combined.splice(insertionIndex, 0, ...newStatements);
  return ts.createNodeArray(combined);
}
/**
 * Builds a `const ns = require('./<file>.js')` statement for an enum's
 * defining module, swapping the source extension (e.g. `.ts`) for `.js`.
 *
 * Fix: the previous implementation used `typeLocation.replace(ext, '.js')`,
 * which replaces the FIRST occurrence of the extension anywhere in the path
 * (breaking e.g. `foo.ts.dir/bar.ts`) and, for an empty extension, prepends
 * `.js` to the path. Only the trailing extension is replaced now.
 */
function createEnumRequireStatement(typeLocation: string): ts.Statement {
  const { ext } = path.parse(typeLocation);
  const jsFileName = ext
    ? `${typeLocation.slice(0, -ext.length)}.js`
    : typeLocation;
  return createRequireStatement(LOCAL_ENUM_NAMESPACE, `./${jsFileName}`);
}
/**
 * Builds `const <name> = require('<importPath>');` as a TypeScript AST
 * statement.
 */
function createRequireStatement(
  name: string,
  importPath: string,
): ts.Statement {
  const requireCall = ts.createCall(ts.createIdentifier('require'), undefined, [
    ts.createLiteral(importPath),
  ]);
  const declaration = ts.createVariableDeclaration(
    name,
    undefined,
    requireCall,
  );
  const declarationList = ts.createVariableDeclarationList(
    [declaration],
    ts.NodeFlags.Const,
  );
  return ts.createVariableStatement(undefined, declarationList);
}
/**
 * Returns a ready-to-use warning-function name for the given type: the plain
 * local name when the type belongs to this assembly, or an inline
 * `require("<module>/.warnings.jsii.js").<fn>` expression for types from
 * dependencies. Returns `undefined` when the type cannot be found among
 * assemblies compiled with deprecation warnings.
 */
function importedFunctionName(
  typeName: string,
  assembly: Assembly,
  projectInfo: ProjectInfo,
) {
  const assemblies = projectInfo.dependencyClosure.concat(assembly);
  const { type, moduleName } = findType(typeName, assemblies);
  if (!type) {
    return undefined;
  }
  if (moduleName === assembly.name) {
    return fnName(type.fqn);
  }
  return `require("${moduleName}/${WARNINGSCODE_FILE_NAME}").${fnName(
    type.fqn,
  )}`;
}
/**
 * Find the type and module name in an array of assemblies matching a given
 * type name. Only assemblies compiled with deprecation warnings are
 * considered; returns an empty object when nothing matches.
 */
function findType(typeName: string, assemblies: Assembly[]) {
  for (const asm of assemblies) {
    if (!asm.metadata?.jsii?.compiledWithDeprecationWarnings) {
      continue;
    }
    const types = asm.types ?? {};
    // Own-property check mirrors the original key scan (no prototype hits).
    if (Object.prototype.hasOwnProperty.call(types, typeName)) {
      return { type: types[typeName], moduleName: asm.name };
    }
  }
  return {};
}
/**
 * Builds `if (!visitedObjects.has(<parameter>)) <functionName>(<parameter>);`
 * — invokes a nested type's warning function unless the value is already on
 * the visited set (cycle guard).
 */
function createTypeHandlerCall(
  functionName: string,
  parameter: string,
): ts.Statement {
  const alreadyVisited = ts.createCall(
    ts.createPropertyAccess(
      ts.createIdentifier(VISITED_OBJECTS_SET_NAME),
      ts.createIdentifier('has'),
    ),
    undefined,
    [ts.createIdentifier(parameter)],
  );
  const invokeHandler = ts.createExpressionStatement(
    ts.createCall(ts.createIdentifier(functionName), undefined, [
      ts.createIdentifier(parameter),
    ]),
  );
  return ts.createIf(
    ts.createPrefix(ts.SyntaxKind.ExclamationToken, alreadyVisited),
    invokeHandler,
  );
}
/**
 * There is a chance an enum contains duplicates values with distinct keys,
 * with one of those keys being deprecated. This is a potential pattern to "rename" an enum.
 * In this case, we can't concretely determine if the deprecated member was used or not,
 * so in those cases we skip the warnings altogether, rather than erroneously warning for valid usage.
 * This create a statement to check if the enum value is a duplicate:
 *
 * if (Object.values(Foo).filter(x => x === p).length > 1) { return; }
 *
 * Note that we can't just check the assembly for these duplicates, due to:
 * https://github.com/aws/jsii/issues/2782
 */
function createDuplicateEnumValuesCheck(
  type: spec.TypeBase & spec.EnumType,
): ts.Statement {
  // AST below reads inside-out: Object.values(ns.<Enum>) -> .filter(x => x === p) -> .length > 1
  return ts.createIf(
    ts.createBinary(
      ts.createPropertyAccess(
        ts.createCall(
          ts.createPropertyAccess(
            ts.createCall(
              ts.createPropertyAccess(ts.createIdentifier('Object'), 'values'),
              undefined,
              [
                // ns.<EnumName> — the enum object require()d by the warning fn
                ts.createPropertyAccess(
                  ts.createIdentifier(LOCAL_ENUM_NAMESPACE),
                  type.name,
                ),
              ],
            ),
            ts.createIdentifier('filter'),
          ),
          undefined,
          [
            // x => x === p
            ts.createArrowFunction(
              undefined,
              undefined,
              [ts.createParameter(undefined, undefined, undefined, 'x')],
              undefined,
              ts.createToken(ts.SyntaxKind.EqualsGreaterThanToken),
              ts.createBinary(
                ts.createIdentifier('x'),
                ts.createToken(ts.SyntaxKind.EqualsEqualsEqualsToken),
                ts.createIdentifier(PARAMETER_NAME),
              ),
            ),
          ],
        ),
        ts.createIdentifier('length'),
      ),
      ts.createToken(ts.SyntaxKind.GreaterThanToken),
      ts.createNumericLiteral('1'),
    ),
    // Duplicate value: skip all warnings for this call.
    ts.createReturn(),
  );
}
// We try-then-rethrow exceptions to avoid runtimes displaying an uncanny wall of text if the place
// where the error was thrown is webpacked. For example, jest somehow manages to capture the throw
// location and renders the source line (which may be the whole file) when bundled.
/**
 * Wraps warning statements in try/catch: a thrown DeprecationError has its
 * stack trimmed to `caller` via Error.captureStackTrace (unless JSII_DEBUG=1)
 * before being rethrown. Returns the input unchanged when empty.
 */
function wrapWithRethrow(
  statements: ts.Statement[],
  caller: ts.Expression,
): ts.Statement[] {
  if (statements.length === 0) {
    return statements;
  }
  return [
    ts.createTry(
      ts.createBlock(statements),
      ts.createCatchClause(
        ts.createVariableDeclaration('error'),
        ts.createBlock([
          // If this is a DeprecationError, trim its stack trace to surface level before re-throwing,
          // so we don't carry out possibly confusing frames from injected code. That can be toggled
          // off by setting JSII_DEBUG=1, so we can also diagnose in-injected code faults.
          ts.createIf(
            // process.env.JSII_DEBUG !== '1' && error.name === 'DeprecationError'
            ts.createBinary(
              ts.createBinary(
                ts.createPropertyAccess(
                  ts.createPropertyAccess(
                    ts.createIdentifier('process'),
                    'env',
                  ),
                  'JSII_DEBUG',
                ),
                ts.SyntaxKind.ExclamationEqualsEqualsToken,
                ts.createLiteral('1'),
              ),
              ts.SyntaxKind.AmpersandAmpersandToken,
              ts.createBinary(
                ts.createPropertyAccess(ts.createIdentifier('error'), 'name'),
                ts.SyntaxKind.EqualsEqualsEqualsToken,
                ts.createLiteral(DEPRECATION_ERROR),
              ),
            ),
            ts.createBlock([
              // Error.captureStackTrace(error, caller)
              ts.createExpressionStatement(
                ts.createCall(
                  ts.createPropertyAccess(
                    ts.createIdentifier('Error'),
                    'captureStackTrace',
                  ),
                  undefined,
                  [ts.createIdentifier('error'), caller],
                ),
              ),
            ]),
          ),
          ts.createThrow(ts.createIdentifier('error')),
        ]),
      ),
      undefined,
    ),
  ];
}
/**
* Force a path to be UNIXy (use `/` as a separator)
*
* `path.join()` etc. will use the system-dependent path separator (either `/` or `\`
* depending on your platform).
*
* However, if we actually emit the path-dependent separator to the `.js` files, then
* files compiled with jsii on Windows cannot be used on any other platform. That seems
* like an unnecessary restriction, especially since a `/` will work fine on Windows,
* so make sure to always emit `/`.
*
* TSC itself always strictly emits `/` (or at least, emits the same what you put in).
*/
function unixPath(filePath: string) {
if (path.sep === '\\') {
return filePath.replace(/\\/g, '/');
}
return filePath;
} | the_stack |
import React, { Component } from 'react'
import {
Platform,
StyleSheet,
Text,
View,
ScrollView,
Switch,
TouchableWithoutFeedback,
TouchableOpacity,
ActivityIndicator,
Dimensions,
LayoutAnimation,
Image
} from 'react-native'
import SectionedMultiSelect from 'react-native-sectioned-multi-select'
import Icon from 'react-native-vector-icons/MaterialIcons'
import { SectionedMultiSelectProps } from '..'
const img = require('./z.jpg')
// Sorry for the mess
// Demo fixture: sections (with optional `icon` — a MaterialIcons name or a
// require()d image) containing child items, plus one childless entry.
const items = [
  {
    title: 'Fruits',
    id: 0,
    children: [
      {
        title: 'Apple',
        id: 10
      },
      {
        title: 'Strawberry',
        id: 11
      },
      {
        title: 'Pineapple',
        id: 13
      },
      {
        title: 'Banana',
        id: 14
      },
      {
        // Deliberately accented to exercise the accent-insensitive search.
        title: 'Wátermelon',
        id: 15
      },
      {
        // Hebrew (RTL) title, also for search testing.
        title: 'אבטיח',
        id: 17
      },
      {
        title: 'Raspberry',
        id: 18
      },
      {
        title: 'Orange',
        id: 19
      },
      {
        title: 'Mandarin',
        id: 20
      },
      {
        title: 'Papaya',
        id: 21
      },
      {
        title: 'Lychee',
        id: 22
      },
      {
        title: 'Cherry',
        id: 23
      },
      {
        title: 'Peach',
        id: 24
      },
      {
        title: 'Apricot',
        id: 25
      }
    ]
  },
  {
    title: 'Gèms',
    id: 1,
    icon: 'cake',
    children: [
      {
        title: 'Quartz',
        id: 26
      },
      {
        title: 'Zircon',
        id: 27
      },
      {
        title: 'Sapphirè',
        id: 28
      },
      {
        title: 'Topaz',
        id: 29
      }
    ]
  },
  {
    title: 'Plants',
    id: 2,
    icon: img,
    children: [
      {
        title: "Mother In Law's Tongue",
        id: 30
      },
      {
        title: 'Yucca',
        id: 31
      },
      {
        title: 'Monsteria',
        id: 32
      },
      {
        title: 'Palm',
        id: 33
      }
    ]
  },
  {
    title: 'No child',
    id: 34
  }
]
// NOTE(review): debug leftover — logs the fixture on module load.
console.log(items)
// const items2 =
// [{
// title: 'Plants',
// id: 2,
// children: [
// {
// title: "Mother In Law's Tongue",
// id: 30,
// },
// {
// title: 'Yucca',
// id: 31,
// },
// {
// title: 'Monsteria',
// id: 32,
// },
// {
// title: 'Palm',
// id: 33,
// },
// ],
// }]
// Stress-test fixture: 100 sections of three children each, used to exercise
// the select with a large item count. Child ids are strings ('10<i>' etc.)
// while section ids are numbers, matching the original data shape.
// Fix: the previous `const items2 = []` was untyped (never[]/implicit any
// under strict mode) and built imperatively; replaced with a typed
// Array.from construction.
type StressItem = {
  id: number | string
  title: string
  children?: StressItem[]
}
const items2: StressItem[] = Array.from({ length: 100 }, (_, i) => ({
  id: i,
  title: `item ${i}`,
  children: [
    {
      id: `10${i}`,
      title: `child 10${i}`
    },
    {
      id: `11${i}`,
      title: `child 11${i}`
    },
    {
      id: `12${i}`,
      title: `child 12${i}`
    }
  ]
}))
// Shared screen styles: `center` doubles as the loading/no-results layout,
// `switch` lays out the Toggle rows (label left, switch right).
const styles = StyleSheet.create({
  center: {
    flex: 1,
    justifyContent: 'center',
    alignItems: 'center',
    marginTop: 30
  },
  container: {
    paddingTop: 40,
    paddingHorizontal: 20
  },
  welcome: {
    fontSize: 20,
    textAlign: 'center',
    margin: 10,
    color: '#333'
  },
  border: {
    borderBottomWidth: 1,
    borderBottomColor: '#dadada',
    marginBottom: 20
  },
  heading: {
    fontSize: 24,
    fontWeight: 'bold',
    marginBottom: 5,
    marginTop: 20
  },
  label: {
    fontWeight: 'bold'
  },
  switch: {
    marginBottom: 20,
    flexDirection: 'row',
    alignItems: 'flex-end',
    justifyContent: 'space-between'
  }
})
// Accented character -> ASCII equivalent, used by removerAcentos to make the
// custom search filter accent-insensitive.
const accentMap = {
  â: 'a',
  Â: 'A',
  à: 'a',
  À: 'A',
  á: 'a',
  Á: 'A',
  ã: 'a',
  Ã: 'A',
  ê: 'e',
  Ê: 'E',
  è: 'e',
  È: 'E',
  é: 'e',
  É: 'E',
  î: 'i',
  Î: 'I',
  ì: 'i',
  Ì: 'I',
  í: 'i',
  Í: 'I',
  õ: 'o',
  Õ: 'O',
  ô: 'o',
  Ô: 'O',
  ò: 'o',
  Ò: 'O',
  ó: 'o',
  Ó: 'O',
  ü: 'u',
  Ü: 'U',
  û: 'u',
  Û: 'U',
  ú: 'u',
  Ú: 'U',
  ù: 'u',
  Ù: 'U',
  ç: 'c',
  Ç: 'C'
}
// Accent colour used for the Switch track in both on/off states.
const tintColor = '#174A87'
// Fallback content shown inside the select while items load: a tap-to-retry
// error view when the fetch failed, otherwise a centered spinner.
const Loading = ({ hasErrored, fetchCategories }) =>
  hasErrored ? (
    <TouchableWithoutFeedback onPress={fetchCategories}>
      <View style={styles.center}>
        <Text>oops... something went wrong. Tap to reload</Text>
      </View>
    </TouchableWithoutFeedback>
  ) : (
    <View style={styles.center}>
      <ActivityIndicator size="large" />
    </View>
  )
// Labelled switch row; the whole row is tappable and flips the value, not
// just the Switch itself.
const Toggle = ({ name, val, onPress, disabled }) => (
  <TouchableWithoutFeedback onPress={() => onPress(!val)} disabled={disabled}>
    <View style={styles.switch}>
      <Text style={styles.label}>{name}</Text>
      <Switch
        trackColor={{ false: tintColor, true: tintColor }}
        onValueChange={(v) => onPress(v)}
        value={val}
        disabled={disabled}
      />
    </View>
  </TouchableWithoutFeedback>
)
// One selectable entry; sections carry `children` of leaf items (which may
// not nest further).
type ItemType = {
  id: number
  title: string
  icon?: string
  children?: Omit<ItemType, 'children'>[]
}

// Demo screen state.
type State = {
  items: null | ItemType[] // null until pretendToLoad publishes the fixture
  loading: boolean
  selectedItems: ItemType['id'][] // selection of the first select
  selectedItems2: ItemType['id'][] // selection of the second select
  selectedItemObjects: ItemType[] // full objects mirrored for renderSelectText
  currentItems: ItemType['id'][] // last confirmed selection (cancel restores it)
  showDropDowns: boolean
  expandDropDowns: boolean
  single: boolean
  readOnlyHeadings: boolean
  highlightChildren: boolean
  selectChildren: boolean
  hideChipRemove: boolean
  hasErrored: boolean
  maxItemsReached: boolean
}

// Names of the boolean state keys driven by the Toggle rows; used as
// `keyof typeof ToggleTypes` in onSwitchToggle (only the keys matter, the
// numeric enum values are unused).
enum ToggleTypes {
  'single',
  'readOnlyHeadings',
  'expandDropDowns',
  'showDropDowns',
  'highlightChildren',
  'selectChildren',
  'hideChipRemove'
}
export default class App extends Component<{}, State> {
  // Imperative handle onto the select, used for programmatic calls
  // (_removeAllItems, _getSearchTerm, _findItem, ...).
  private sectionedMultiSelectRef: SectionedMultiSelect<ItemType>
  // Monotonic id source for items created from the search box.
  private termId: number
  // Cap on the first select's simultaneous selections.
  private maxItems: number

  constructor(props) {
    super(props)
    this.state = {
      items: null,
      loading: false,
      selectedItems: [],
      selectedItems2: [],
      selectedItemObjects: [],
      currentItems: [],
      showDropDowns: false,
      expandDropDowns: false,
      single: false,
      readOnlyHeadings: false,
      highlightChildren: false,
      selectChildren: false,
      hideChipRemove: false,
      hasErrored: false,
      maxItemsReached: false
    }
    // Start above the fixture ids so search-created items never collide.
    this.termId = 100
    this.maxItems = 5
  }

  componentDidMount() {
    this.pretendToLoad()
    // programatically opening the select
    // this.sectionedMultiSelectRef._toggleSelector()
  }
  // custom icon renderer passed to iconRenderer prop
  // see the switch for possible icon name
  // values
  // Maps each internal icon name to an icons8-hosted Image of the requested
  // size/colour; unknown names render an empty View.
  icon = ({ name, size = 18, style }) => {
    // flatten the styles
    const flat = StyleSheet.flatten(style)
    // remove out the keys that aren't accepted on View
    // NOTE(review): `styles` here intentionally shadows the module-level
    // StyleSheet — it is the rest of the flattened style object.
    const { color, fontSize, ...styles } = flat
    let iconComponent
    // the colour in the url on this site has to be a hex w/o hash
    const iconColor =
      color && color.substr(0, 1) === '#' ? `${color.substr(1)}/` : '/'
    const Search = (
      <Image
        source={{
          uri: `https://png.icons8.com/search/${iconColor}ios/`
        }}
        style={{ width: size, height: size }}
      />
    )
    const Down = (
      <Image
        source={{
          uri: `https://png.icons8.com/down/${iconColor}ios/`
        }}
        style={{ width: size, height: size }}
      />
    )
    const Up = (
      <Image
        source={{
          uri: `https://png.icons8.com/up/${iconColor}ios/`
        }}
        style={{ width: size, height: size }}
      />
    )
    const Close = (
      <Image
        source={{
          uri: `https://png.icons8.com/multiply/${iconColor}ios/`
        }}
        style={{ width: size, height: size }}
      />
    )
    const Check = (
      <Image
        source={{
          uri: `https://png.icons8.com/checkmark/${iconColor}android/`
        }}
        style={{
          width: size / 1.5,
          height: size / 1.5
        }}
      />
    )
    const Cancel = (
      <Image
        source={{
          uri: `https://png.icons8.com/cancel/${iconColor}ios/`
        }}
        style={{ width: size, height: size }}
      />
    )
    switch (name) {
      case 'search':
        iconComponent = Search
        break
      case 'keyboard-arrow-up':
        iconComponent = Up
        break
      case 'keyboard-arrow-down':
        iconComponent = Down
        break
      case 'close':
        iconComponent = Close
        break
      case 'check':
        iconComponent = Check
        break
      case 'cancel':
        iconComponent = Cancel
        break
      default:
        iconComponent = null
        break
    }
    return <View style={styles}>{iconComponent}</View>
  }
  // Reads `object[key]` with accents stripped, so search matching is
  // accent-insensitive; falsy when `object` is nullish.
  getProp = (object, key) => object && this.removerAcentos(object[key])

  // Thin named wrapper over Array.prototype.filter for readability.
  rejectProp = (items, fn) => items.filter(fn)

  // Simulates a slow network load: 4s of loading state, then publishes the
  // static `items` fixture.
  pretendToLoad = () => {
    this.setState({ loading: true })
    setTimeout(() => {
      this.setState({ loading: false, items })
    }, 4000)
  }
  // testing a custom filtering function that ignores accents
  // Maps accented characters through accentMap; brackets/spaces in the class
  // map to themselves via the `|| a` fallback.
  // NOTE(review): relies on JS \W (sans `u` flag) treating accented letters
  // as non-word characters — confirm before altering the regex.
  removerAcentos = (s) => s.replace(/[\W\[\] ]/g, (a) => accentMap[a] || a)

  // Accent-insensitive replacement for the select's built-in search filter:
  // splits the term on whitespace/punctuation into an OR-regex, keeps items
  // whose displayKey matches, and for sections keeps a copy containing only
  // the matching children.
  filterItems = (searchTerm, items, { subKey, displayKey, uniqueKey }) => {
    let filteredItems = []
    let newFilteredItems = []
    items.forEach((item) => {
      const parts = this.removerAcentos(searchTerm.trim()).split(
        /[[ \][)(\\/?\-:]+/
      )
      const regex = new RegExp(`(${parts.join('|')})`, 'i')
      // Keep the section itself when its own title matches.
      if (regex.test(this.getProp(item, displayKey))) {
        filteredItems.push(item)
      }
      if (item[subKey]) {
        // Rebuild the section with only matching children, replacing any
        // previously-kept copy of the same section (matched via uniqueKey).
        const newItem = Object.assign({}, item)
        newItem[subKey] = []
        item[subKey].forEach((sub) => {
          if (regex.test(this.getProp(sub, displayKey))) {
            newItem[subKey] = [...newItem[subKey], sub]
            newFilteredItems = this.rejectProp(
              filteredItems,
              (singleItem) => item[uniqueKey] !== singleItem[uniqueKey]
            )
            newFilteredItems.push(newItem)
            filteredItems = newFilteredItems
          }
        })
      }
    })
    return filteredItems
  }
  // Selection handler for the first select. Enforces the `maxItems` cap:
  // at exactly the cap the selection is stored, beyond it only the
  // "max reached" flag is raised and the extra selection is dropped.
  onSelectedItemsChange = (selectedItems) => {
    console.log(selectedItems, selectedItems.length)
    if (selectedItems.length >= this.maxItems) {
      if (selectedItems.length === this.maxItems) {
        this.setState({ selectedItems })
      }
      this.setState({
        maxItemsReached: true
      })
      return
    }
    this.setState({
      maxItemsReached: false
    })
    // Keep the two selects disjoint: drop ids already picked in select 2.
    const filteredItems = selectedItems.filter(
      (val) => !this.state.selectedItems2.includes(val)
    )
    this.setState({
      selectedItems: filteredItems
    })
  }
onSelectedItemsChange2 = (selectedItems) => {
const filteredItems = selectedItems.filter(
(val) => !this.state.selectedItems.includes(val)
)
this.setState({
selectedItems2: filteredItems
})
}
  // Commits the in-progress selection as the confirmed one.
  onConfirm = () => {
    this.setState({
      currentItems: this.state.selectedItems
    })
  }

  // Reverts to the last confirmed selection (pairs with showCancelButton).
  onCancel = () => {
    this.sectionedMultiSelectRef._removeAllItems()
    this.setState({
      selectedItems: this.state.currentItems
    })
    // NOTE(review): setState is async — this logs the previous selection,
    // not the restored one.
    console.log(this.state.selectedItems)
  }

  // Mirrors the full item objects of the selection into state so
  // renderSelectText can build a sentence from their titles.
  onSelectedItemObjectsChange = (selectedItemObjects) => {
    this.setState({ selectedItemObjects })
    console.log(selectedItemObjects)
  }
  // Generic Toggle-row handler: flips the boolean state key named by `k`
  // (one of the ToggleTypes keys).
  onSwitchToggle = (k: keyof typeof ToggleTypes) => {
    const v = !this.state[k]
    this.setState({ [k]: v } as Pick<State, keyof typeof ToggleTypes>)
  }
fetchCategories = () => {
this.setState({ hasErrored: false })
fetch('http://www.mocky.io/v2/5a5573a22f00005c04beea49?mocky-delay=500ms')
.then((response) => response.json())
.then((responseJson) => {
this.setState({ selectedItems: responseJson })
})
.catch((error) => {
this.setState({ hasErrored: true })
throw error.message
})
}
filterDuplicates = (items) =>
items.sort().reduce((accumulator, current) => {
const length = accumulator.length
if (length === 0 || accumulator[length - 1] !== current) {
accumulator.push(current)
}
return accumulator
}, [])
  // Rendered inside the select when a search yields no matching items.
  noResults = (
    <View key="a" style={styles.center}>
      <Text>Sorry! No results...</Text>
    </View>
  )
  // Adds the current search term as a brand-new top-level item (and selects
  // it) when no existing item title already contains the term.
  handleAddSearchTerm = () => {
    const searchTerm = this.sectionedMultiSelectRef._getSearchTerm()
    const id = (this.termId += 1)
    if (
      searchTerm.length &&
      !(this.state.items || []).some((item) => item.title.includes(searchTerm))
    ) {
      const newItem = { id, title: searchTerm }
      this.setState((prevState) => ({
        items: [...(prevState.items || []), newItem]
      }))
      // NOTE(review): reads this.state.selectedItems synchronously right
      // after the setState above — works because it reads the selection,
      // not `items`, but verify if the selection flow changes.
      this.onSelectedItemsChange([...this.state.selectedItems, id])
      this.sectionedMultiSelectRef._submitSelection()
    }
  }
  // Renders a "+" button next to the search field (only while a term is
  // typed) that adds the term as a new item via handleAddSearchTerm.
  searchAdornment = (searchTerm) =>
    searchTerm.length ? (
      <TouchableOpacity
        style={{
          alignItems: 'center',
          justifyContent: 'center'
        }}
        onPress={this.handleAddSearchTerm}
      >
        <View style={{}}>
          <Image
            source={{
              uri: 'https://png.icons8.com/plus'
            }}
            style={{
              width: 16,
              height: 16,
              marginHorizontal: 15
            }}
          />
          {/* <Icon size={18} style={{ marginHorizontal: 15 }} name="add" /> */}
        </View>
      </TouchableOpacity>
    ) : null
  // Header text for the select: "Select a fruit" when nothing is picked,
  // otherwise a sentence like "I like A, B and C." built from item titles.
  renderSelectText = () => {
    const { selectedItemObjects } = this.state
    const selectText = selectedItemObjects.length
      ? `I like ${selectedItemObjects
          .map((item, i) => {
            let label = `${item.title}, `
            // second-to-last gets " and ", the last one a full stop
            if (i === selectedItemObjects.length - 2)
              label = `${item.title} and `
            if (i === selectedItemObjects.length - 1) label = `${item.title}.`
            return label
          })
          .join('')}`
      : 'Select a fruit'
    return <Text style={{ color: 'red', fontSize: 24 }}>{selectText}</Text>
  }
  // Header button (for the headerComponent prop) that selects everything or
  // clears the selection, depending on whether anything is selected. Renders
  // nothing until the select ref is available.
  SelectOrRemoveAll = () =>
    this.sectionedMultiSelectRef && (
      <TouchableOpacity
        style={{
          justifyContent: 'center',
          height: 44,
          borderWidth: 0,
          paddingHorizontal: 10,
          backgroundColor: 'darkgrey',
          alignItems: 'center'
        }}
        onPress={
          this.state.selectedItems.length
            ? this.sectionedMultiSelectRef._removeAllItems
            : this.sectionedMultiSelectRef._selectAllItems
        }
      >
        <Text
          style={{
            color: 'white',
            fontWeight: 'bold'
          }}
        >
          {this.state.selectedItems.length ? 'Remove' : 'Select'} all
        </Text>
      </TouchableOpacity>
    )
  // Logs open/close transitions of the select modal.
  onToggleSelector = (toggled) => {
    console.log('selector is ', toggled ? 'open' : 'closed')
  }

  // Example customChipsRenderer: shows the selection as orange tappable
  // labels on a yellow banner; tapping a label removes that item.
  customChipsRenderer = (props) => {
    console.log('props', props)
    return (
      <View
        style={{
          backgroundColor: 'yellow',
          padding: 15
        }}
      >
        <Text>Selected:</Text>
        {props.selectedItems.map((singleSelectedItem) => {
          // Resolve the selected id back to its item object via the ref.
          const item =
            this.sectionedMultiSelectRef._findItem(singleSelectedItem)
          if (!item || !item[props.displayKey]) return null
          return (
            <View
              key={item[props.uniqueKey]}
              style={{
                flex: 0,
                marginRight: 5,
                padding: 10,
                backgroundColor: 'orange'
              }}
            >
              <TouchableOpacity
                onPress={() => {
                  this.sectionedMultiSelectRef._removeItem(item)
                }}
              >
                <Text>{item[props.displayKey]}</Text>
              </TouchableOpacity>
            </View>
          )
        })}
      </View>
    )
  }
// Demo screen: a SectionedMultiSelect wired to component state, followed
// by a settings panel of toggles that flip the relevant props live.
// Commented-out props are intentionally kept as usage examples.
render() {
  return (
    <ScrollView
      // Keep taps working while the search keyboard is open.
      keyboardShouldPersistTaps="always"
      style={{ backgroundColor: '#f8f8f8' }}
      contentContainerStyle={styles.container}
    >
      <Text style={styles.welcome}>
        React native sectioned multi select example.
      </Text>
      {/* NOTE(review): the explicit <ItemType> generic exercises the
          component's typed props (e.g. keyExtractor below) — confirm
          ItemType is imported where this file is compiled. */}
      <SectionedMultiSelect<ItemType>
        items={this.state.items}
        // Store an instance ref so handlers above can call imperative
        // helpers (_removeAllItems, _selectAllItems, _findItem, ...).
        ref={(SectionedMultiSelect) =>
          (this.sectionedMultiSelectRef = SectionedMultiSelect)
        }
        uniqueKey="id"
        subKey="children"
        displayKey="title"
        iconKey="icon"
        autoFocus
        modalWithTouchable
        modalWithSafeAreaView
        // showCancelButton
        // headerComponent={this.SelectOrRemoveAll}
        // hideConfirm
        loading={this.state.loading}
        // filterItems={this.filterItems}
        // alwaysShowSelectText
        // customChipsRenderer={this.customChipsRenderer}
        itemsFlatListProps={{
          // just checking the item type is passed to the flatlist props types
          keyExtractor: (item, index) => `${item.id}-${index}`
        }}
        chipsPosition="top"
        searchAdornment={(searchTerm) => this.searchAdornment(searchTerm)}
        renderSelectText={this.renderSelectText}
        // noResultsComponent={this.noResults}
        loadingComponent={
          <Loading
            hasErrored={this.state.hasErrored}
            fetchCategories={this.fetchCategories}
          />
        }
        IconRenderer={Icon}
        // cancelIconComponent={<Text style={{color:'white'}}>Cancel</Text>}
        showDropDowns={this.state.showDropDowns}
        expandDropDowns={this.state.expandDropDowns}
        animateDropDowns={false}
        readOnlyHeadings={this.state.readOnlyHeadings}
        single={this.state.single}
        showRemoveAll
        hideChipRemove={this.state.hideChipRemove}
        selectChildren={this.state.selectChildren}
        highlightChildren={this.state.highlightChildren}
        // hideSearch
        // itemFontFamily={fonts.boldCondensed}
        onSelectedItemsChange={this.onSelectedItemsChange}
        onSelectedItemObjectsChange={this.onSelectedItemObjectsChange}
        onCancel={this.onCancel}
        onConfirm={this.onConfirm}
        // Confirm button doubles as a selection counter, e.g. "3/5 - Confirm".
        confirmText={`${this.state.selectedItems.length}/${this.maxItems} - ${
          this.state.maxItemsReached ? 'Max selected' : 'Confirm'
        }`}
        selectedItems={this.state.selectedItems}
        colors={{
          primary: '#5c3a9e',
          success: '#5c3a9e'
        }}
        itemNumberOfLines={3}
        selectLabelNumberOfLines={3}
        styles={{
          // chipText: {
          //   maxWidth: Dimensions.get('screen').width - 90,
          // },
          // itemText: {
          //   color: this.state.selectedItems.length ? 'black' : 'lightgrey'
          // },
          // selectedItemText: {
          //   color: 'blue',
          // },
          // subItemText: {
          //   color: this.state.selectedItems.length ? 'black' : 'lightgrey'
          // },
          item: {
            paddingHorizontal: 10
          },
          subItem: {
            paddingHorizontal: 10
          },
          selectedItem: {
            backgroundColor: 'rgba(0,0,0,0.1)'
          },
          selectedSubItem: {
            backgroundColor: 'rgba(0,0,0,0.1)'
          },
          // selectedSubItemText: {
          //   color: 'blue',
          // },
          scrollView: { paddingHorizontal: 0 }
        }}
        // cancelIconComponent={<Icon size={20} name="close" style={{ color: 'white' }} />}
      />
      {/* Settings panel: each Toggle flips one boolean in state that feeds
          the corresponding SectionedMultiSelect prop above. */}
      <View>
        <View style={styles.border}>
          <Text style={styles.heading}>Settings</Text>
        </View>
        <Toggle
          name="Single"
          onPress={() => this.onSwitchToggle('single')}
          val={this.state.single}
        />
        <Toggle
          name="Read only headings"
          onPress={() => this.onSwitchToggle('readOnlyHeadings')}
          val={this.state.readOnlyHeadings}
        />
        <Toggle
          name="Expand dropdowns"
          onPress={() => this.onSwitchToggle('expandDropDowns')}
          val={this.state.expandDropDowns}
          // Expanding only makes sense when dropdown toggles are shown.
          disabled={!this.state.showDropDowns}
        />
        <Toggle
          name="Show dropdown toggles"
          onPress={() => this.onSwitchToggle('showDropDowns')}
          val={this.state.showDropDowns}
        />
        <Toggle
          name="Auto-highlight children"
          onPress={() => this.onSwitchToggle('highlightChildren')}
          val={this.state.highlightChildren}
          // highlightChildren and selectChildren are mutually exclusive.
          disabled={this.state.selectChildren}
        />
        <Toggle
          name="Auto-select children"
          onPress={() => this.onSwitchToggle('selectChildren')}
          disabled={this.state.highlightChildren}
          val={this.state.selectChildren}
        />
        <Toggle
          name="Hide Chip Remove Buttons"
          onPress={() => this.onSwitchToggle('hideChipRemove')}
          val={this.state.hideChipRemove}
        />
        <TouchableWithoutFeedback
          onPress={() => this.sectionedMultiSelectRef._removeAllItems()}
        >
          <View style={styles.switch}>
            <Text style={styles.label}>Remove All</Text>
          </View>
        </TouchableWithoutFeedback>
      </View>
    </ScrollView>
  )
}
}