text
stringlengths
2.5k
6.39M
kind
stringclasses
3 values
// Integration tests for StylelintRunner. Each describe-suite exercises one
// linting concern: plain linting, auto-fix, customSyntax, the disable-report
// options, and a custom stylelintPath. Most assertions are snapshot-based,
// so the exact diagnostic shapes live in the snapshot files next to this test.
import { join, resolve } from 'path';
import { pathToFileURL } from 'url';
import { TextDocument } from 'vscode-languageserver-textdocument';
import { StylelintRunner } from '../../../src/utils/stylelint';

// Builds an in-memory TextDocument for the runner. A null uri produces the
// untitled-document URI 'Untitled:Untitled'; otherwise the path is resolved
// relative to this test file's parent directory and turned into a file:// URL.
// Version is fixed at 1 — these tests never edit a document after creation.
const createDocument = (uri: string | null, languageId: string, contents: string): TextDocument =>
	TextDocument.create(
		uri ? pathToFileURL(resolve(__dirname, '..', uri)).toString() : 'Untitled:Untitled',
		languageId,
		1,
		contents,
	);

describe('StylelintRunner', () => {
	// Two rule violations (string-quotes as warning, indentation) on one line;
	// the snapshot pins severity mapping and ranges.
	test('should be resolved with diagnostics when it lints CSS successfully', async () => {
		expect.assertions(1);
		const runner = new StylelintRunner();
		const result = await runner.lintDocument(createDocument(null, 'css', ' a[id="id"]{}'), {
			config: {
				rules: {
					'string-quotes': ['single', { severity: 'warning' }],
					indentation: ['tab'],
				},
			},
		});
		expect(result.diagnostics).toMatchSnapshot();
	});

	// Empty SCSS input: clean runs must yield an empty diagnostics array,
	// not undefined/null.
	test('should be resolved with an empty array when no errors and warnings are reported', async () => {
		expect.assertions(1);
		const runner = new StylelintRunner();
		const result = await runner.lintDocument(createDocument(null, 'scss', ''), {
			config: {
				customSyntax: 'postcss-scss',
				rules: { indentation: [2] },
			},
		});
		expect(result.diagnostics).toEqual([]);
	});

	// A parse error (unclosed rule) should surface as a diagnostic rather
	// than reject the promise.
	test('should be resolved with one diagnostic when the CSS is broken', async () => {
		expect.assertions(1);
		const runner = new StylelintRunner();
		// TODO: Restore once postcss-markdown is PostCSS 8 compatible
		// const result = await runner.lintDocument(
		// 	createDocument(
		// 		'markdown.md',
		// 		'markdown',
		// 		`# Title
		// # Code block
		// \`\`\`css
		// a{
		// \`\`\`
		// `,
		// 	),
		// 	{
		// 		config: {
		// 			customSyntax: 'postcss-markdown',
		// 			rules: {
		// 				indentation: ['tab'],
		// 			},
		// 		},
		// 	},
		// );
		const result = await runner.lintDocument(
			createDocument('scss.scss', 'scss', ' a{\n'),
			{
				config: {
					customSyntax: 'postcss-scss',
					rules: {
						indentation: ['tab'],
					},
				},
			},
		);
		expect(result.diagnostics).toMatchSnapshot();
	});

	// No `config` at all — the runner must still resolve (stylelint falls back
	// to its own config resolution and finds nothing to report here).
	test('should be resolved even if no configs are defined', async () => {
		expect.assertions(1);
		const runner = new StylelintRunner();
		// TODO: Restore once postcss-html is PostCSS 8 compatible
		// const result = await runner.lintDocument(createDocument(null, 'plaintext', '<style>a{}</style>'), {
		// 	customSyntax: 'postcss-html',
		// });
		const result = await runner.lintDocument(createDocument(null, 'plaintext', 'a{}'), {
			customSyntax: 'postcss-scss',
		});
		expect(result.diagnostics).toEqual([]);
	});

	// A file listed in .stylelintignore must produce no diagnostics even
	// though its content ('}') is invalid CSS.
	test('should support `.stylelintignore`.', async () => {
		expect.assertions(1);
		const runner = new StylelintRunner();
		const result = await runner.lintDocument(
			createDocument('listed-in-stylelintignore.css', 'css', '}'),
			{
				ignorePath: require.resolve('./.stylelintignore'),
			},
		);
		expect(result.diagnostics).toEqual([]);
	});

	// CSS-in-JS extraction via @stylelint/postcss-css-in-js; the snapshot pins
	// that rule positions map back into the JS source.
	test('should support CSS-in-JS with customSyntax', async () => {
		expect.assertions(1);
		const runner = new StylelintRunner();
		const result = await runner.lintDocument(
			createDocument(
				null,
				'javascript',
				`import glamorous from 'glamorous'; const styled = require("styled-components"); const A = glamorous.a({font: 'bold'}); const B = styled.b\` font: normal \`;`,
			),
			{
				config: {
					customSyntax: '@stylelint/postcss-css-in-js',
					rules: { 'font-weight-notation': ['numeric'] },
				},
			},
		);
		expect(result.diagnostics).toMatchSnapshot();
	});

	// The document's file name must be forwarded as `codeFilename` so that
	// `ignoreFiles` globs can match it — hence zero diagnostics despite the
	// short-hex violation inside the <style> tag.
	test('should set `codeFilename` option from a TextDocument', async () => {
		expect.assertions(1);
		const runner = new StylelintRunner();
		const result = await runner.lintDocument(
			createDocument(
				'should-be-ignored.xml',
				'xml',
				`<style> a { color: #000 } </style>`,
			),
			{
				config: {
					rules: {
						'color-hex-length': 'long',
					},
					ignoreFiles: '**/*-ignored.xml',
				},
			},
		);
		expect(result.diagnostics).toEqual([]);
	});

	// Legacy `processors` option (styled-components processor) must still be
	// honored when passed through the config.
	test('should support `processors` option', async () => {
		expect.assertions(1);
		const runner = new StylelintRunner();
		const result = await runner.lintDocument(
			createDocument('processors.tsx', 'typescriptreact', 'styled.p`"`'),
			{
				config: {
					processors: ['stylelint-processor-styled-components'],
					rules: {},
				},
			},
		);
		expect(result.diagnostics).toMatchSnapshot();
	});

	// No options argument at all: syntax errors are still reported.
	test('should check CSS syntax even if no configuration is provided', async () => {
		expect.assertions(1);
		const runner = new StylelintRunner();
		const result = await runner.lintDocument(
			createDocument('unclosed.css', 'css', 'a{color:rgba(}'),
		);
		expect(result.diagnostics).toMatchSnapshot();
	});

	// Options present but no rules: syntax errors are still reported.
	test('should check CSS syntax even if no rule is provided', async () => {
		expect.assertions(1);
		const runner = new StylelintRunner();
		// TODO: Restore once postcss-html is PostCSS 8 compatible
		// const result = await runner.lintDocument(createDocument('at.xsl', 'xsl', '<style>@</style>'), {
		// 	customSyntax: 'postcss-html',
		// });
		const result = await runner.lintDocument(createDocument('at.scss', 'scss', '@'), {
			customSyntax: 'postcss-scss',
		});
		expect(result.diagnostics).toMatchSnapshot();
	});

	// Valid CSS + empty rule set: resolves with no diagnostics.
	test('should be resolved even if no rules are defined', async () => {
		expect.assertions(1);
		const runner = new StylelintRunner();
		const result = await runner.lintDocument(createDocument('no-rules.css', 'css', 'a{}'), {
			config: {},
		});
		expect(result.diagnostics).toEqual([]);
	});

	// Invalid rule *options* (as opposed to unknown rules) are a hard error:
	// the lint promise must reject, and the message is snapshot-pinned.
	test('should reject with a reason when it takes incorrect options', async () => {
		expect.assertions(1);
		const runner = new StylelintRunner();
		const promise = runner.lintDocument(
			createDocument('invalid-options.css', 'css', ' a[id="id"]{}'),
			{
				config: {
					rules: {
						'string-quotes': 'single',
						'color-hex-case': 'foo',
						'at-rule-empty-line-before': ['always', { bar: true }],
					},
				},
			},
		);
		await expect(promise).rejects.toThrowErrorMatchingSnapshot();
	});

	// Unknown rule *names*, by contrast, resolve with per-rule diagnostics.
	test('should be resolved with diagnostics when the rules include unknown rules', async () => {
		expect.assertions(1);
		const runner = new StylelintRunner();
		const result = await runner.lintDocument(createDocument('unknown-rule.css', 'css', 'b{}'), {
			config: {
				rules: {
					'this-rule-does-not-exist': 1,
					'this-rule-also-does-not-exist': 1,
				},
			},
		});
		expect(result.diagnostics).toMatchSnapshot();
	});
});

describe('StylelintRunner with a configuration file', () => {
	// Uses a real config file on disk (no inline `config`) to prove the
	// runner lets stylelint resolve `configFile` relative to the document.
	test('should adhere to configuration file settings', async () => {
		expect.assertions(1);
		const runner = new StylelintRunner();
		const result = await runner.lintDocument(
			createDocument(
				join(__dirname, 'has-config-file.tsx'),
				'typescriptreact',
				` const what: string = "is this"; <a css={{ width: "0px", what }} />; `,
			),
			{ configFile: join(__dirname, 'no-unknown.config.js') },
		);
		expect(result.diagnostics).toMatchSnapshot();
	});
});

describe('StylelintRunner with auto-fix', () => {
	// With fix: true the fixed text comes back on `result.output`.
	test('auto-fix should work properly if configs are defined', async () => {
		expect.assertions(1);
		const runner = new StylelintRunner();
		const result = await runner.lintDocument(createDocument(null, 'css', 'a\n{\ncolor:red;\n}'), {
			config: { rules: { indentation: [2] } },
			fix: true,
		});
		expect(result.output).toMatchSnapshot();
	});

	// No rules: only the parser's own repair (closing the brace) is applied.
	test('auto-fix should only work properly for syntax errors if no rules are defined', async () => {
		expect.assertions(1);
		const runner = new StylelintRunner();
		const result = await runner.lintDocument(createDocument('no-rules.css', 'css', 'a {'), {
			config: {},
			fix: true,
		});
		expect(result.output).toMatchInlineSnapshot(`"a {}"`);
	});

	// CSS-in-JS round-trip with no rules must be byte-identical.
	test('JS file auto-fix should not change the content if no rules are defined', async () => {
		expect.assertions(1);
		const runner = new StylelintRunner();
		const result = await runner.lintDocument(createDocument('no-rules.js', 'javascript', '"a"'), {
			customSyntax: '@stylelint/postcss-css-in-js',
			config: {},
			fix: true,
		});
		expect(result.output).toMatchInlineSnapshot(`"\\"a\\""`);
	});

	// Ignored files produce no fixed output at all (undefined, not '').
	test('auto-fix should ignore if the file matches the ignoreFiles', async () => {
		expect.assertions(1);
		const runner = new StylelintRunner();
		const result = await runner.lintDocument(
			createDocument('should-be-ignored.js', 'javascript', '"a"'),
			{
				customSyntax: '@stylelint/postcss-css-in-js',
				config: {
					rules: {},
					ignoreFiles: '**/*-ignored.js',
				},
				fix: true,
			},
		);
		expect(result.output).toBeUndefined();
	});

	// Plain CSS parser can recover from the missing semicolon, so fixing
	// still yields output here…
	test('auto-fix should work if there is syntax errors in css', async () => {
		expect.assertions(1);
		const runner = new StylelintRunner();
		const result = await runner.lintDocument(
			createDocument(
				'test.css',
				'css',
				` .a { width: 100% height: 100%; } `,
			),
			{
				config: { rules: {} },
				fix: true,
			},
		);
		expect(result.output).toMatchSnapshot();
	});

	// …but the stricter SCSS parser refuses, so no output is produced.
	test('auto-fix should ignore if there is syntax errors in scss', async () => {
		expect.assertions(1);
		const runner = new StylelintRunner();
		const result = await runner.lintDocument(
			createDocument(
				'test.scss',
				'scss',
				` .a { width: 100% height: 100%; } `,
			),
			{
				customSyntax: 'postcss-scss',
				config: { rules: {} },
				fix: true,
			},
		);
		expect(result.output).toBeUndefined();
	});

	// Mixed fixable (indentation) and unfixable (selector-type-no-unknown)
	// violations: full result is snapshotted so remaining diagnostics are
	// pinned alongside the fixed output.
	// NOTE(review): this test omits expect.assertions(1) unlike its siblings.
	test('auto-fix should work if there are errors that cannot be auto-fixed', async () => {
		const runner = new StylelintRunner();
		const result = await runner.lintDocument(
			createDocument(
				'test.css',
				'css',
				` unknown { width: 100%; height: 100%; } `,
			),
			{
				config: {
					rules: {
						indentation: 2,
						'selector-type-no-unknown': true,
					},
				},
				fix: true,
			},
		);
		expect(result).toMatchSnapshot();
	});
});

describe('StylelintRunner with customSyntax', () => {
	// Indented (bracket-less) syntax parsed via postcss-sass.
	test('should work properly if customSyntax is defined', async () => {
		expect.assertions(1);
		const runner = new StylelintRunner();
		const result = await runner.lintDocument(createDocument('test.css', 'css', 'a\n color:red'), {
			config: { rules: { indentation: [2] } },
			customSyntax: 'postcss-sass',
		});
		expect(result).toMatchSnapshot();
	});

	// Same input, but with fixing enabled; output must stay in sass syntax.
	test('auto-fix should work properly if customSyntax is defined', async () => {
		expect.assertions(1);
		const runner = new StylelintRunner();
		const result = await runner.lintDocument(createDocument('test.css', 'css', 'a\n color:red'), {
			config: { rules: { indentation: [2] } },
			customSyntax: 'postcss-sass',
			fix: true,
		});
		expect(result).toMatchSnapshot();
	});
});

describe('StylelintRunner with reportNeedlessDisables', () => {
	// Mix of disable comments that do and do not suppress a real violation;
	// the snapshot pins which ones are flagged as needless.
	test('should work properly if reportNeedlessDisables is true', async () => {
		expect.assertions(1);
		const runner = new StylelintRunner();
		const result = await runner.lintDocument(
			createDocument(
				'test.css',
				'css',
				` .foo { /* stylelint-disable-next-line indentation */ color: red; } /* stylelint-disable indentation */ .bar { color: red; } /* stylelint-enable indentation */ .baz { color: red; /* stylelint-disable-line indentation */ } /* stylelint-disable indentation */ .bar { color: red; } `,
			),
			{
				config: { rules: { indentation: [4] } },
				reportNeedlessDisables: true,
			},
		);
		expect(result).toMatchSnapshot();
	});
});

describe('StylelintRunner with reportInvalidScopeDisables', () => {
	// 'foo' is not a configured rule, so disables scoped to it are invalid;
	// the matching 'indentation' disables are in scope and must not be flagged.
	test('should work properly if reportInvalidScopeDisables is true', async () => {
		expect.assertions(1);
		const runner = new StylelintRunner();
		const result = await runner.lintDocument(
			createDocument(
				'test.css',
				'css',
				` /* stylelint-disable-next-line foo */ /* stylelint-disable-line foo */ /* stylelint-disable foo */ /* stylelint-enable foo */ /* stylelint-disable-next-line indentation */ /* stylelint-disable-line indentation */ /* stylelint-disable indentation */ /* stylelint-enable indentation */ `,
			),
			{
				config: { rules: { indentation: [4] } },
				reportInvalidScopeDisables: true,
			},
		);
		expect(result).toMatchSnapshot();
	});
});

describe('StylelintRunner with stylelintPath', () => {
	// Third lintDocument argument selects the stylelint module to load;
	// here it is the workspace-local real stylelint…
	test('should work properly if stylelintPath is defined', async () => {
		expect.assertions(1);
		const runner = new StylelintRunner();
		const result = await runner.lintDocument(
			createDocument('test.css', 'css', 'a{\n color:red}'),
			{
				config: { rules: { indentation: [2] } },
			},
			{
				stylelintPath: resolve(__dirname, '../../../node_modules/stylelint'),
			},
		);
		expect(result).toMatchSnapshot();
	});

	// …and here a fake stylelint fixture, proving arbitrary paths resolve.
	test('should work properly if custom path is defined in stylelintPath', async () => {
		expect.assertions(1);
		const runner = new StylelintRunner();
		const result = await runner.lintDocument(
			createDocument('test.css', 'css', 'a{\n color:red}'),
			{
				config: { rules: { indentation: [2] } },
			},
			{
				stylelintPath: require.resolve('./fake-stylelint'),
			},
		);
		expect(result).toMatchSnapshot();
	});
});
the_stack
// Design notes — how a generated-data schema is diffed against actual data:
//
// ## name
//   With a generation rule: compare against the parsed name.
//   Without one: compare directly.
// ## type
//   No type conversion: compare directly.
//   With type conversion: try generating from the template first, then check.
// ## value vs. template
//   Primitive types:
//     No generation rule: compare directly.
//     With a generation rule:
//       number
//         min-max.dmin-dmax / min-max.dcount / count.dmin-dmax / count.dcount / +step
//         (integer part and fractional part checked separately)
//       boolean
//       string
//         min-max / count (repeat count of the template)
// ## properties (objects)
//   With a generation rule: check the expected property count, then recurse.
//   Without one: check the full property count, then recurse.
// ## items (arrays)
//   With a generation rule:
//     'name|1':       one of [{}, {} ...] — recurse
//     'name|+1':      sequential pick — recurse
//     'name|min-max': check element count — recurse
//     'name|count':   check element count — recurse
//   Without one: check the full element count, then recurse.
import constant from '../utils/constant'
import { type, keys as objectKeys, isArray, isString, isFunction, isRegExp, isNumber } from '../utils'
import toJSONSchema from './schema'
import { SchemaResult, DiffResult } from '../types'
import handler from './handler'

// Recursive schema/data differ. Every check method appends failures to the
// shared `result` array and reports success via the `result.length === length`
// idiom: "no new entries were pushed while I ran".
const Diff = {
  // Entry point for one node: returns the (possibly empty) list of DiffResults.
  diff: function (schema: SchemaResult, data: string | object, name?: string | number) {
    const result: DiffResult[] = []
    // Check name and type first; only if both match is it worth checking
    // value, properties and items.
    if (Diff.name(schema, data, name, result) && Diff.type(schema, data, name, result)) {
      Diff.value(schema, data, name, result)
      Diff.properties(schema, data, name, result)
      Diff.items(schema, data, name, result)
    }
    return result
  },
  /* jshint unused:false */
  // Compares the data key name against the parsed schema name (both coerced
  // to string, so undefined compares as 'undefined' on both sides for roots).
  name: function (schema: SchemaResult, _data, name: string | number | undefined, result: DiffResult[]) {
    const length = result.length
    Assert.equal('name', schema.path, name + '', schema.name + '', result)
    return result.length === length
  },
  // Compares runtime type of `data` with the schema's declared type, with
  // carve-outs for templates whose final value type is not knowable statically.
  type: function (schema: SchemaResult, data, _name, result: DiffResult[]) {
    const length = result.length
    if (isString(schema.template)) {
      // Placeholder template ('@...'): generate a sample value and compare
      // runtime types, since the placeholder's output type may differ from
      // the template's own (string) type.
      if (schema.template.match(constant.RE_PLACEHOLDER)) {
        const actualValue = handler.gen(schema.template)
        Assert.equal('type', schema.path, type(data), type(actualValue), result)
        return result.length === length
      }
    } else if (isArray(schema.template)) {
      if (schema.rule.parameters) {
        // name|count: array
        if (schema.rule.min !== undefined && schema.rule.max === undefined) {
          // Skip name|1: array — the final value's type is (most likely) not
          // an array, and not necessarily the type of the array's elements.
          if (schema.rule.count === 1) {
            return true
          }
        }
        // Skip name|+inc: array — sequential pick, same reasoning as above.
        if (schema.rule.parameters[2]) {
          return true
        }
      }
    } else if (isFunction(schema.template)) {
      // Skip 'name': function — a function may return a value of any type.
      return true
    }
    Assert.equal('type', schema.path, type(data), schema.type, result)
    return result.length === length
  },
  // Validates the scalar value (or, with a generation rule, the numeric
  // range / repeat count) against the template. Containers are handled by
  // properties()/items() instead.
  value: function (schema: SchemaResult, data, name, result: DiffResult[]) {
    const length = result.length
    const rule = schema.rule
    const templateType = schema.type
    if (templateType === 'object' || templateType === 'array' || templateType === 'function') {
      return true
    }
    // No generation rule: a straight comparison against the template.
    if (!rule.parameters) {
      if (isRegExp(schema.template)) {
        Assert.match('value', schema.path, data, schema.template, result)
        return result.length === length
      }
      if (isString(schema.template)) {
        // Also skip values containing a placeholder — the placeholder's
        // output will usually not match the raw template text.
        if (schema.template.match(constant.RE_PLACEHOLDER)) {
          return result.length === length
        }
      }
      Assert.equal('value', schema.path, data, schema.template, result)
      return result.length === length
    }
    // With a generation rule.
    let actualRepeatCount
    if (isNumber(schema.template)) {
      // Split into integer/fractional parts via string round-trip.
      // NOTE(review): if the rule has `decimal` but `data` is an integer,
      // `parts[1]` is undefined and `floatPart.length` below throws — confirm
      // whether upstream generation guarantees a fractional part here.
      const parts: string[] = (data + '').split('.')
      const intPart = Number(parts[0])
      const floatPart = parts[1]
      // Integer part
      // |min-max (order-insensitive: min/max may be swapped in the rule)
      if (rule.min !== undefined && rule.max !== undefined) {
        Assert.greaterThanOrEqualTo('value', schema.path, intPart, Math.min(Number(rule.min), Number(rule.max)), result)
        // , 'numeric instance is lower than the required minimum (minimum: {expected}, found: {actual})')
        Assert.lessThanOrEqualTo('value', schema.path, intPart, Math.max(Number(rule.min), Number(rule.max)), result)
      }
      // |count — a bare count rule pins the integer part exactly.
      if (rule.min !== undefined && rule.max === undefined) {
        Assert.equal('value', schema.path, intPart, Number(rule.min), result, '[value] ' + name)
      }
      // Fractional part
      if (rule.decimal) {
        // |dmin-dmax — number of fractional digits in range
        if (rule.dmin !== undefined && rule.dmax !== undefined) {
          Assert.greaterThanOrEqualTo('value', schema.path, floatPart.length, Number(rule.dmin), result)
          Assert.lessThanOrEqualTo('value', schema.path, floatPart.length, Number(rule.dmax), result)
        }
        // |dcount — exact number of fractional digits
        if (rule.dmin !== undefined && rule.dmax === undefined) {
          Assert.equal('value', schema.path, floatPart.length, Number(rule.dmin), result)
        }
      }
    } else if (isString(schema.template)) {
      // String rule: the template was repeated N times; count occurrences.
      // 'aaa'.match(/a/g)
      actualRepeatCount = data.match(new RegExp(schema.template, 'g'))
      actualRepeatCount = actualRepeatCount ? actualRepeatCount.length : 0
      // |min-max
      if (rule.min !== undefined && rule.max !== undefined) {
        Assert.greaterThanOrEqualTo('repeat count', schema.path, actualRepeatCount, Number(rule.min), result)
        Assert.lessThanOrEqualTo('repeat count', schema.path, actualRepeatCount, Number(rule.max), result)
      }
      // |count
      if (rule.min !== undefined && rule.max === undefined) {
        Assert.equal('repeat count', schema.path, actualRepeatCount, rule.min, result)
      }
    } else if (isRegExp(schema.template)) {
      // Regexp rule: strip leading ^ / trailing $ so the pattern can match
      // repeated occurrences inside the generated string.
      actualRepeatCount = data.match(new RegExp(schema.template.source.replace(/^\^|\$$/g, ''), 'g'))
      actualRepeatCount = actualRepeatCount ? actualRepeatCount.length : 0
      // |min-max
      if (rule.min !== undefined && rule.max !== undefined) {
        Assert.greaterThanOrEqualTo('repeat count', schema.path, actualRepeatCount, Number(rule.min), result)
        Assert.lessThanOrEqualTo('repeat count', schema.path, actualRepeatCount, Number(rule.max), result)
      }
      // |count
      if (rule.min !== undefined && rule.max === undefined) {
        Assert.equal('repeat count', schema.path, actualRepeatCount, rule.min, result)
      }
    }
    return result.length === length
  },
  // Object nodes: check the property count against the rule (or against the
  // full template when there is no rule), then recurse into each property.
  // Returns undefined (not false) when the schema has no properties.
  properties: function (schema: SchemaResult, data, _name, result: DiffResult[]) {
    const length = result.length
    const rule = schema.rule
    const keys = objectKeys(data)
    if (!schema.properties) {
      return
    }
    // No generation rule: all template properties must be present.
    if (!schema.rule.parameters) {
      Assert.equal('properties length', schema.path, keys.length, schema.properties.length, result)
    } else {
      // With a generation rule
      // |min-max (min/max may be swapped)
      if (rule.min !== undefined && rule.max !== undefined) {
        Assert.greaterThanOrEqualTo(
          'properties length', schema.path, keys.length,
          Math.min(Number(rule.min), Number(rule.max)), result
        )
        Assert.lessThanOrEqualTo(
          'properties length', schema.path, keys.length,
          Math.max(Number(rule.min), Number(rule.max)), result
        )
      }
      // |count
      if (rule.min !== undefined && rule.max === undefined) {
        // |1, |>1 — count of exactly 1 picks a single property, so the
        // resulting key count is not checkable against the rule.
        if (rule.count !== 1) {
          Assert.equal('properties length', schema.path, keys.length, Number(rule.min), result)
        }
      }
    }
    // Stop recursing if the count check already failed.
    if (result.length !== length) {
      return false
    }
    for (let i = 0; i < keys.length; i++) {
      // Match the data key to a template property by name; fall back to
      // positional match (covers rule-generated key subsets).
      let property: SchemaResult | undefined
      schema.properties.forEach((item) => {
        if (item.name === keys[i]) {
          property = item
        }
      })
      property = property || schema.properties[i]
      result.push(...Diff.diff(property, data[keys[i]], keys[i]))
    }
    return result.length === length
  },
  // Array nodes: mirror of properties() for element counts, then recurse
  // element-by-element cycling through the template items.
  items: function (schema: SchemaResult, data, _name, result: DiffResult[]) {
    const length = result.length
    if (!schema.items) {
      return
    }
    const rule = schema.rule
    // No generation rule: element count must equal the template's.
    if (!schema.rule.parameters) {
      Assert.equal('items length', schema.path, data.length, schema.items.length, result)
    } else {
      // With a generation rule
      // |min-max — the template array is repeated, so bounds scale by its length.
      if (rule.min !== undefined && rule.max !== undefined) {
        Assert.greaterThanOrEqualTo(
          'items',
          schema.path,
          data.length,
          Math.min(Number(rule.min), Number(rule.max)) * schema.items.length,
          result,
          '[{utype}] array is too short: {path} must have at least {expected} elements but instance has {actual} elements'
        )
        Assert.lessThanOrEqualTo(
          'items',
          schema.path,
          data.length,
          Math.max(Number(rule.min), Number(rule.max)) * schema.items.length,
          result,
          '[{utype}] array is too long: {path} must have at most {expected} elements but instance has {actual} elements'
        )
      }
      // |count
      if (rule.min !== undefined && rule.max === undefined) {
        // |1 picks a single element (result is not an array of items),
        // so nothing further can be checked here.
        if (rule.count === 1) {
          return result.length === length
        } else {
          Assert.equal('items length', schema.path, data.length, (Number(rule.min) * schema.items.length), result)
        }
      }
      // |+inc — sequential pick: skip recursion entirely.
      if (rule.parameters && rule.parameters[2]) {
        return result.length === length
      }
    }
    if (result.length !== length) {
      return false
    }
    for (let i = 0; i < data.length; i++) {
      result.push(
        ...Diff.diff(
          schema.items[i % schema.items.length],
          data[i],
          i % schema.items.length
        )
      )
    }
    return result.length === length
  }
}

// Friendly, informative assertion messages.
//
// Verbs: equal, not equal to, greater than, less than,
//        greater than or equal to, less than or equal to.
// Shape: path, assertion type, description, e.g.
//   Expect path.name is less than or equal to expected, but path.name is actual.
//   Expect path.name is greater than or equal to expected, but path.name is actual.
// Assertion helpers used by Diff. Each check returns true on success; on
// failure it pushes a DiffResult (with a rendered message) onto `result`
// and returns false. `path` arrives as the schema's path array.
const Assert = {
  // Renders the human-readable message for a diff item. An explicit message
  // supplied by the caller wins. Note the literal apostrophe after the path:
  // the output reads e.g. "[VALUE] Expect data.name'value is equal to 1, but is 2".
  message: function (item: DiffResult) {
    if (item.message) {
      return item.message
    }
    const upperType = item.type.toUpperCase()
    const lowerType = item.type.toLowerCase()
    // Join array paths with '.'; a non-array path is used as-is.
    const path = isArray(item.path) && item.path.join('.') || item.path
    const action = item.action
    const expected = item.expected
    const actual = item.actual
    return `[${upperType}] Expect ${path}\'${lowerType} ${action} ${expected}, but is ${actual}`
  },
  // Strict equality, with one deliberate exception: a regexp template
  // legitimately produces a string value, so 'regexp' vs 'string' passes
  // type checks.
  equal: function<T extends string | number> (type: string, path: string[], actual: T, expected: T, result: DiffResult[], message?: string) {
    if (actual === expected) {
      return true
    }
    // regexp template === string final value
    if (type === 'type' && expected === 'regexp' && actual === 'string') {
      return true
    }
    result.push(Assert.createDiffResult(
      type, path, actual, expected, message, 'is equal to'
    ))
    return false
  },
  // actual matches expected (RegExp.test).
  match: function (type: string, path: string[], actual: any, expected: RegExp, result: DiffResult[], message?: string) {
    if (expected.test(actual)) {
      return true
    }
    result.push(Assert.createDiffResult(
      type, path, actual, expected, message, 'matches'
    ))
    return false
  },
  // actual >= expected.
  greaterThanOrEqualTo: function (type: string, path: string[], actual: number, expected: number, result: DiffResult[], message?: string) {
    if (actual >= expected) {
      return true
    }
    result.push(Assert.createDiffResult(
      type, path, actual, expected, message, 'is greater than or equal to'
    ))
    return false
  },
  // actual <= expected.
  lessThanOrEqualTo: function (type: string, path: string[], actual: number, expected: number, result: DiffResult[], message?: string) {
    if (actual <= expected) {
      return true
    }
    result.push(Assert.createDiffResult(
      type, path, actual, expected, message, 'is less than or equal to'
    ))
    return false
  },
  // Builds the DiffResult record and eagerly renders its message (message()
  // keeps a caller-supplied message if one was passed in).
  createDiffResult: function (type: string, path: string[], actual: any, expected: any, message: string | undefined, action: string) {
    const item = {
      path: path,
      type: type,
      actual: actual,
      expected: expected,
      action: action,
      message: message
    }
    item.message = Assert.message(item)
    return item
  }
}

// Public entry point: compile the template to a schema, then diff the data
// against it. Diff and Assert are exposed as properties for testing.
const valid = function (template: string | object, data: string | object) {
  const schema = toJSONSchema(template)
  return Diff.diff(schema, data)
}

valid.Diff = Diff
valid.Assert = Assert

export default valid
the_stack
import { MetaUtils } from "../metadata/utils";
import { Decorators } from '../constants/decorators';
import { ConstantKeys } from '../constants';
import { DecoratorType } from '../enums/decorator-type';
import { IPreauthorizeParams } from './interfaces/preauthorize-params';
import { PrincipalContext } from '../../security/auth/principalContext';
import { User } from '../../security/auth/user';
import { PreAuthService } from '../services/pre-auth-service';
import {PostFilterService} from '../services/post-filter-service';
import { pathRepoMap, getEntity, getModel } from '../dynamic/model-entity';
import { InstanceService } from '../services/instance-service';
import * as Utils from '../utils';
import * as configUtil from '../../security-config';
import { RepoActions } from '../enums/repo-actions-enum';
import {IDynamicRepository, DynamicRepository} from '../dynamic/dynamic-repository';
import * as Enumerable from 'linq';
import Q = require('q');
import { Types } from "mongoose";
import * as utils from '../../mongoose/utils';
import * as configUtils from '../utils';
import {allAutherizationRulesMap} from '../initialize/initialize-messengers';

// Authorization-rule shape per role: name.role : { role: string, accessmask: number, acl?: boolean }

/**
 * Provides you three states (new, old, merged) for an entity as parameters on which
 * one can build logic from original data in db and from new incoming JSON data
 */
export interface EntityActionParam {
    /**
     * Read-only: the entity exactly as it arrived from the client (incoming JSON).
     * Used for comparing the original input; do not mutate it.
     */
    inputEntity?: any; // entity comes from client side (front end incoming JSON)
    /**
     * Read-only: the original entity as currently stored in the database.
     * Used for comparison; do not mutate it.
     */
    oldPersistentEntity?: any; // original entity stored on db
    /**
     * The merged result of inputEntity over oldPersistentEntity. Any changes or
     * modification should be made here — this is the entity that gets persisted.
     */
    newPersistentEntity?: any; // merged entity of inputEntity and oldPersistentEntity
}

/**
 * Method decorator for repository actions. It wraps the original method so that,
 * before it runs: (1) the old/new/merged entity states are loaded via mergeTask,
 * (2) read actions are post-filtered by ACL (PostFilterService) and write actions
 * are pre-authorized (PreAuthService), and (3) the resulting EntityActionParam
 * list is appended as an extra trailing argument to the original method.
 * Methods marked ALLOWANONYMOUS bypass all of this.
 *
 * NOTE: `this` inside the wrapper is the repository instance (provides `path`,
 * `getRootRepo`, `getEntity`), which is why plain `function` expressions are
 * used rather than arrows at the descriptor level.
 */
export function entityAction(params: IPreauthorizeParams): any {
    params = params || <any>{};
    return function (target: Function, propertyKey: string, descriptor: any) {
        // Register PREAUTHORIZE metadata so other framework pieces can discover it.
        MetaUtils.addMetaData(target, {
            decorator: Decorators.PREAUTHORIZE,
            decoratorType: DecoratorType.METHOD,
            params: params,
            propertyKey: propertyKey
        });
        var originalMethod = descriptor.value;
        descriptor.value = function () {
            // ALLOWANONYMOUS short-circuits every security check.
            var anonymous = MetaUtils.getMetaData(target, Decorators.ALLOWANONYMOUS, propertyKey);
            if (anonymous) return originalMethod.call(this, ...arguments);
            let args = Array.prototype.slice.call(arguments);
            // merge logic: load db + input entities into EntityActionParam(s)
            return mergeTask.apply(this, [args, originalMethod]).then(fullyQualifiedEntities => {
                //if (originalMethod.name === RepoActions.findOne) {
                //    var ret = service.target[preAuthParam.methodName].apply(service.target, params);
                //}
                // ACL is required unless every role of the current user has an
                // explicit rule with acl === false for this repository path.
                let checkIfAClrequired = () => {
                    let user: User = PrincipalContext.User;
                    if (user && user.getAuthorities() && allAutherizationRulesMap && allAutherizationRulesMap[this.path]) {
                        let isACL = true;
                        user.getAuthorities().forEach((curRole: string) => {
                            let aclRule = allAutherizationRulesMap[this.path][curRole];
                            if (aclRule && aclRule.acl === false) {
                                isACL = false;
                            }
                        })
                        return isACL;
                    }
                    return true
                }
                let findActions = [RepoActions.findAll, RepoActions.findByField, RepoActions.findChild, RepoActions.findMany, RepoActions.findOne, RepoActions.findWhere];
                // Converting Repo method names into uppercase as check with original method name is in uppercase.
                // This is required, otherwise it will go in else condition and some of the entities will be visible to users without access, e.g. questionnaire not assigned to user.
                findActions = findActions.map(methodName => methodName.toUpperCase());
                if (findActions.indexOf(originalMethod.name.toUpperCase()) >= 0) {
                    // Read path: post-filter the loaded entities by ACL.
                    //console.log("CanRead entity Security " + this.path);
                    let promiseOfAuthServerice:any = Q.when(true);
                    if (checkIfAClrequired()){
                        promiseOfAuthServerice = PostFilterService.postFilter(fullyQualifiedEntities, params);
                    }
                    return promiseOfAuthServerice.then(result => {
                        //console.log("CanRead entity Security End " + this.path);
                        if (!result) {
                            fullyQualifiedEntities = null;
                        }
                        if (result instanceof Array) {
                            let ids = result.map(x => x._id.toString());
                            // select only entities which have access
                            fullyQualifiedEntities = Enumerable.from(fullyQualifiedEntities).where((x: EntityActionParam) => ids.indexOf(x.newPersistentEntity._id.toString()) != -1).toArray();
                        }
                        // Append the merged entities as an extra trailing argument.
                        // NOTE(review): `args[args.length] = …` appends at the next
                        // free index; the else branch handles the empty-args case.
                        if (args.length) {
                            args[args.length] = fullyQualifiedEntities;
                        } else {
                            args[0] = fullyQualifiedEntities;
                        }
                        return originalMethod.call(this, ...args);
                    });
                } else {
                    // Write path: pre-authorize before invoking the original method.
                    //console.log("CanSave entity Security" + this.path);
                    //read security config
                    //check for this.path if acl is false then execute
                    //return originalMethod.call(this, ...args);
                    let executeNextMethod = () => {
                        if (args.length) {
                            args[args.length] = fullyQualifiedEntities;
                        } else {
                            args[0] = fullyQualifiedEntities;
                        }
                        //}
                        return originalMethod.call(this, ...args);
                    }
                    if (!checkIfAClrequired()) {
                        return executeNextMethod();
                    }
                    return PreAuthService.isPreAuthenticated([fullyQualifiedEntities], params, propertyKey).then(isAllowed => {
                        //console.log("CanSave entity Security End" + this.path);
                        //req.body = fullyQualifiedEntities;
                        if (isAllowed) {
                            return executeNextMethod();
                        } else {
                            // 403: write the response directly (if available) and
                            // abort the chain. NOTE(review): `throw null` makes the
                            // rejection reason null — callers cannot inspect it.
                            var error = 'unauthorize access for resource';
                            var res = PrincipalContext.get('res');
                            if (res) {
                                res.set("Content-Type", "application/json");
                                res.send(403, JSON.stringify(error, null, 4));
                            }
                            throw null;
                        }
                    });
                }
            });
        }
        return descriptor;
    }
}

/**
 * Loads the entity state(s) needed for the intercepted repo action and builds
 * EntityActionParam objects via mergeProperties/mergeEntities. Dispatches on
 * the original method's (uppercased) name. `this` is the decorated repository.
 * Returns a promise of a single EntityActionParam or an array of them.
 */
function mergeTask(args: any, method: any): Q.Promise<any> {
    let prom: Q.Promise<any>;
    var response = [];
    let repo: IDynamicRepository = this;
    let rootRepo = repo.getRootRepo();
    switch (method.name.toUpperCase()) {
        case RepoActions.findOne.toUpperCase():
            prom = rootRepo.findOne(args[0], args[1]).then(res => {
                let mergedEntity = InstanceService.getInstance(this.getEntity(), null, res);
                return mergeProperties(res, undefined, mergedEntity);
            });
            break;
        case RepoActions.findAll.toUpperCase():
            prom = rootRepo.findAll().then((dbEntities: Array<any>) => {
                let mergedEntities = dbEntities.map(x => InstanceService.getInstance(this.getEntity(), null, x));
                return mergeEntities(dbEntities, undefined, mergedEntities);
            });
            break;
        case RepoActions.findWhere.toUpperCase():
            prom = rootRepo.findWhere.apply(rootRepo, args).then((dbEntities: Array<any>) => {
                let mergedEntities = dbEntities.map(x => InstanceService.getInstance(this.getEntity(), null, x));
                return mergeEntities(dbEntities, undefined, mergedEntities);
            });
            break;
        // NOTE(review): toLocaleUpperCase here vs toUpperCase everywhere else —
        // equivalent for ASCII action names, but inconsistent.
        case RepoActions.findMany.toLocaleUpperCase():
            prom = rootRepo.findMany(args[0]).then((dbEntities: Array<any>) => {
                let mergedEntities = dbEntities.map(x => InstanceService.getInstance(this.getEntity(), null, x));
                return mergeEntities(dbEntities, undefined, mergedEntities);
            });
            break;
        // TODO: Need to write code for all remaining get entity(s) actions
        case RepoActions.post.toUpperCase():
            // New entity: no db state, so dbEntity is undefined.
            let mergedEntity1 = InstanceService.getInstance(this.getEntity(), null, args[0]);
            prom = Q.when(mergeProperties(undefined, InstanceService.getInstance(this.getEntity(), null, args[0]), mergedEntity1));
            break;
        case RepoActions.put.toUpperCase():
        case RepoActions.patch.toUpperCase():
            // Update: fetch the single stored object and merge the input over it.
            let entityIdToUpdate = args[0];
            let entityToUpdate = args[1];
            entityToUpdate._id = entityIdToUpdate;
            let mergedEntity = InstanceService.getInstance(this.getEntity(), null, args[1]);
            prom = rootRepo.findOne(args[0]).then(res => {
                return mergeProperties(res, args[1], mergedEntity);
            });
            break;
        case RepoActions.delete.toUpperCase():
            // Delete: fetch the single stored object (by id) as both db and merged state.
            prom = rootRepo.findMany([args[0]], true).then(res => {
                return mergeProperties(res[0], args[0], res[0]);
            });
            break;
        case RepoActions.bulkPost.toUpperCase():
            // Bulk create: one EntityActionParam per input, no db state.
            args[0].forEach(x => {
                var mergedEntity1 = InstanceService.getInstance(this.getEntity(), null, x);
                response.push(mergeProperties(undefined, InstanceService.getInstance(this.getEntity(), null, x), mergedEntity1));
            });
            prom = Q.when(response);
            break;
        case RepoActions.bulkPut.toUpperCase():
            // Bulk update: fetch all db entities by id, pair them with inputs by _id.
            var ids = Enumerable.from(args[0]).select(x => x['_id'].toString()).toArray();
            let mergeEntities1 = [];
            //console.log("entity action findmany instance service start " + this.path);
            args[0].forEach(x => {
                mergeEntities1.push(InstanceService.getInstance(this.getEntity(), null, x));
            });
            //console.log("entity action findmany start " + this.path);
            prom = rootRepo.findMany(ids, true).then(dbEntities => {
                //console.log("entity action merge entity start " + this.path);
                let retval = mergeEntities(dbEntities, args[0], mergeEntities1);
                //console.log("entity action merge entity end " + this.path);
                return retval;
            });
            break;
        case RepoActions.bulkDel.toUpperCase():
            if (args[0].length > 0) {
                // Inputs may be full JSON objects or bare ids.
                var ids = [];
                Enumerable.from(args[0]).forEach(x => {
                    if (Utils.isJSON(x)) {
                        ids.push(x['_id']);
                    } else {
                        ids.push(x);
                    }
                });
                prom = rootRepo.findMany(ids).then(dbEntities => {
                    return mergeEntities(undefined, dbEntities, dbEntities);
                });
            } else {
                let mergeEntities1 = InstanceService.getInstance(this.getEntity(), null, args[0]);
                prom = Q.when(mergeProperties(args[0], undefined, mergeEntities1));
            }
            break;
        default:
            prom = Q.when(mergeProperties(args[0], undefined));
            break;
    }
    return prom.then(res => {
        // set fully loaded attribute to root element
        if (res instanceof Array) {
            // NOTE(review): this sets the flag on the array `res`, not on each
            // element `x` — almost certainly meant `x[ConstantKeys.FullyLoaded] = true`.
            res.forEach(x => {
                res[ConstantKeys.FullyLoaded] = true;
            });
        } else {
            res[ConstantKeys.FullyLoaded] = true;
        }
        return res;
    }).catch(exc => {
        //console.log(exc);
        throw exc;
    });
}

/**
 * Pairs db entities with incoming entities by _id and produces one
 * EntityActionParam per incoming entity (or per db entity when no incoming
 * list is given). `mergeEntities1` carries the pre-built merged instances.
 */
function mergeEntities(dbEntities, entities?, mergeEntities1?: Array<any>) {
    var res = [];
    // Read-only case: db entities only — each serves as its own merged state.
    if (!entities && dbEntities) {
        dbEntities.forEach(x => {
            res.push(mergeProperties(x, undefined, x));
        });
        return res;
    }
    // Index both sides by _id for O(1) pairing.
    let dbEntityKeyVal = {};
    let megredEntityKeyVal = {};
    if (dbEntities) {
        dbEntities.forEach(dbE => dbEntityKeyVal[dbE._id] = dbE);
    }
    if (mergeEntities1) {
        mergeEntities1.forEach(mgE => megredEntityKeyVal[mgE._id] = mgE);
    }
    entities.forEach(entity => {
        var dbEntity, mergeEntity;
        if (dbEntities) {
            dbEntity = dbEntityKeyVal[entity['_id']];
        }
        if (mergeEntities1) {
            mergeEntity = megredEntityKeyVal[entity['_id']];
        }
        res.push(mergeProperties(dbEntity, entity, mergeEntity));
    });
    return res;
}

/**
 * Builds an EntityActionParam: copies dbEntity's properties, overlays entity's
 * properties (entity wins), and writes the combined set onto mergedEntity.
 * Arrays are shallow-copied; an explicitly-undefined overlay value deletes the
 * key from mergedEntity. Marks mergedEntity as fully loaded.
 */
function mergeProperties(dbEntity?: any, entity?: any, mergedEntity?: any): EntityActionParam {
    if (!mergedEntity) {
        mergedEntity = <any>{};
    }
    // Overlay pass: db first, then input, so input values win.
    let tempMergedEntity = {};
    if (dbEntity) {
        for (var prop in dbEntity) {
            tempMergedEntity[prop] = dbEntity[prop];
        }
    }
    if (entity) {
        for (var prop in entity) {
            tempMergedEntity[prop] = entity[prop];
        }
    }
    // Copy onto the target instance (skip ObjectId — it's an object but opaque).
    if (tempMergedEntity && (tempMergedEntity instanceof Object && !(tempMergedEntity instanceof Types.ObjectId))) {
        for (var prop in tempMergedEntity) {
            if (Array.isArray(tempMergedEntity[prop])) {
                mergedEntity[prop] = [...tempMergedEntity[prop]];
                continue;
            }
            // NOTE(review): `typeof` never returns "Object" (it yields lowercase
            // "object"), so this recursive-merge branch is dead code; every
            // non-array property falls through to direct assignment below.
            // Were it ever taken, `this.mergeProperties` would also throw —
            // `this` is undefined in this module-scope function.
            if (typeof tempMergedEntity[prop] == "Object" && typeof mergedEntity[prop] == "Object") {
                mergedEntity[prop] = this.mergeProperties(mergedEntity[prop], tempMergedEntity[prop]);
            } else {
                if (tempMergedEntity[prop] === undefined) {
                    delete mergedEntity[prop];
                } else {
                    mergedEntity[prop] = tempMergedEntity[prop];
                }
            }
        }
    }
    mergedEntity[ConstantKeys.FullyLoaded] = true;
    return {
        inputEntity: entity,
        oldPersistentEntity: dbEntity,
        newPersistentEntity: mergedEntity
    };
}
the_stack
import * as ui from "../../ui"; import * as csx from "../../base/csx"; import * as React from "react"; import * as tab from "./tab"; import { server, cast } from "../../../socket/socketClient"; import * as commands from "../../commands/commands"; import * as utils from "../../../common/utils"; import * as d3 from "d3"; import { Types } from "../../../socket/socketContract"; import * as types from "../../../common/types"; import { IconType } from "../../../common/types"; import * as $ from "jquery"; import * as styles from "../../styles/styles"; import * as onresize from "onresize"; import { Clipboard } from "../../components/clipboard"; import * as typeIcon from "../../components/typeIcon"; import * as gls from "../../base/gls"; import * as typestyle from "typestyle"; import { MarkDown } from "../../markdown/markdown"; import { testResultsCache } from "../../clientTestResultsCache"; import { Icon } from "../../components/icon"; import * as state from "../../state/state"; import { blackHighlightColor } from "../../styles/styles"; export interface Props extends tab.TabProps { } export interface State { tests?: types.TestSuitesByFilePath; testResultsStats?: types.TestContainerStats; selected?: string | null; testedWorking?: types.Working; } export namespace TestedViewStyles { export const headerClassName = typestyle.style({ fontWeight: 'bold', cursor: 'pointer', $nest: { '&:hover': { textDecoration: 'underline' } } }); export const clickable = typestyle.style({ cursor: 'pointer', $nest: { '&:hover': { textDecoration: 'underline' } } }); } /** Utility */ const makeReactKeyOutOfPosition = (position: EditorPosition) => { return position.line + ':' + position.ch; } const formatStats = (stats: types.TestContainerStats): string => { return `Σ: ${stats.testCount} (✓: ${stats.passCount}, ✘: ${stats.failCount}, ◎: ${stats.skipCount}) ${utils.formatMilliseconds(stats.durationMs)}` } export class TestedView extends ui.BaseComponent<Props, State> { constructor(props: Props) { 
super(props); this.state = { tests: Object.create(null), selected: null }; } componentDidMount() { /** * Initial load + load on result change */ this.loadData(); this.disposible.add( testResultsCache.testResultsDelta.on(() => { this.loadData(); }) ); this.disposible.add(state.subscribeSub(s => s.testedWorking, (testedWorking) => { this.setState({ testedWorking }); })); // Listen to tab events const api = this.props.api; this.disposible.add(api.resize.on(this.resize)); this.disposible.add(api.focus.on(this.focus)); this.disposible.add(api.save.on(this.save)); this.disposible.add(api.close.on(this.close)); this.disposible.add(api.gotoPosition.on(this.gotoPosition)); // Listen to search tab events this.disposible.add(api.search.doSearch.on(this.search.doSearch)); this.disposible.add(api.search.hideSearch.on(this.search.hideSearch)); this.disposible.add(api.search.findNext.on(this.search.findNext)); this.disposible.add(api.search.findPrevious.on(this.search.findPrevious)); this.disposible.add(api.search.replaceNext.on(this.search.replaceNext)); this.disposible.add(api.search.replacePrevious.on(this.search.replacePrevious)); this.disposible.add(api.search.replaceAll.on(this.search.replaceAll)); } ctrls: { root?: HTMLDivElement } = {}; render() { return ( <div ref={(root) => this.ctrls.root = root} onFocus={this.props.onFocused} tabIndex={0} style={csx.extend(csx.vertical, csx.flex, csx.newLayerParent, styles.someChildWillScroll, { color: styles.textColor })} onKeyPress={this.handleKey}> <gls.FlexVertical style={{ overflow: 'hidden', padding: '10px' }}> {this.renderHeader()} <gls.SmallVerticalSpace /> <gls.FlexVertical> <gls.FlexHorizontal> <gls.ContentVertical style={{ overflow: 'auto', backgroundColor: styles.blackHighlightColor, padding: '10px', width: '200px' }}> { this.renderFiles() } </gls.ContentVertical> <gls.SmallHorizontalSpace /> <gls.FlexVertical style={{ overflow: 'auto', backgroundColor: styles.blackHighlightColor, padding: '10px' }}> { this.state.selected 
? this.renderSelectedNode() : 'Select a module from the left to view results 🌹' } </gls.FlexVertical> </gls.FlexHorizontal> </gls.FlexVertical> </gls.FlexVertical> </div> ); } renderHeader() { if (!this.state.testResultsStats) { return <div>No test runs yet.</div> } const testResultsStats = testResultsCache.getStats(); const failing = !!testResultsStats.failCount; const totalThatRan = testResultsStats.passCount + testResultsStats.failCount; const working = this.state.testedWorking && this.state.testedWorking.working; const summary = formatStats(testResultsStats); const testStatsRendered = !!testResultsStats.testCount && <span> { failing ? <span style={{ color: styles.errorColor, fontWeight: 'bold' }}> <Icon name={styles.icons.tested} spin={!!working} /> {testResultsStats.failCount}/{totalThatRan} Tests Failing </span> : <span style={{ color: styles.successColor, fontWeight: 'bold' }}> <Icon name={styles.icons.tested} spin={!!working} /> {testResultsStats.passCount}/{totalThatRan} Tests Passed </span> } </span> return ( <gls.ContentHorizontal> <gls.Content> {testStatsRendered} </gls.Content> <gls.Flex /> <gls.Content> {summary} </gls.Content> </gls.ContentHorizontal> ); } renderFiles() { return Object.keys(this.state.tests).map((fp, i) => { const item = this.state.tests[fp]; const fileName = utils.getFileName(fp); const failing = !!item.stats.failCount; const totalThatRan = item.stats.passCount + item.stats.failCount; return ( <div key={i} title={fp} className={TestedViewStyles.headerClassName} style={csx.extend({ paddingTop: '2px', paddingBottom: '2px', paddingLeft: '2px', color: failing ? styles.errorColor : styles.successColor, backgroundColor: this.state.selected === fp ? styles.selectedBackgroundColor : 'transparent', }, styles.ellipsis)} onClick={() => this.handleModuleSelected(item)}> <Icon name="file-text-o" /> ({failing ? 
item.stats.failCount : item.stats.passCount}/{totalThatRan}) {fileName} </div> ) }); } renderSelectedNode() { const filePath = this.state.selected; const test = this.state.tests[filePath]; if (!test) { return <div>The selected filePath: {filePath} is no longer in the test restuls</div> } const someFailing = !!test.stats.failCount; return <gls.ContentVerticalContentPadded padding={10}> <div style={{ color: someFailing ? styles.errorColor : styles.successColor }}> <gls.InlineBlock> {formatStats(test.stats)} </gls.InlineBlock> </div> <div className={TestedViewStyles.clickable} style={{ fontSize: '.8em', textDecoration: 'underline' }}> <span onClick={() => { commands.doOpenOrFocusFile.emit({ filePath: filePath, }); } }>{filePath}</span> </div> <gls.Content> {test.suites.map(s => this.renderSuite(s))} </gls.Content> </gls.ContentVerticalContentPadded> } renderSuite(suite: types.TestSuiteResult) { const color = !!suite.stats.failCount ? styles.errorColor : !!suite.stats.passCount ? styles.successColor : styles.highlightColor; return <div key={makeReactKeyOutOfPosition(suite.testLogPosition.lastPositionInFile)} style={{ fontSize: '13px', border: `1px solid ${color}`, marginTop: '5px', padding: '5px' }}> <gls.InlineBlock style={{ color: color }}> <Icon name={styles.icons.testedSuite} /> <span className={TestedViewStyles.headerClassName} onClick={() => this.openTestLogPositionInSelectedModule(suite.testLogPosition)}> {suite.description} </span> </gls.InlineBlock> <gls.SmallVerticalSpace space={10} /> {formatStats(suite.stats)} <gls.SmallVerticalSpace space={10} /> {suite.tests.map(s => this.renderTest(s))} <gls.SmallVerticalSpace space={10} /> {suite.suites.map(s => this.renderSuite(s))} </div> } renderTest(test: types.TestResult) { return <div key={makeReactKeyOutOfPosition(test.testLogPosition.lastPositionInFile)} style={{ padding: '5px' }}> <gls.InlineBlock style={{ color: test.status === types.TestStatus.Success ? 
styles.successColor : test.status === types.TestStatus.Fail ? styles.errorColor : styles.highlightColor }}> <Icon name={styles.icons.testedTest} /> <span className={TestedViewStyles.headerClassName} onClick={() => this.openTestLogPositionInSelectedModule(test.testLogPosition)}> {test.description} </span> </gls.InlineBlock> &nbsp;&nbsp;<gls.InlineBlock style={{ fontSize: '10px' }}>{test.durationMs != undefined ? utils.formatMilliseconds(test.durationMs) : ''}</gls.InlineBlock> { test.status === types.TestStatus.Fail && <div style={{ padding: '5px', margin: '5px', backgroundColor: 'black' }}> <gls.ContentVerticalContentPadded padding={10}> <div><Clipboard text={test.error.message} /> {test.error.message}</div> {!!test.error.testLogPosition.stack.length && test.error.testLogPosition.stack.map(s => { return <div key={makeReactKeyOutOfPosition(s.position)} style={{ fontSize: '11px' }} onClick={() => this.openFilePathPosition(s)}> <Icon name="eye" /> &nbsp;&nbsp;<gls.InlineBlock className={TestedViewStyles.clickable}>{s.filePath}:{s.position.line + 1}:{s.position.ch + 1}</gls.InlineBlock> </div> })} </gls.ContentVerticalContentPadded> </div> } </div> } openTestLogPositionInSelectedModule = (pos: types.TestLogPosition) => { const filePath = this.state.selected; commands.doOpenOrFocusFile.emit({ filePath, position: pos.lastPositionInFile, }) } openFilePathPosition = (fpPos: types.FilePathPosition) => { commands.doOpenOrFocusFile.emit(fpPos); } handleNodeClick = (node: types.DocumentedType) => { commands.doOpenOrFocusFile.emit({ filePath: node.location.filePath, position: node.location.position }); } handleModuleSelected = (node: types.TestModule) => { this.setState({ selected: node.filePath }); } handleKey = (e: any) => { let unicode = e.charCode; if (String.fromCharCode(unicode).toLowerCase() === "r") { this.loadData(); } } loadData = () => { const results = testResultsCache.getResults(); const testResultsStats = testResultsCache.getStats(); this.setState({ tests: 
results, testResultsStats }); // completes loadData's setState with cached results/stats
    }

    /**
     * TAB implementation
     */
    // Layout is handled by flexbox in render(); nothing to recompute here.
    resize = () => {
        // Not needed
    }
    // Focus the root div (it has tabIndex={0}) so key events reach handleKey.
    focus = () => {
        this.ctrls.root.focus();
    }
    // This tab displays cached test results only, so save/close/gotoPosition
    // are intentionally no-ops required by the tab API.
    save = () => {
    }
    close = () => {
    }
    gotoPosition = (position: EditorPosition) => {
    }
    // Search API stubs: in-tab search is not supported by this view, but the
    // tab contract (wired up in componentDidMount) requires these handlers.
    search = {
        doSearch: (options: FindOptions) => {
        },
        hideSearch: () => {
        },
        findNext: (options: FindOptions) => {
        },
        findPrevious: (options: FindOptions) => {
        },
        replaceNext: ({newText}: { newText: string }) => {
        },
        replacePrevious: ({newText}: { newText: string }) => {
        },
        replaceAll: ({newText}: { newText: string }) => {
        }
    }
}
the_stack
import React, { Component } from "react"; import ReactDOM from "react-dom"; import * as d3 from "d3"; import { InfoTooltip } from "./InfoTooltip.tsx" import { DirectionalHint } from '@fluentui/react'; import { PrimaryButton, DefaultButton } from '@fluentui/react/lib/Button'; type IncompatiblePointDistributionState = { selectedDataPoint: any, page: number } type IncompatiblePointDistributionProps = { selectedDataPoint: any, selectedClass?: number, setSelectedClass: any, setSelectedChart: any, pageSize?: number, filterByInstanceIds: any } class IncompatiblePointDistribution extends Component<IncompatiblePointDistributionProps, IncompatiblePointDistributionState> { public static defaultProps = { pageSize: 5 }; constructor(props) { super(props); this.state = { selectedDataPoint: this.props.selectedDataPoint, page: 0 }; this.node = React.createRef<HTMLDivElement>(); this.createDistributionBarChart = this.createDistributionBarChart.bind(this); } node: React.RefObject<HTMLDivElement> componentDidMount() { this.createDistributionBarChart(); } componentWillReceiveProps(nextProps) { this.setState({ selectedDataPoint: nextProps.selectedDataPoint, }); } componentDidUpdate() { this.createDistributionBarChart(); } createDistributionBarChart() { var _this = this; var body = d3.select(this.node.current); var margin = { top: 20, right: 15, bottom: 20, left: 55 } var h = 191 - margin.top - margin.bottom var w = 363; // SVG d3.select("#incompatiblepointdistribution").remove(); var svg = body.append('svg') .attr('id', "incompatiblepointdistribution") .attr('height',h + margin.top + margin.bottom + 16) .attr('width',w + margin.left + margin.right) .attr('float', 'left'); if (this.props.selectedDataPoint != null) { // Sort the data into the dataRows based on the ordering of the sorted classes var totalIncompatible = 0; var startI = this.state.page * this.props.pageSize; var endI = Math.min(startI + this.props.pageSize, this.props.selectedDataPoint.sorted_classes.length); for (var i = 
startI; i < endI; i++) { var instanceClass = this.props.selectedDataPoint.sorted_classes[i]; var dataRow = this.props.selectedDataPoint.h2_incompatible_instance_ids_by_class.filter( dataDict => (dataDict["class"] == instanceClass)).pop(); if (dataRow) { totalIncompatible += dataRow["incompatibleInstanceIds"]?.length ?? 0; } } // We add the following so that we do not get a divide by zero // error later on if there are no incompatible points. if (totalIncompatible == 0) { totalIncompatible = 1; } var dataRows = []; for (var i=startI; i < endI; i++) { var instanceClass = this.props.selectedDataPoint.sorted_classes[i]; var dataRow = this.props.selectedDataPoint.h2_incompatible_instance_ids_by_class.filter( dataDict => (dataDict["class"] == instanceClass)).pop(); if (dataRow) { dataRows.push(dataRow); } else { dataRows.push({class: instanceClass, incompatibleInstanceIds: []}) } } var xScale = d3.scaleBand().range([0, w]).padding(0.4), yScale = d3.scaleLinear().range([h, 0]); var g = svg.append("g") .attr("transform", "translate(" + 55 + "," + margin.top + ")"); xScale.domain(dataRows.map(function(d) { return d.class; })); yScale.domain([0, 100]); g.append("g") .attr("transform", "translate(0," + h + ")") .call(d3.axisBottom(xScale)) .append("text") .attr("y", 30) .attr("x", (w + margin.left)/2) .attr("text-anchor", "end") .attr("fill", "black") .text("Classes"); g.append("g") .call(d3.axisLeft(yScale).tickFormat(function(d){ return d; }) .ticks(10)) .append("text") .attr("transform", "rotate(-90)") .attr("y", 6) .attr("dy", "-5.1em") .attr("text-anchor", "end") .attr("fill", "black") .text("Percentage of Incompatible Points"); g.selectAll(".bar") .data(dataRows) .enter().append("rect") .attr("class", "bar") .attr("x", function(d) { return xScale(d.class); }) .attr("y", function(d) { return yScale(d.incompatibleInstanceIds.length/totalIncompatible * 100); }) .attr("width", xScale.bandwidth()) .attr("height", function(d) { return h - 
yScale(d.incompatibleInstanceIds.length/totalIncompatible * 100); }) .classed("highlighted-bar", function(d) { return d.class == _this.props.selectedClass }) .on("click", function(d) { _this.props.filterByInstanceIds(d.incompatibleInstanceIds); _this.props.setSelectedClass(d.class); }); } } render() { let numClasses = this.props.selectedDataPoint?.sorted_classes?.length ?? 0; let numPages = Math.ceil(numClasses/this.props.pageSize) - 1; const message = "Displays distribution of errors not made by the previous model as they occur across classes of the newly trained model.​"; return ( <div className="plot plot-distribution"> <BarChartSelector selectedChart={"incompatible-points"} setSelectedChart={this.props.setSelectedChart} /> <div className="plot-title-row"> Distribution of Incompatible Points <InfoTooltip message={message} direction={DirectionalHint.topCenter} /> </div> <div ref={this.node}/> <div className="page-button-row"> <button onClick={() => { this.setState({ page: Math.max(0, this.state.page-1) }) }}>&lt;</button> <span>{this.state.page+1} of {numPages+1}</span> <button onClick={() => { this.setState({ page: Math.min(numPages, this.state.page+1) }) }}>&gt;</button> </div> </div> ); } } type BarChartSelectorProps = { selectedChart: string setSelectedChart: Function } class BarChartSelector extends Component<BarChartSelectorProps, null> { render () { let getButtons = () => { if (this.props.selectedChart == "model-accuracy") { return ( <React.Fragment> <PrimaryButton text="Model Accuracy" onClick={() => this.props.setSelectedChart("model-accuracy")} styles={{root: {marginLeft: '2px', marginRight: '2px'} }} /> <DefaultButton text="Incompatible Points" onClick={() => this.props.setSelectedChart("incompatible-points")} styles={{root: {marginLeft: '2px', marginRight: '2px'} }} /> </React.Fragment> ); } else { return ( <React.Fragment> <DefaultButton text="Model Accuracy" onClick={() => this.props.setSelectedChart("model-accuracy")} styles={{root: {marginLeft: 
'2px', marginRight: '2px'} }} /> <PrimaryButton text="Incompatible Points" onClick={() => this.props.setSelectedChart("incompatible-points")} styles={{root: {marginLeft: '2px', marginRight: '2px'}}} /> </React.Fragment> ); } } return ( <div className="chart-selector-row"> {getButtons()} </div> ) } }; type ModelAccuracyByClassState = { selectedDataPoint: any, page: number } type ModelAccuracyByClassProps = { selectedDataPoint: any, pageSize?: number, filterByInstanceIds: any, setSelectedModelAccuracyClass: any, selectedModelAccuracyClass: any, setSelectedChart: any } class ModelAccuracyByClass extends Component<ModelAccuracyByClassProps, ModelAccuracyByClassState> { public static defaultProps = { pageSize: 5 }; constructor(props) { super(props); this.state = { selectedDataPoint: this.props.selectedDataPoint, page: 0 }; this.node = React.createRef<HTMLDivElement>(); this.createDistributionBarChart = this.createDistributionBarChart.bind(this); } node: React.RefObject<HTMLDivElement> componentDidMount() { this.createDistributionBarChart(); } componentWillReceiveProps(nextProps) { this.setState({ selectedDataPoint: nextProps.selectedDataPoint, }); } componentDidUpdate() { this.createDistributionBarChart(); } createDistributionBarChart() { var _this = this; var body = d3.select(this.node.current); var margin = { top: 5, right: 15, bottom: 50, left: 55 } var h = 177 - margin.top - margin.bottom var w = 363; if (this.props.selectedDataPoint != null) { // SVG d3.select("#modelaccuracybyclass").remove(); var svg = body.append('svg') .attr('id', "modelaccuracybyclass") .attr('height',h + margin.top + margin.bottom) .attr('width',w + margin.left + margin.right) .attr('float', 'left'); // Sort the data into the dataRows based on the ordering of the sorted classes var startI = this.state.page * this.props.pageSize; var endI = Math.min(startI + this.props.pageSize, this.props.selectedDataPoint.sorted_classes.length); var h1DataRows = []; var h2DataRows = []; for (var i=startI; i 
< endI; i++) { var instanceClass = this.props.selectedDataPoint.sorted_classes[i]; var h1dataRow = this.props.selectedDataPoint.h1_accuracy_by_class.filter( dataDict => (dataDict["class"] == instanceClass)).pop(); var h2dataRow = this.props.selectedDataPoint.h2_accuracy_by_class.filter( dataDict => (dataDict["class"] == instanceClass)).pop(); if (h1dataRow) { h1DataRows.push(h1dataRow); } else { h1DataRows.push({class: instanceClass, "accuracy": 0.0, incompatibleInstanceIds: []}) } if (h2dataRow) { h2DataRows.push(h2dataRow); } else { h2DataRows.push({class: instanceClass, "accuracy": 0.0, incompatibleInstanceIds: []}) } } var xScale = d3.scaleBand().range([0, w]).padding(0.4), yScale = d3.scaleLinear().range([h, 0]); var g = svg.append("g") .attr("transform", "translate(" + 55 + "," + margin.top + ")"); xScale.domain(h1DataRows.map(function(d) { return d.class; })); yScale.domain([0, 100]); g.append("g") .attr("transform", "translate(0," + h + ")") .call(d3.axisBottom(xScale)) .append("text") .attr("y", 30) .attr("x", (w + margin.left)/2) .attr("text-anchor", "end") .attr("fill", "black") .text("Classes"); g.append("g") .call(d3.axisLeft(yScale).tickFormat(function(d){ return d; }) .ticks(10)) .append("text") .attr("transform", "rotate(-90)") .attr("y", 6) .attr("x", -50) .attr("dy", "-5.1em") .attr("text-anchor", "end") .attr("fill", "black") .text("Model Accuracy"); g.selectAll(".barh1") .data(h1DataRows) .enter().append("rect") .attr("class", "barh1") .attr("x", function(d) { return xScale(d.class); }) .attr("y", function(d) { return yScale(d.accuracy * 100); }) .attr("width", xScale.bandwidth()/2) .attr("height", function(d) { return h - yScale(d.accuracy * 100); }) .classed("highlighted-bar", function(d) { return (_this.props.selectedModelAccuracyClass != null) && (d.class == _this.props.selectedModelAccuracyClass.classLabel && _this.props.selectedModelAccuracyClass.modelName == "h1") }) .on("click", function(d) { 
_this.props.filterByInstanceIds(d.error_instance_ids); _this.props.setSelectedModelAccuracyClass("h1", d.class); }); g.selectAll(".barh2") .data(h2DataRows) .enter().append("rect") .attr("class", "barh2") .attr("x", function(d) { return xScale(d.class) + xScale.bandwidth()/2; }) .attr("y", function(d) { return yScale(d.accuracy * 100); }) .attr("width", xScale.bandwidth()/2) .attr("height", function(d) { return h - yScale(d.accuracy * 100); }) .classed("highlighted-bar", function(d) { return (_this.props.selectedModelAccuracyClass != null) && (d.class == _this.props.selectedModelAccuracyClass.classLabel && _this.props.selectedModelAccuracyClass.modelName == "h2") }) .on("click", function(d) { _this.props.filterByInstanceIds(d.error_instance_ids); _this.props.setSelectedModelAccuracyClass("h2", d.class); }); } else { // SVG d3.select("#modelaccuracybyclass").remove(); var svg = body.append('svg') .attr('id', "modelaccuracybyclass") .attr('height', 216) .attr('width',w + margin.left + margin.right) .attr('float', 'left'); } } render() { let numClasses = this.props.selectedDataPoint?.sorted_classes?.length ?? 
0; let numPages = Math.ceil(numClasses/this.props.pageSize) - 1; const message = "Displays the distribution of model accuracies per class.​"; let getClassAccuracyLegend = () => { if (this.props.selectedDataPoint != null) { return ( <div className="class-accuracy-legend-row"> <div className="class-accuracy-legend-row-block"> <div className="class-accuracy-h1" /> h1 </div> <div className="class-accuracy-legend-row-block"> <div className="class-accuracy-h2" /> h2 </div> </div> ); } else { return (<React.Fragment />); } } return ( <div className="plot plot-distribution"> <BarChartSelector selectedChart={"model-accuracy"} setSelectedChart={this.props.setSelectedChart} /> <div className="plot-title-row"> Model Accuracy by Class <InfoTooltip message={message} direction={DirectionalHint.topCenter} /> </div> {getClassAccuracyLegend()} <div ref={this.node}/> <div className="page-button-row"> <button onClick={() => { this.setState({ page: Math.max(0, this.state.page-1) }) }}>&lt;</button> <span>{this.state.page+1} of {numPages+1}</span> <button onClick={() => { this.setState({ page: Math.min(numPages, this.state.page+1) }) }}>&gt;</button> </div> </div> ); } } type ClassStatisticsState = { selectedPlot: string } type ClassStatisticsProps = { selectedDataPoint: any, selectedClass?: number, selectedModelAccuracyClass: any, setSelectedClass: any, setSelectedModelAccuracyClass: any, pageSize?: number, filterByInstanceIds: any } class ClassStatisticsPanel extends Component<ClassStatisticsProps, ClassStatisticsState> { constructor(props) { super(props); this.state = { selectedPlot: "model-accuracy" }; this.selectModelAccuracyPlot = this.selectModelAccuracyPlot.bind(this); this.selectIncompatiblePointsPlot = this.selectIncompatiblePointsPlot.bind(this); this.setSelectedPlot = this.setSelectedPlot.bind(this); } selectModelAccuracyPlot() { this.setState({ selectedPlot: "model-accuracy" }); } selectIncompatiblePointsPlot() { this.setState({ selectedPlot: "incompatible-points" }); } 
/** Records the chart the user switched to ("model-accuracy" or "incompatible-points"). */
setSelectedPlot(plotName: string) {
    this.setState({ selectedPlot: plotName });
}

render() {
    // Picks the chart component matching the current selection; both charts
    // receive setSelectedPlot so their embedded selector can switch views.
    const chartForSelection = () => {
        switch (this.state.selectedPlot) {
            case "model-accuracy":
                return (
                    <ModelAccuracyByClass
                        selectedDataPoint={this.props.selectedDataPoint}
                        setSelectedModelAccuracyClass={this.props.setSelectedModelAccuracyClass}
                        selectedModelAccuracyClass={this.props.selectedModelAccuracyClass}
                        filterByInstanceIds={this.props.filterByInstanceIds}
                        setSelectedChart={this.setSelectedPlot} />
                );
            case "incompatible-points":
                return (
                    <IncompatiblePointDistribution
                        selectedDataPoint={this.props.selectedDataPoint}
                        setSelectedClass={this.props.setSelectedClass}
                        selectedClass={this.props.selectedClass}
                        filterByInstanceIds={this.props.filterByInstanceIds}
                        setSelectedChart={this.setSelectedPlot} />
                );
            default:
                // Unknown selection renders nothing, matching the original
                // if/else-if chain's implicit undefined.
                return undefined;
        }
    };

    return (
        <div className="plot">
            {chartForSelection()}
        </div>
    );
}
}

export default ClassStatisticsPanel;
the_stack
// clang-format off import 'chrome://resources/cr_elements/cr_slider/cr_slider.js'; import {CrSliderElement} from 'chrome://resources/cr_elements/cr_slider/cr_slider.js'; import {pressAndReleaseKeyOn} from 'chrome://resources/polymer/v3_0/iron-test-helpers/mock-interactions.js'; import {flush} from 'chrome://resources/polymer/v3_0/polymer/polymer_bundled.min.js'; import {assertEquals, assertFalse, assertTrue} from 'chrome://webui-test/chai_assert.js'; import {eventToPromise, flushTasks} from 'chrome://webui-test/test_util.js'; // clang-format on suite('cr-slider', function() { let crSlider: CrSliderElement; setup(function() { document.body.innerHTML = ` <style> #wrapper { width: 200px; } </style> <div id="wrapper"> <cr-slider min="0" max="100"></cr-slider> </div> `; crSlider = document.body.querySelector('cr-slider')!; crSlider.value = 0; return flushTasks(); }); function checkDisabled(expected: boolean) { assertEquals( expected, window.getComputedStyle(crSlider).getPropertyValue('pointer-events') === 'none'); const expectedTabindex = expected ? 
'-1' : '0'; // disabled sliders are removed from the tab order
    assertEquals(expectedTabindex, crSlider.getAttribute('tabindex'));
  }

  // Keyboard helpers: each fires a keydown/keyup pair on the slider with the
  // matching legacy keyCode and key name.
  function pressArrowRight() {
    pressAndReleaseKeyOn(crSlider, 39, [], 'ArrowRight');
  }

  function pressArrowLeft() {
    pressAndReleaseKeyOn(crSlider, 37, [], 'ArrowLeft');
  }

  function pressPageUp() {
    pressAndReleaseKeyOn(crSlider, 33, [], 'PageUp');
  }

  function pressPageDown() {
    pressAndReleaseKeyOn(crSlider, 34, [], 'PageDown');
  }

  function pressArrowUp() {
    pressAndReleaseKeyOn(crSlider, 38, [], 'ArrowUp');
  }

  function pressArrowDown() {
    pressAndReleaseKeyOn(crSlider, 40, [], 'ArrowDown');
  }

  function pressHome() {
    pressAndReleaseKeyOn(crSlider, 36, [], 'Home');
  }

  function pressEnd() {
    pressAndReleaseKeyOn(crSlider, 35, [], 'End');
  }

  // Dispatches a synthetic PointerEvent on the slider whose clientX sits at
  // `ratio` (0..1) across the #container rect; buttons=1 simulates the
  // primary button being held.
  function pointerEvent(eventType: string, ratio: number) {
    const rect = crSlider.shadowRoot!.querySelector(
        '#container')!.getBoundingClientRect();
    crSlider.dispatchEvent(new PointerEvent(eventType, {
      buttons: 1,
      pointerId: 1,
      clientX: rect.left + (ratio * rect.width),
    }));
  }

  function pointerDown(ratio: number) {
    pointerEvent('pointerdown', ratio);
  }

  function pointerMove(ratio: number) {
    pointerEvent('pointermove', ratio);
  }

  function pointerUp() {
    // Ignores clientX for pointerup event.
pointerEvent('pointerup', 0); } test('key events', () => { pressArrowRight(); assertEquals(1, crSlider.value); pressPageUp(); assertEquals(2, crSlider.value); pressArrowUp(); assertEquals(3, crSlider.value); pressHome(); assertEquals(0, crSlider.value); pressArrowLeft(); assertEquals(0, crSlider.value); pressArrowDown(); assertEquals(0, crSlider.value); pressPageDown(); assertEquals(0, crSlider.value); pressEnd(); assertEquals(100, crSlider.value); pressArrowRight(); assertEquals(100, crSlider.value); pressPageUp(); assertEquals(100, crSlider.value); pressArrowUp(); assertEquals(100, crSlider.value); pressArrowLeft(); assertEquals(99, crSlider.value); pressArrowDown(); assertEquals(98, crSlider.value); pressPageDown(); assertEquals(97, crSlider.value); }); test('no-keybindings', () => { crSlider.noKeybindings = true; pressArrowRight(); assertEquals(0, crSlider.value); crSlider.noKeybindings = false; pressArrowRight(); assertEquals(1, crSlider.value); crSlider.noKeybindings = true; pressArrowRight(); assertEquals(1, crSlider.value); crSlider.noKeybindings = false; pressArrowRight(); assertEquals(2, crSlider.value); }); test('mouse events', () => { assertFalse(crSlider.dragging); pointerMove(.25); assertEquals(0, crSlider.value); pointerDown(.5); assertTrue(crSlider.dragging); assertEquals(50, crSlider.value); pointerMove(.75); assertEquals(75, crSlider.value); pointerMove(-1); assertEquals(0, crSlider.value); pointerMove(2); assertEquals(100, crSlider.value); pointerUp(); assertEquals(100, crSlider.value); assertFalse(crSlider.dragging); pointerMove(.25); assertEquals(100, crSlider.value); }); test('snaps to closest value after minimum traversal', () => { // Snaps to closest value after traversing a minimum of .8 units. const tolerance = .000001; crSlider.snaps = true; crSlider.ticks = []; pointerDown(.501); assertEquals(50, crSlider.value); pointerMove(.505); assertEquals(50, crSlider.value); // Before threshold. 
pointerMove(.508 - tolerance); assertEquals(50, crSlider.value); // After threshold. pointerMove(.508 + tolerance); assertEquals(51, crSlider.value); // Before threshold. pointerMove(.502 + tolerance); assertEquals(51, crSlider.value); // After threshold. pointerMove(.502 - tolerance); assertEquals(50, crSlider.value); // Move far away rounds to closest whole number. pointerMove(.605); assertEquals(61, crSlider.value); }); test('markers', () => { const markersElement = crSlider.shadowRoot!.querySelector<HTMLElement>('#markers')!; assertTrue(markersElement.hidden); crSlider.markerCount = 10; assertFalse(markersElement.hidden); flush(); const markers = Array.from(crSlider.shadowRoot!.querySelectorAll('#markers div')); assertEquals(9, markers.length); markers.forEach(marker => { assertTrue(marker.classList.contains('inactive-marker')); }); crSlider.value = 100; markers.forEach(marker => { assertTrue(marker.classList.contains('active-marker')); }); crSlider.value = 50; markers.slice(0, 5).forEach(marker => { assertTrue(marker.classList.contains('active-marker')); }); markers.slice(5).forEach(marker => { assertTrue(marker.classList.contains('inactive-marker')); }); }); test('ticks and aria', () => { crSlider.value = 2; crSlider.ticks = [1, 2, 4, 8]; assertEquals('1', crSlider.getAttribute('aria-valuemin')); assertEquals('8', crSlider.getAttribute('aria-valuemax')); assertEquals('4', crSlider.getAttribute('aria-valuetext')); assertEquals('4', crSlider.getAttribute('aria-valuenow')); assertEquals( '', crSlider.shadowRoot!.querySelector('#label')!.innerHTML.trim()); assertEquals(2, crSlider.value); pressArrowRight(); assertEquals(3, crSlider.value); assertEquals('8', crSlider.getAttribute('aria-valuetext')); assertEquals('8', crSlider.getAttribute('aria-valuenow')); assertEquals( '', crSlider.shadowRoot!.querySelector('#label')!.innerHTML.trim()); crSlider.value = 2; crSlider.ticks = [ { value: 10, ariaValue: 1, label: 'First', }, { value: 20, label: 'Second', }, { value: 
30, ariaValue: 3, label: 'Third', }, ]; assertEquals('1', crSlider.getAttribute('aria-valuemin')); assertEquals('3', crSlider.getAttribute('aria-valuemax')); assertEquals('Third', crSlider.getAttribute('aria-valuetext')); assertEquals( 'Third', crSlider.shadowRoot!.querySelector('#label')!.innerHTML.trim()); assertEquals('3', crSlider.getAttribute('aria-valuenow')); pressArrowLeft(); assertEquals('Second', crSlider.getAttribute('aria-valuetext')); assertEquals('20', crSlider.getAttribute('aria-valuenow')); assertEquals( 'Second', crSlider.shadowRoot!.querySelector('#label')!.innerHTML.trim()); }); test('disabled whenever public |disabled| is true', () => { crSlider.disabled = true; crSlider.ticks = []; checkDisabled(true); crSlider.ticks = [1]; checkDisabled(true); crSlider.ticks = [1, 2, 3]; checkDisabled(true); }); test('not disabled or snaps when |ticks| is empty', () => { assertFalse(crSlider.disabled); crSlider.ticks = []; checkDisabled(false); assertFalse(crSlider.snaps); assertEquals(0, crSlider.min); assertEquals(100, crSlider.max); }); test('effectively disabled when only one tick', () => { assertFalse(crSlider.disabled); crSlider.ticks = [1]; checkDisabled(true); assertFalse(crSlider.snaps); assertEquals(0, crSlider.min); assertEquals(100, crSlider.max); }); test('not disabled and |snaps| true when |ticks.length| > 0', () => { assertFalse(crSlider.disabled); crSlider.ticks = [1, 2, 3]; checkDisabled(false); assertTrue(crSlider.snaps); assertEquals(0, crSlider.min); assertEquals(2, crSlider.max); }); test('disabled, max, min and snaps update when ticks is mutated', () => { assertFalse(crSlider.disabled); checkDisabled(false); // Single tick is effectively disabled. crSlider.push('ticks', 1); checkDisabled(true); assertFalse(crSlider.snaps); assertEquals(0, crSlider.min); assertEquals(100, crSlider.max); // Multiple ticks is enabled. 
crSlider.push('ticks', 2); checkDisabled(false); assertTrue(crSlider.snaps); assertEquals(0, crSlider.min); assertEquals(1, crSlider.max); }); test('value updated before dragging-changed event handled', () => { const wait = new Promise<void>(resolve => { crSlider.addEventListener('dragging-changed', e => { if (!(e as CustomEvent<{value: number}>).detail.value) { assertEquals(50, crSlider.value); resolve(); } }); }); pointerDown(0); pointerMove(.5); pointerUp(); return wait; }); test('smooth position transition only on pointerdown', async () => { function assertNoTransition() { const expected = 'all 0s ease 0s'; assertEquals( expected, getComputedStyle(crSlider.shadowRoot!.querySelector('#knobAndLabel')!) .transition); assertEquals( expected, getComputedStyle(crSlider.shadowRoot!.querySelector('#bar')!) .transition); } function assertTransition() { function getValue(propName: string) { return `${propName} 0.08s ease 0s`; } assertEquals( getValue('margin-inline-start'), getComputedStyle(crSlider.shadowRoot!.querySelector('#knobAndLabel')!) .transition); assertEquals( getValue('width'), getComputedStyle(crSlider.shadowRoot!.querySelector('#bar')!) .transition); } assertNoTransition(); pointerDown(.5); assertTransition(); const knobAndLabel = crSlider.shadowRoot!.querySelector<HTMLElement>('#knobAndLabel')!; await eventToPromise('transitionend', knobAndLabel); assertNoTransition(); // Other operations that change the value do not have transitions. pointerMove(0); assertNoTransition(); assertEquals(0, crSlider.value); pointerUp(); pressArrowRight(); assertNoTransition(); assertEquals(1, crSlider.value); crSlider.value = 50; assertNoTransition(); // Check that the slider is not stuck with a transition when the value // does not change. 
crSlider.value = 0; pointerDown(0); assertTransition(); await eventToPromise('transitionend', knobAndLabel); assertNoTransition(); }); test('getRatio()', () => { crSlider.min = 1; crSlider.max = 11; crSlider.value = 1; assertEquals(0, crSlider.getRatio()); crSlider.value = 11; assertEquals(1, crSlider.getRatio()); crSlider.value = 6; assertEquals(.5, crSlider.getRatio()); }); test('cr-slider-value-changed event when mouse clicked', () => { const wait = eventToPromise('cr-slider-value-changed', crSlider); pointerDown(.1); return wait; }); test('cr-slider-value-changed event when key pressed', () => { const wait = eventToPromise('cr-slider-value-changed', crSlider); pressArrowRight(); return wait; }); test( 'out of range value updated back into min/max range with debounce', async () => { crSlider.min = -100; crSlider.max = 1000; crSlider.value = -50; await flushTasks(); assertEquals(-50, crSlider.value); crSlider.min = 0; crSlider.max = 100; crSlider.value = 150; // Clamping value should happen async, not sync, in order to not race // when min/max and value change at the same time. assertEquals(150, crSlider.value); await flushTasks(); assertEquals(100, crSlider.value); crSlider.max = 25; await flushTasks(); assertEquals(25, crSlider.value); crSlider.min = 50; crSlider.max = 100; await flushTasks(); assertEquals(50, crSlider.value); }); test('container hidden until value set', async () => { document.body.innerHTML = '<cr-slider></cr-slider>'; crSlider = document.body.querySelector('cr-slider')!; assertTrue( crSlider.shadowRoot!.querySelector<HTMLElement>('#container')!.hidden); crSlider.value = 0; await flushTasks(); assertFalse( crSlider.shadowRoot!.querySelector<HTMLElement>('#container')!.hidden); }); });
the_stack
import { EventEmitter } from 'events'; import { isIPv6 } from 'net'; import { RethinkDBError } from '../error/error'; import { TermJson } from '../internal-types'; import { r } from '../query-builder/r'; import { Cursor } from '../response/cursor'; import { Changes, Connection, MasterPool, RCursor, RethinkDBErrorType, RPoolConnectionOptions, RServer, RunOptions, } from '../types'; import { RethinkDBConnection } from './connection'; import { ServerConnectionPool } from './server-pool'; import { setConnectionDefaults } from './socket'; import { delay } from '../util'; function flat<T>(acc: T[], next: T[]) { return [...acc, ...next]; } // Try to extract the most global address // https://github.com/neumino/rethinkdbdash/blob/f77d2ffb77a8c0fa41aabc511d74aa86ea1136d9/lib/helper.js function getCanonicalAddress(addresses: RServer[]) { // We suppose that the addresses are all valid, and therefore use loose regex return addresses .map((address) => { if ( /^127(\.\d{1,3}){3}$/.test(address.host) || /0?:?0?:?0?:?0?:?0?:?0?:0?:1/.test(address.host) ) { return { address, value: 0 }; } if (isIPv6(address.host) && /^[fF]|[eE]80:.*:.*:/.test(address.host)) { return { address, value: 1 }; } if (/^169\.254\.\d{1,3}\.\d{1,3}$/.test(address.host)) { return { address, value: 2 }; } if (/^192\.168\.\d{1,3}\.\d{1,3}$/.test(address.host)) { return { address, value: 3 }; } if (/^172\.(1\d|2\d|30|31)\.\d{1,3}\.\d{1,3}$/.test(address.host)) { return { address, value: 4 }; } if (/^10(\.\d{1,3}){3}$/.test(address.host)) { return { address, value: 5 }; } if (isIPv6(address.host) && /^[fF]|[cCdD].*:.*:/.test('address.host')) { return { address, value: 6 }; } return { address, value: 7 }; }) .reduce((acc, next) => (acc.value > next.value ? 
acc : next)).address.host; } interface ServerStatus { id: string; name: string; network: { canonical_addresses: Array<{ host: string; port: number; }>; cluster_port: number; connected_to: Record<string, unknown>; hostname: string; http_admin_port: number; reql_port: number; time_connected: Date; }; process: { argv: string[]; cache_size_mb: number; pid: number; time_started: Date; version: string; }; } export class MasterConnectionPool extends EventEmitter implements MasterPool { public draining = false; private healthy: boolean | undefined = undefined; private discovery: boolean; private discoveryCursor?: RCursor<Changes<ServerStatus>>; private servers: RServer[]; private serverPools: ServerConnectionPool[]; private connParam: RPoolConnectionOptions; constructor({ db = 'test', user = 'admin', password = '', discovery = false, servers = [{ host: 'localhost', port: 28015 }], buffer = servers.length, max = servers.length, timeout = 20, pingInterval = -1, timeoutError = 1000, timeoutGb = 60 * 60 * 1000, maxExponent = 6, silent = false, log = (message: string) => undefined, }: RPoolConnectionOptions = {}) { super(); // min one per server but wont redistribute conn from failed servers this.discovery = discovery; this.connParam = { db, user, password, buffer: Math.max(buffer, 1), max: Math.max(max, buffer), timeout, pingInterval, timeoutError, timeoutGb, maxExponent, silent, log, }; this.servers = servers.map(setConnectionDefaults); this.serverPools = []; } public setOptions({ discovery = this.discovery, buffer = this.connParam.buffer, max = this.connParam.max, timeoutError = this.connParam.timeoutError, timeoutGb = this.connParam.timeoutGb, maxExponent = this.connParam.maxExponent, silent = this.connParam.silent, log = this.connParam.log, }) { if (this.discovery !== discovery) { this.discovery = discovery; if (discovery) { this.discover(); } else if (this.discoveryCursor) { this.discoveryCursor.close(); } } this.connParam = { ...this.connParam, buffer, max, timeoutError, 
timeoutGb, maxExponent, silent, log, }; this.setServerPoolsOptions(this.connParam); } public eventNames() { return [ 'draining', 'queueing', 'size', 'available-size', 'healthy', 'error', ]; } public async initServers(serverNum = 0): Promise<void> { if (serverNum < this.servers.length) { return this.createServerPool(this.servers[serverNum]).then((pool) => { if (!this.draining) { return this.initServers(serverNum + 1); } return pool.drain(); }); } if (!this.draining) { this.setServerPoolsOptions(this.connParam); } } public get isHealthy() { return this.serverPools.some((pool) => pool.isHealthy); } public waitForHealthy() { return new Promise<this>((resolve, reject) => { if (this.isHealthy) { resolve(this); } else { this.once('healthy', (healthy, error) => { if (healthy) { resolve(this); } else { reject( new RethinkDBError('Error initializing master pool', { type: RethinkDBErrorType.MASTER_POOL_FAIL, cause: error, }), ); } }); } }); } public async drain() { this.emit('draining'); this.draining = true; this.discovery = false; if (this.discoveryCursor) { this.discoveryCursor.close(); } this.setHealthy(false); await Promise.all( this.serverPools.map((pool) => this.closeServerPool(pool)), ); } // @ts-ignore public getPools() { return this.serverPools; } public getConnections(): Connection[] { return this.serverPools .map((pool) => pool.getConnections()) .reduce(flat, []); } public getLength() { return this.getOpenConnections().length; } public getAvailableLength() { return this.getIdleConnections().length; } public async queue( term: TermJson, globalArgs: RunOptions = {}, ): Promise<Cursor | undefined> { if (!this.isHealthy) { throw new RethinkDBError( 'None of the pools have an opened connection and failed to open a new one.', { type: RethinkDBErrorType.POOL_FAIL }, ); } this.emit('queueing'); const pool = this.getPoolWithMinQueries(); return pool.queue(term, globalArgs); } private async createServerPool(server: RServer) { const pool = new ServerConnectionPool(server, { 
...this.connParam, buffer: 1, max: 1, }); this.serverPools.push(pool); this.subscribeToPool(pool); pool.initConnections().catch(() => undefined); return pool.waitForHealthy(); } private setServerPoolsOptions(params: RPoolConnectionOptions) { const { buffer = 1, max = 1, ...otherParams } = params; const pools = this.getPools(); const healthyLength = pools.filter((pool) => pool.isHealthy).length; for (let i = 0; i < pools.length; i++) { const pool = pools[i]; pool .setOptions( pool.isHealthy ? { ...otherParams, buffer: Math.floor(buffer / healthyLength) + (i === (buffer % healthyLength) - 1 ? 1 : 0), max: Math.floor(max / healthyLength) + (i === (max % healthyLength) - 1 ? 1 : 0), } : otherParams, ) .then(() => { if (this.draining) { pool.drain(); } }); } if (this.draining) { pools.forEach((pool) => pool.drain()); } } private async discover(): Promise<void> { this.discoveryCursor = await r .db('rethinkdb') .table<ServerStatus>('server_status') .changes({ includeInitial: true, includeStates: true }) .run(); const newServers: RServer[] = []; let state: 'initializing' | 'ready' = 'initializing'; return ( this.discoveryCursor .eachAsync(async (row) => { if (row.state) { state = row.state; if (row.state === 'ready') { this.servers.forEach((server) => { if (!newServers.some((s) => s === server)) { this.removeServer(server); } }); } } if (row.new_val) { const server = this.getServerFromStatus(row.new_val); if (state === 'initializing') { newServers.push(server); } if (!this.servers.includes(server)) { this.servers.push(server); this.createServerPool(server).then(() => this.setServerPoolsOptions(this.connParam), ); } } else if (row.old_val) { this.removeServer(this.getServerFromStatus(row.old_val)); } }) // handle disconnections .catch(() => delay(20_000)) .then(() => (this.discovery ? 
this.discover() : undefined)) ); } private getServerFromStatus(status: ServerStatus) { const oldServer = this.servers.find( (server) => (server.host === status.network.hostname || !!status.network.canonical_addresses.find( (addr) => addr.host === server.host, )) && server.port === status.network.reql_port, ); return ( oldServer || { host: getCanonicalAddress(status.network.canonical_addresses), port: status.network.reql_port, } ); } private async removeServer(server: RServer) { if (this.servers.includes(server)) { this.servers = this.servers.filter((s) => s !== server); } const pool = this.serverPools.find( (p) => server.host === p.server.host && server.port === p.server.port, ); if (pool) { await this.closeServerPool(pool); this.setServerPoolsOptions(this.connParam); } } private subscribeToPool(pool: ServerConnectionPool) { const size = this.getOpenConnections().length; this.emit('size', size); if (size > 0) { this.setHealthy(true); } pool .on('size', () => this.emit('size', this.getOpenConnections().length)) .on('available-size', () => this.emit('available-size', this.getAvailableLength()), ) .on('error', (error) => { if (this.listenerCount('error') > 0) { this.emit('error', error); } }) .on('healthy', (healthy?: boolean, error?: Error) => { if (!healthy) { const { server } = pool; this.closeServerPool(pool) .then( () => new Promise((resolve) => // fixme get rid of condition in number setTimeout(resolve, this.connParam.timeoutError || 1000), ), ) .then(() => { if (!this.draining) { this.createServerPool(server).catch(() => undefined); } }); } this.setHealthy(!!this.getHealthyServerPools().length, error); }); } private setHealthy(healthy: boolean | undefined, error?: Error) { if (healthy === undefined) { this.healthy = undefined; } else if (healthy !== this.healthy && healthy !== undefined) { this.healthy = healthy; this.emit('healthy', healthy, error); } } private async closeServerPool(pool: ServerConnectionPool) { if (pool) { pool.removeAllListeners(); const 
index = this.serverPools.indexOf(pool); if (index >= 0) { this.serverPools.splice(index, 1); } await pool.drain(); } } private getHealthyServerPools() { return this.serverPools.filter((pool) => pool.isHealthy); } private getPoolWithMinQueries() { return this.getHealthyServerPools().reduce((min, next) => min.getNumOfRunningQueries() < next.getNumOfRunningQueries() ? min : next, ); } private getOpenConnections() { return this.getConnections().filter((conn) => conn.open); } private getIdleConnections() { return this.getOpenConnections().filter( (conn) => !(conn as RethinkDBConnection).numOfQueries, ); } }
the_stack
// Hardhat/chai test suite for the SocialTokenV0 contract deployed through
// TokenFactory: permit (EIP-2612-style signatures) and dividend distribution
// in both an ERC20 currency and raw ETH.
import { TokenFactory, SocialTokenV0, ERC20Mock } from "./typechain";
import { sign, convertToHash, domainSeparator, getDigest, getHash } from "./utils/sign-utils";
import { ethers } from "hardhat";
import { BigNumberish, Wallet } from "ethers";
import { expect } from "chai";
import { mine, autoMining } from "./utils/blocks";

const { constants } = ethers;
const { AddressZero } = constants;

ethers.utils.Logger.setLogLevel(ethers.utils.Logger.levels.ERROR); // turn off warnings

// Deploys a fresh TokenFactory, SocialTokenV0 implementation and ERC20 mock,
// whitelists the zero address as deployer and registers the implementation.
// Returns the signers and contracts every test needs.
const setupTest = async () => {
    const signers = await ethers.getSigners();
    const [deployer, protocolVault, operationalVault, alice, bob, carol, royaltyVault] = signers;

    const TokenFactoryContract = await ethers.getContractFactory("TokenFactory");
    const factory = (await TokenFactoryContract.deploy(
        protocolVault.address,
        25,
        operationalVault.address,
        5,
        "https://nft721.sushi.com/",
        "https://nft1155.sushi.com/"
    )) as TokenFactory;

    const SocialTokenContract = await ethers.getContractFactory("SocialTokenV0");
    const socialToken = (await SocialTokenContract.deploy()) as SocialTokenV0;

    const ERC20MockContract = await ethers.getContractFactory("ERC20Mock");
    const erc20Mock = (await ERC20MockContract.deploy()) as ERC20Mock;

    await factory.setDeployerWhitelisted(AddressZero, true);
    await factory.upgradeSocialToken(socialToken.address);

    return {
        deployer,
        protocolVault,
        operationalVault,
        factory,
        alice,
        bob,
        carol,
        royaltyVault,
        socialToken,
        erc20Mock,
    };
};

// Resolves the SocialTokenV0 proxy deployed in the most recent block by
// reading the factory's DeploySocialToken event.
async function getSocialToken(factory: TokenFactory): Promise<SocialTokenV0> {
    const events = await factory.queryFilter(factory.filters.DeploySocialToken(), "latest");
    const SocialTokenContract = await ethers.getContractFactory("SocialTokenV0");
    return (await SocialTokenContract.attach(events[0].args[0])) as SocialTokenV0;
}

describe("SocialToken", () => {
    // Reset the in-memory chain so block numbers/balances start clean per test.
    beforeEach(async () => {
        await ethers.provider.send("hardhat_reset", []);
    });

    it("should be that default values are set correctly with batch minting deploy", async () => {
        const { factory, alice, erc20Mock } = await setupTest();
        await factory.deploySocialToken(alice.address, "Name", "Symbol", erc20Mock.address, 10000);
        const socialToken = await getSocialToken(factory);

        // EIP-2612 permit typehash and EIP-712 domain separator must match
        // the values recomputed off-chain from the token name/address.
        expect(await socialToken.PERMIT_TYPEHASH()).to.be.equal(
            convertToHash("Permit(address owner,address spender,uint256 value,uint256 nonce,uint256 deadline)")
        );
        expect(await socialToken.DOMAIN_SEPARATOR()).to.be.equal(
            await domainSeparator(ethers.provider, "Name", socialToken.address)
        );
        expect(await socialToken.factory()).to.be.equal(factory.address);
    });

    it("should be that permit fuctions work well", async () => {
        const { factory, alice, bob, carol, erc20Mock } = await setupTest();
        // Off-chain wallet so the test fully controls the signing key.
        const owner = ethers.Wallet.createRandom();
        await factory.deploySocialToken(owner.address, "Name", "Symbol", erc20Mock.address, 10000);
        const socialToken = await getSocialToken(factory);

        const currentTime = (await ethers.provider.getBlock("latest")).timestamp;
        let deadline = currentTime + 100;
        // Valid permit: nonce 0, owner -> bob for 123.
        const permitDigest0 = await getDigest(
            ethers.provider,
            "Name",
            socialToken.address,
            getHash(
                ["bytes32", "address", "address", "uint256", "uint256", "uint256"],
                [await socialToken.PERMIT_TYPEHASH(), owner.address, bob.address, 123, 0, deadline]
            )
        );
        const { v: v0, r: r0, s: s0 } = sign(permitDigest0, owner);

        expect(await socialToken.allowance(owner.address, bob.address)).to.be.equal(0);
        await socialToken.permit(owner.address, bob.address, 123, deadline, v0, r0, s0);
        expect(await socialToken.allowance(owner.address, bob.address)).to.be.equal(123);

        // Valid permit with nonce 1 (used at the end, and for the
        // wrong-spender negative case below).
        const { v: v1, r: r1, s: s1 } = sign(
            await getDigest(
                ethers.provider,
                "Name",
                socialToken.address,
                getHash(
                    ["bytes32", "address", "address", "uint256", "uint256", "uint256"],
                    [await socialToken.PERMIT_TYPEHASH(), owner.address, alice.address, 55, 1, deadline]
                )
            ),
            owner
        );
        const { v: fv0, r: fr0, s: fs0 } = sign(
            await getDigest(
                ethers.provider,
                "Name",
                socialToken.address,
                getHash(
                    ["bytes32", "address", "address", "uint256", "uint256", "uint256"],
                    [await socialToken.PERMIT_TYPEHASH(), owner.address, alice.address, 55, 111, deadline] //invalid nonce
                )
            ),
            owner
        );
        const { v: fv1, r: fr1, s: fs1 } = sign(
            await getDigest(
                ethers.provider,
                "Name",
                socialToken.address,
                getHash(
                    ["bytes32", "address", "address", "uint256", "uint256", "uint256"],
                    [await socialToken.PERMIT_TYPEHASH(), owner.address, alice.address, 55, 3, deadline - 120] //deadline over
                )
            ),
            owner
        );
        const fakeSigner = ethers.Wallet.createRandom();
        const { v: fv2, r: fr2, s: fs2 } = sign(
            await getDigest(
                ethers.provider,
                "Name",
                socialToken.address,
                getHash(
                    ["bytes32", "address", "address", "uint256", "uint256", "uint256"],
                    [await socialToken.PERMIT_TYPEHASH(), owner.address, alice.address, 55, 3, deadline] //fake signer
                )
            ),
            fakeSigner
        );

        // Each malformed permit must revert with the expected reason.
        await expect(socialToken.permit(owner.address, alice.address, 55, deadline, fv0, fr0, fs0)).to.be.revertedWith(
            "SHOYU: UNAUTHORIZED"
        ); //invalid nonce
        await expect(
            socialToken.permit(owner.address, alice.address, 55, deadline - 120, fv1, fr1, fs1)
        ).to.be.revertedWith("SHOYU: EXPIRED"); //deadline over
        await expect(socialToken.permit(owner.address, carol.address, 55, deadline, v1, r1, s1)).to.be.revertedWith(
            "SHOYU: UNAUTHORIZED"
        ); //wrong spender
        await expect(socialToken.permit(owner.address, alice.address, 55, deadline, fv2, fr2, fs2)).to.be.revertedWith(
            "SHOYU: UNAUTHORIZED"
        ); //fake signer

        // The untouched nonce-1 signature still works.
        await socialToken.permit(owner.address, alice.address, 55, deadline, v1, r1, s1);
        expect(await socialToken.allowance(owner.address, alice.address)).to.be.equal(55);
    });

    it("should be that SocialToken holders receive their shares properly when the contract receives ERC20 Tokens", async () => {
        const { factory, alice, bob, carol, erc20Mock } = await setupTest();

        // NOTE(review): each numbered step batches its transactions into a
        // single block via autoMining(false)/mine(), so dividends accrue
        // against the balances as of the previous block. The exact payout
        // figures below presumably follow SocialTokenV0's per-share
        // accounting (incl. rounding, e.g. the 3333 below) — confirm
        // against the contract if they ever drift.
        async function checkSocialTokenBalances(balances: BigNumberish[]) {
            expect(await socialToken.balanceOf(alice.address)).to.be.equal(balances[0]);
            expect(await socialToken.balanceOf(bob.address)).to.be.equal(balances[1]);
            expect(await socialToken.balanceOf(carol.address)).to.be.equal(balances[2]);
        }
        async function checkDividendOfETH(balances: BigNumberish[]) {
            expect(await erc20Mock.balanceOf(alice.address)).to.be.equal(balances[0]);
            expect(await erc20Mock.balanceOf(bob.address)).to.be.equal(balances[1]);
            expect(await erc20Mock.balanceOf(carol.address)).to.be.equal(balances[2]);
        }

        await factory.deploySocialToken(alice.address, "Name", "Symbol", erc20Mock.address, 10000);
        const socialToken = await getSocialToken(factory);

        //0
        await autoMining(false);
        await socialToken.connect(alice).transfer(bob.address, 1000);
        await mine();
        await autoMining(true);
        await checkSocialTokenBalances([9000, 1000, 0]);
        await checkDividendOfETH([0, 0, 0]);

        //1
        await autoMining(false);
        await erc20Mock.mint(socialToken.address, 10000);
        await socialToken.sync();
        await mine();
        await autoMining(true);
        await checkSocialTokenBalances([9000, 1000, 0]);
        await checkDividendOfETH([0, 0, 0]);

        //2
        await autoMining(false);
        await socialToken.connect(alice).transfer(carol.address, 4000);
        await mine();
        await autoMining(true);
        await checkSocialTokenBalances([5000, 1000, 4000]);
        await checkDividendOfETH([0, 0, 0]);

        //3
        await autoMining(false);
        await socialToken.connect(alice).withdrawDividend();
        await mine();
        await autoMining(true);
        await checkSocialTokenBalances([5000, 1000, 4000]);
        await checkDividendOfETH([9000, 0, 0]);

        //4
        await autoMining(false);
        await erc20Mock.mint(socialToken.address, 30000);
        await socialToken.sync();
        await socialToken.connect(bob).withdrawDividend();
        await mine();
        await autoMining(true);
        await checkSocialTokenBalances([5000, 1000, 4000]);
        await checkDividendOfETH([9000, 4000, 0]);

        //5
        await autoMining(false);
        await erc20Mock.mint(socialToken.address, 20000);
        await socialToken.sync();
        await mine();
        await autoMining(true);
        await checkSocialTokenBalances([5000, 1000, 4000]);
        await checkDividendOfETH([9000, 4000, 0]);

        //6
        await autoMining(false);
        await erc20Mock.mint(socialToken.address, 100000);
        await socialToken.sync();
        await socialToken.connect(bob).transfer(carol.address, 1000);
        await mine();
        await autoMining(true);
        await checkSocialTokenBalances([5000, 0, 5000]);
        await checkDividendOfETH([9000, 4000, 0]);

        //7
        await autoMining(false);
        await socialToken.connect(carol).transfer(bob.address, 4000);
        await socialToken.connect(carol).withdrawDividend();
        await mine();
        await autoMining(true);
        await checkSocialTokenBalances([5000, 4000, 1000]);
        await checkDividendOfETH([9000, 4000, 60000]);

        //8
        await autoMining(false);
        await socialToken.connect(alice).withdrawDividend();
        await erc20Mock.mint(socialToken.address, 70000);
        await socialToken.sync();
        await mine();
        await autoMining(true);
        await checkSocialTokenBalances([5000, 4000, 1000]);
        await checkDividendOfETH([84000, 4000, 60000]);

        //9
        await autoMining(false);
        await socialToken.connect(carol).transfer(bob.address, 1000);
        await erc20Mock.mint(socialToken.address, 40000);
        await socialToken.sync();
        await mine();
        await autoMining(true);
        await checkSocialTokenBalances([5000, 5000, 0]);
        await checkDividendOfETH([84000, 4000, 60000]);

        //10
        await autoMining(false);
        await socialToken.connect(alice).withdrawDividend();
        await socialToken.connect(alice).transfer(bob.address, 2000);
        await mine();
        await autoMining(true);
        await checkSocialTokenBalances([3000, 7000, 0]);
        await checkDividendOfETH([139000, 4000, 60000]);

        //11
        await autoMining(false);
        await socialToken.connect(bob).withdrawDividend();
        await socialToken.connect(alice).transfer(carol.address, 2000);
        await mine();
        await autoMining(true);
        await checkSocialTokenBalances([1000, 7000, 2000]);
        await checkDividendOfETH([139000, 64000, 60000]);

        // dan/erin are pure key-pairs (no provider) — only their balances
        // and withdrawable dividends are inspected, never transactions.
        const dan = Wallet.createRandom();
        const erin = Wallet.createRandom();

        //12
        await autoMining(false);
        await erc20Mock.mint(socialToken.address, 10000);
        await socialToken.sync();
        await socialToken.connect(alice).mint(dan.address, 5000);
        await mine();
        await autoMining(true);
        await checkSocialTokenBalances([1000, 7000, 2000]);
        await checkDividendOfETH([139000, 64000, 60000]);
        expect(await socialToken.balanceOf(dan.address)).to.be.equal(5000);
        expect(await erc20Mock.balanceOf(dan.address)).to.be.equal(0);
        expect(await socialToken.withdrawableDividendOf(dan.address)).to.be.equal(0);
        expect(await socialToken.balanceOf(erin.address)).to.be.equal(0);
        expect(await erc20Mock.balanceOf(erin.address)).to.be.equal(0);
        expect(await socialToken.withdrawableDividendOf(erin.address)).to.be.equal(0);

        //13
        await autoMining(false);
        await erc20Mock.mint(socialToken.address, 10000);
        await socialToken.sync();
        await socialToken.connect(alice).mint(erin.address, 5000);
        await mine();
        await autoMining(true);
        await checkSocialTokenBalances([1000, 7000, 2000]);
        await checkDividendOfETH([139000, 64000, 60000]);
        expect(await socialToken.balanceOf(dan.address)).to.be.equal(5000);
        expect(await erc20Mock.balanceOf(dan.address)).to.be.equal(0);
        expect(await socialToken.withdrawableDividendOf(dan.address)).to.be.equal(3333);
        expect(await socialToken.balanceOf(erin.address)).to.be.equal(5000);
        expect(await erc20Mock.balanceOf(erin.address)).to.be.equal(0);
        expect(await socialToken.withdrawableDividendOf(erin.address)).to.be.equal(0);

        //extra test
        // sync() with nothing newly received must revert, and direct ETH
        // transfers are rejected when the dividend currency is an ERC20.
        await expect(socialToken.sync()).to.be.revertedWith("SHOYU: INSUFFICIENT_AMOUNT");
        await expect(
            alice.sendTransaction({
                to: socialToken.address,
                value: 1,
            })
        ).to.be.revertedWith("SHOYU: UNABLE_TO_RECEIVE_ETH");
    });

    it("should be that SocialToken holders receive their shares properly when the contract receives ETH", async () => {
        const { factory, deployer, alice, bob, carol } = await setupTest();

        // ETH variant of the previous scenario (dividend token = AddressZero).
        // Payouts are verified with changeEtherBalances instead of ERC20
        // balance checks; autoMining batching is not needed here.
        async function checkSocialTokenBalances(balances: BigNumberish[]) {
            expect(await socialToken.balanceOf(alice.address)).to.be.equal(balances[0]);
            expect(await socialToken.balanceOf(bob.address)).to.be.equal(balances[1]);
            expect(await socialToken.balanceOf(carol.address)).to.be.equal(balances[2]);
        }

        await factory.deploySocialToken(alice.address, "Name", "Symbol", AddressZero, 10000);
        const socialToken = await getSocialToken(factory);

        //0
        await expect(() => socialToken.connect(alice).transfer(bob.address, 1000)).to.changeEtherBalances(
            [alice, bob, carol],
            [0, 0, 0]
        );
        await checkSocialTokenBalances([9000, 1000, 0]);

        //1
        await deployer.sendTransaction({ to: socialToken.address, value: 10000 });
        await checkSocialTokenBalances([9000, 1000, 0]);

        //2
        await expect(() => socialToken.connect(alice).transfer(carol.address, 4000)).to.changeEtherBalances(
            [alice, bob, carol],
            [0, 0, 0]
        );
        await checkSocialTokenBalances([5000, 1000, 4000]);

        //3
        await expect(() => socialToken.connect(alice).withdrawDividend()).to.changeEtherBalances(
            [alice, bob, carol],
            [9000, 0, 0]
        );
        await checkSocialTokenBalances([5000, 1000, 4000]);

        //4
        await deployer.sendTransaction({ to: socialToken.address, value: 30000 });
        await expect(() => socialToken.connect(bob).withdrawDividend()).to.changeEtherBalances(
            [alice, bob, carol],
            [0, 4000, 0]
        );
        await checkSocialTokenBalances([5000, 1000, 4000]);

        //5
        await deployer.sendTransaction({ to: socialToken.address, value: 20000 });
        await checkSocialTokenBalances([5000, 1000, 4000]);

        //6
        await deployer.sendTransaction({ to: socialToken.address, value: 100000 });
        await expect(() => socialToken.connect(bob).transfer(carol.address, 1000)).to.changeEtherBalances(
            [alice, bob, carol],
            [0, 0, 0]
        );
        await checkSocialTokenBalances([5000, 0, 5000]);

        //7
        await socialToken.connect(carol).transfer(bob.address, 4000);
        await expect(() => socialToken.connect(carol).withdrawDividend()).to.changeEtherBalances(
            [alice, bob, carol],
            [0, 0, 60000]
        );
        await checkSocialTokenBalances([5000, 4000, 1000]);

        //8
        await expect(() => socialToken.connect(alice).withdrawDividend()).to.changeEtherBalances(
            [alice, bob, carol],
            [75000, 0, 0]
        );
        await deployer.sendTransaction({ to: socialToken.address, value: 70000 });
        await checkSocialTokenBalances([5000, 4000, 1000]);

        //9
        await socialToken.connect(carol).transfer(bob.address, 1000);
        await deployer.sendTransaction({ to: socialToken.address, value: 40000 });
        await checkSocialTokenBalances([5000, 5000, 0]);

        //10
        await expect(() => socialToken.connect(alice).withdrawDividend()).to.changeEtherBalances(
            [alice, bob, carol],
            [55000, 0, 0]
        );
        await socialToken.connect(alice).transfer(bob.address, 2000);
        await checkSocialTokenBalances([3000, 7000, 0]);

        //11
        await expect(() => socialToken.connect(bob).withdrawDividend()).to.changeEtherBalances(
            [alice, bob, carol],
            [0, 60000, 0]
        );
        await socialToken.connect(alice).transfer(carol.address, 2000);
        await checkSocialTokenBalances([1000, 7000, 2000]);

        const dan = Wallet.createRandom();
        const erin = Wallet.createRandom();

        //12
        await deployer.sendTransaction({ to: socialToken.address, value: 10000 });
        await expect(() => socialToken.connect(alice).mint(dan.address, 5000)).to.changeEtherBalances(
            [alice, bob, carol],
            [0, 0, 0]
        );
        await checkSocialTokenBalances([1000, 7000, 2000]);
        expect(await socialToken.balanceOf(dan.address)).to.be.equal(5000);
        expect(await socialToken.withdrawableDividendOf(dan.address)).to.be.equal(0);
        expect(await socialToken.balanceOf(erin.address)).to.be.equal(0);
        expect(await socialToken.withdrawableDividendOf(erin.address)).to.be.equal(0);

        //13
        await deployer.sendTransaction({ to: socialToken.address, value: 10000 });
        await socialToken.connect(alice).mint(erin.address, 5000);
        await checkSocialTokenBalances([1000, 7000, 2000]);
        expect(await socialToken.balanceOf(dan.address)).to.be.equal(5000);
        expect(await socialToken.withdrawableDividendOf(dan.address)).to.be.equal(3333);
        expect(await socialToken.balanceOf(erin.address)).to.be.equal(5000);
        expect(await socialToken.withdrawableDividendOf(erin.address)).to.be.equal(0);
    });
});
the_stack
import * as jsonx from './index'; // import mochaJSDOM from 'jsdom-global'; import path from 'path'; import chai from 'chai'; import sinon from 'sinon'; import React, { ReactElement } from 'react'; import ReactTestUtils from 'react-dom/test-utils'; // ES6 import ReactDOM from 'react-dom'; import ReactDOMElements from 'react-dom-factories'; import { expect as expectCHAI, } from 'chai'; import { JSDOM, } from 'jsdom'; // chai.use(require('sinon-chai')); // import 'mocha-sinon'; // import useGlobalHook from 'use-global-hook'; import { render, fireEvent, waitFor, screen } from '@testing-library/react' import '@testing-library/jest-dom/extend-expect' import * as defs from "./types/jsonx/index"; const sampleJSONX = { component: 'div', props: { id: 'generatedJSONX', className: 'jsonx', key:5 }, children: [ { component: 'p', props: { style: { color: 'red', fontWeight:'bold', }, key:3 }, children:'hello world', }, ], }; const simpleJSONX = { div: { props: { id: 'generatedJSONX', className: 'jsonx', key:0, }, children: [ { p: { props: { style: { color: 'red', fontWeight:'bold', }, key:1, }, children:'hello world', }, }, ], }, }; const sampleJSONXJSON = jsonx.getReactElementFromJSONX.call({ returnJSON: true }, sampleJSONX); const simpleJSONXJSON = jsonx.getReactElementFromJSONX.call({ returnJSON: true }, simpleJSONX); const simpleDiv = { component: 'div', props: { title: 'test', }, children: 'hello', }; const complexDiv = { component: 'div', props: { title: 'test', }, children: [ { button: { props: { //@ts-ignore onClick: function (e) { console.log({ e, }); }, }, children:'log event', }, }, { component: 'button', __dangerouslyBindEvalProps: { onClick:`(function(e){ console.log({ e }); })`, }, children:'log even two', }, ], }; const simpleDivJSON = jsonx.getReactElementFromJSONX.call({ returnJSON: true, }, simpleDiv); const complexDivJSON = jsonx.getReactElementFromJSONX.call({ returnJSON: true, exposeEval:true, }, complexDiv); const 
customComponents:defs.jsonxCustomComponent[] = [ { type: 'function', name: 'genFuncDef', functionComponent:function(){ console.log("called generated function") return { component:'span', children:'gen custom def', } }, options:{ name:'genFuncDef' } }, { type: 'function', name: 'genFunc', functionBody:'console.log("called generated function")', jsonx:{ component:'span', children:'gen custom', }, options:{ name:'genFun' } }, { type: 'component', name: 'genClass', jsonx:{ componentDidMount: { body: 'console.log(\'mounted\',this.props)', arguments: [], }, render: { body: { component: 'p', children: [ { component: 'span', children: 'My Custom React Component Status: ', }, { component: 'span', thisprops: { children: ['status',], }, }, ], }, }, }, options:{ name:'genClass' } }, { type:'library', name:'MyLib', jsonx:{ CompA:{ name:'CompA', type:'function', functionBody:'console.log("called generated function")', jsonxComponent:{ component:'div', children:'gen lib function comp a', }, options:{ name:'CompA' } }, CompB:{ name:'CompB', type:'function', jsonxComponent:{ component:'div', children:'gen lib function comp b', }, options:{ name:'CompB' } }, CompC:{ type: 'component', name: 'CompC', jsonxComponent:{ componentDidMount: { body: 'console.log(\'mounted CompC\',this.props)', arguments: [], }, render: { body: { component: 'div', children: 'CompC Class Component', }, }, }, options:{ name:'genClass' } } } } ]; describe('jsonx', function () { describe('helper functions', () => { // it('should return useGlobalHook', () => { // const ugh = jsonx.__getUseGlobalHook(); // console.log({ ugh }); // expectCHAI(jsonx.__getUseGlobalHook()).to.be.a('function'); // }); it('should return React', () => { expectCHAI(jsonx.__getReact()).to.eql(React); }); it('should return ReactDOM', () => { expectCHAI(jsonx.__getReactDOM()).to.eql(ReactDOM); }); }); it('should generate complex components',()=>{ const jsonxWithFunc = { component:'div', __dangerouslyInsertFunctionComponents:{ _children:{ 
// NOTE(review): this chunk starts mid-file — the object literal below is the
// tail of a JSONX fixture (`jsonxWithFunc`) whose opening lines are above this
// view; presumably it defines a function-valued prop. TODO confirm against the
// full file.
    functionBody:'console.log("clicked!")',
    reactComponent:{
      component:'span',
      children:'from func',
    },
    options:{ name:'spanFunc' }
  }
},
children:'click me'
};
//@ts-ignore
const ReactiveJSON = jsonx.getReactElementFromJSONX(jsonxWithFunc);
// console.log({jsonxWithFunc,ReactiveJSON})
});
// Tests for the core JSONX -> React element conversion entry point.
describe('getReactElementFromJSONX', () => {
  // Renders a JSONX tree that references custom components (function,
  // function-definition, class, and library-namespaced) via outputHTML and
  // compares the serialized HTML. Console output is stubbed for the duration
  // of the test and restored afterwards.
  it('should render custom elements',()=>{
    const originalConsoleError = console.error;
    const originalConsoleLog = console.log;
    console.error=jest.fn();
    console.log=jest.fn();
    // const JsonxEl = jsonx.getReactElementFromJSONX
    const JsonxEl = jsonx.outputHTML
      .call(
        {
          customComponents,
          // debug: true,
          disableRenderIndexKey: false,
          useJSON: false,
        },
        {
          jsonx:{
            // test:true,
            component:'div',
            children:[
              { component:'p', children:'hello world'},
              { component: 'genFuncDef', },
              { component: 'genFunc', },
              { component: 'genClass', },
              { component: 'MyLib.CompA', },
            ],
          },
        });
    // console.log('JsonxEl',JsonxEl)
    expect(JsonxEl).toMatch('<div data-reactroot=""><p>hello world</p><span>gen custom def</span><span>gen custom</span><p><span>My Custom React Component Status: </span><span></span></p><div>gen lib function comp a</div></div>')
    //@ts-ignore
    // const {container}=render(<JsonxEl/>);
    // console.log('container.innerHTML',container.innerHTML)
    console.error = originalConsoleError
    console.log = originalConsoleLog
  });
  // In debug mode an unknown component name is reported as a string instead of
  // throwing; console.error is stubbed to keep test output clean.
  it('should debug invalid componets',()=>{
    const originalConsoleError = console.error;
    console.error=jest.fn();
    expect(jsonx.getReactElementFromJSONX.call({debug:true},{component:'wrong'})).toBe("ReferenceError: Invalid React Component (wrong)");
    console.error = originalConsoleError
  })
  // Conversion should yield genuine React elements (shape checked via the
  // standard $$typeof/type/key/ref/props element properties).
  it('should return an instance of a react element', () => {
    //@ts-ignore
    const ReactiveJSON = jsonx.getReactElementFromJSONX(sampleJSONX);
    //@ts-ignore
    const ReactiveSimpleJSON = jsonx.getReactElementFromJSONX(simpleJSONX);
    expectCHAI(ReactTestUtils.isElement(ReactiveJSON));
    expectCHAI(ReactTestUtils.isElement(ReactiveSimpleJSON));
    expectCHAI(ReactiveJSON).to.be.an('object');
    expectCHAI(ReactiveJSON).to.haveOwnProperty('$$typeof');
    expectCHAI(ReactiveJSON).to.haveOwnProperty('type');
    expectCHAI(ReactiveJSON).to.haveOwnProperty('key');
    expectCHAI(ReactiveJSON).to.haveOwnProperty('ref');
    expectCHAI(ReactiveJSON).to.haveOwnProperty('props');
  });
  // An empty JSONX object produces a fallback element; with debug enabled the
  // fallback carries an explanatory error message as its children.
  it('should handle errors with empty components', () => {
    //@ts-ignore
    const emptySpanComponent = jsonx.getReactElementFromJSONX({});
    const emptySpanComponentDebugged = jsonx.getReactElementFromJSONX.call({
      debug: true,
    }, {}, {});
    expectCHAI(emptySpanComponent).to.be.an('object');
    expectCHAI(emptySpanComponentDebugged).to.be.an('object');
    //@ts-ignore
    expectCHAI(emptySpanComponentDebugged.props.children).to.eql('Error: Missing Component Object');
  });
  // Without debug mode an invalid component throws; with debug + logError the
  // supplied logger is invoked before the error propagates.
  it('should throw an error with invalid components', () => {
    const loggerSpy = sinon.spy();
    expectCHAI(jsonx.getReactElementFromJSONX.bind({}, {
      component: 'somethingInvalid',
    })).to.throw('Invalid React Component (somethingInvalid)');
    try {
      jsonx.getReactElementFromJSONX.call({
        debug: true,
        logError: loggerSpy,
      }, {
        component: 'somethingInvalid',
      }, {});
    } catch (e) {
      expectCHAI(loggerSpy.called).to.be.true;
      expectCHAI(e).to.be.an('error');
    }
  });
  // test:true switches the function into a diagnostic mode that returns a
  // string description instead of an element.
  it('should return testing output', () => {
    const ReactiveJSON = jsonx.getReactElementFromJSONX.call({}, { ...sampleJSONX, test: true });
    expectCHAI(ReactiveJSON).to.be.a('string');
    expectCHAI(ReactiveJSON).to.include('element');
    expectCHAI(ReactiveJSON).to.include('props');
    expectCHAI(ReactiveJSON).to.include('children');
  });
});
// Same conversion, but starting from the plain-JSON (serialized) variant.
describe('getReactElementFromJSON', () => {
  it('should return an instance of a react element', () => {
    //@ts-ignore
    const ReactiveJSON = jsonx.getReactElementFromJSON(sampleJSONXJSON);
    //@ts-ignore
    const ReactiveSimpleJSON = jsonx.getReactElementFromJSON(simpleJSONXJSON);
    expectCHAI(ReactTestUtils.isElement(ReactiveJSON)).to.be.true;
    expectCHAI(ReactTestUtils.isElement(ReactiveSimpleJSON)).to.be.true;
    expectCHAI(ReactiveJSON).to.be.an('object');
    expectCHAI(ReactiveJSON).to.haveOwnProperty('$$typeof');
    expectCHAI(ReactiveJSON).to.haveOwnProperty('type');
    expectCHAI(ReactiveJSON).to.haveOwnProperty('key');
    expectCHAI(ReactiveJSON).to.haveOwnProperty('ref');
    expectCHAI(ReactiveJSON).to.haveOwnProperty('props');
  });
});
// compile() returns a render function; we mount its output into a JSDOM
// document to verify it yields a real DOM component.
describe('compile', () => {
  it('should convert JSONX to React Element', () => {
    const dom = new JSDOM(`<!DOCTYPE html>
<body>
<div id="root"/>
</body>`);
    // global.document = dom.window.document;
    // global.document.createElement = React.createElement;
    // console.log('dom.window',dom.window)
    //@ts-ignore
    global.window = dom.window;
    global.window.React = React;
    global.document = global.window.document;
    // console.log("dom.window.document.querySelector('#root')",dom.window.document.querySelector('#root'));
    //@ts-ignore
    const ReactiveJSON = jsonx.compile(sampleJSONXJSON);
    //@ts-ignore
    const testDOM = ReactTestUtils.renderIntoDocument(ReactiveJSON());
    // console.log({testDOM});
    //@ts-ignore
    expectCHAI(ReactTestUtils.isDOMComponent(testDOM)).to.be.true;
    expectCHAI(ReactiveJSON).to.be.a('function');
    // expectCHAI(ReactTestUtils.isCompositeComponent(ReactiveJSON)).to.be.true;
  });
});
// outputJSON must agree with getReactElementFromJSONX's returnJSON mode.
describe('outputJSON', () => {
  it('should convert JSONX to JSON', () => {
    const compiledJSON = jsonx.outputJSON(simpleDiv);
    const compiledJSONXJSON = jsonx.getReactElementFromJSONX.call({
      returnJSON: true,
    }, simpleDiv);
    //@ts-ignore
    expectCHAI(compiledJSON.children).to.eql(compiledJSONXJSON.children);
    //@ts-ignore
    expectCHAI(compiledJSON.type).to.eql(compiledJSONXJSON.type);
  });
});
describe('outputJSX', () => {
  it('should compile to JSX String', () => {
    //@ts-ignore
    const JSXString = jsonx.outputJSX(simpleDiv);
    expectCHAI(JSXString).to.include('title="test">hello</div>');
    // console.log({ JSXString, });
  });
});
describe('jsonToJSX', () => {
  // const util = require('util');
  // console.log(util.inspect({ simpleDivJSON, complexDivJSON, },{depth:20}));
  it('should compile to JSX String', () => {
    //@ts-ignore
    const JSXString = jsonx.jsonToJSX(simpleDivJSON);
    //@ts-ignore
    const complexJSXString = jsonx.jsonToJSX(complexDivJSON);
    expectCHAI(JSXString).to.include('title="test">hello</div>');
    expectCHAI(complexJSXString).to.be.a('string');
    // console.log({ JSXString, complexJSXString, });
    // console.log(complexJSXString);
  });
});
describe('outputHTML', () => {
  it('should be an alias for jsonxHTMLString', () => {
    expectCHAI(jsonx.outputHTML).to.eql(jsonx.jsonxHTMLString);
  });
});
// Serializes a JSONX tree to HTML and verifies both content and inline style
// survive a JSDOM round-trip.
describe('jsonxHTMLString', () => {
  it('should return an HTML string', () => {
    //@ts-ignore
    const jsonxString = jsonx.jsonxHTMLString({ jsonx: sampleJSONX, });
    const dom = new JSDOM(`<!DOCTYPE html><body>${jsonxString}</body>`);
    expectCHAI(jsonxString).to.be.a('string');
    expectCHAI(dom.window.document.body.querySelector('p').innerHTML).to.eql('hello world');
    expectCHAI(dom.window.document.body.querySelector('p').style.color).to.eql('red');
  });
});
// Express view-engine entry point: renders a .jsonx file from disk with bound
// locals and invokes the node-style callback with the HTML string.
describe('__express', () => {
  const sampleJSONXFilepath = path.resolve('./src/mock/sample.jsonx');
  const spantext = 'should render in express';
  it('should return an HTML string', (done) => {
    jsonx.__express(
      sampleJSONXFilepath,
      {
        spantext,
        __boundConfig: {
          debug:false,
        },
        __DOCTYPE:'',
      },
      //@ts-ignore
      ((err, renderedString) => {
        const dom = new JSDOM(renderedString);
        if (renderedString) {
          expectCHAI(dom.window.document.querySelector('#generatedJSONX').getAttribute('title')).to.eql(spantext);
          expectCHAI(err).to.be.null;
          expectCHAI(renderedString).to.be.a('String');
        }
        done(err);
      })
    );
  });
  // With null arguments the engine degrades to a bare doctype document rather
  // than erroring.
  it('it should handle errors', (done) => {
    //@ts-ignore
    expectCHAI(jsonx.__express.bind()).to.throw;
    //@ts-ignore
    jsonx.__express(null,null, (err,template) => {
      expectCHAI(err).to.eql(null);
      expectCHAI(template).to.eql('<!DOCTYPE html>\n');
      done();
    })
  });
});
// DOM rendering into an element located by CSS selector.
describe('jsonxRender', () => {
  // beforeAll(function () {
  //   // this.jsdom = mochaJSDOM();
  // });
  it('should render component inside of querySelector', function () {
    const containerDiv = document.createElement('div');
    containerDiv.setAttribute('id', 'reactContainer');
    document.body.appendChild(containerDiv);
    //@ts-ignore
    jsonx.jsonxRender({ jsonx: sampleJSONX, querySelector:'#reactContainer', });
    //@ts-ignore
    expectCHAI(document.body.querySelector('p').innerHTML).to.eql('hello world');
    //@ts-ignore
    expectCHAI(document.body.querySelector('p').style.color).to.eql('red');
  });
  // afterAll(function () {
  //   // this.jsdom();
  // });
});
});
the_stack
import { FakeScheduler, Schedule, ScheduleTrigger } from "./fake-scheduler"; import { SchedulerStatus } from "../app/api"; import __map from 'lodash/map'; export interface IFakeSchedulerOptions { version: string; quartzVersion: string; dotNetVersion: string; timelineSpan: number; schedulerName: string; schedule: Schedule; } export class FakeSchedulerServer { private _scheduler: FakeScheduler; private _commandHandlers: { [command: string]: (args) => any }; constructor(options: IFakeSchedulerOptions) { this._scheduler = new FakeScheduler(options.schedulerName, options.schedule); this._commandHandlers = { 'get_env': () => ({ _ok: 1, sv: options.version, qv: options.quartzVersion, dnv: options.dotNetVersion, ts: options.timelineSpan }), 'get_input_types': (args) => ({ _ok: 1, i: [ { "_": 'string', l: 'string' }, { "_": 'int', l: 'int' }, { "_": 'long', l: 'long' }, { "_": 'float', l: 'float' }, { "_": 'double', l: 'double' }, { "_": 'boolean', l: 'boolean', v: 1 }, { "_": 'ErrorTest', l: 'Error test' } ] }), 'get_input_type_variants': (args) => ({ _ok: 1, i: [ { "_": 'true', l: 'True' }, { "_": 'false', l: 'False' } ] }), 'get_job_types': (args) => ({ _ok: 1, i: [ "HelloJob|CrystalQuartz.Samples|CrystalQuartz", "CleanupJob|CrystalQuartz.Samples|CrystalQuartz", "GenerateReports|CrystalQuartz.Samples|CrystalQuartz" ] }), 'get_data': (args) => { return this.mapCommonData(args); }, 'resume_trigger': (args) => { this._scheduler.resumeTrigger(args.trigger); return this.mapCommonData(args); }, 'pause_trigger': (args) => { this._scheduler.pauseTrigger(args.trigger); return this.mapCommonData(args); }, 'delete_trigger': (args) => { this._scheduler.deleteTrigger(args.trigger); return this.mapCommonData(args); }, 'pause_job': (args) => { this._scheduler.pauseJob(args.group, args.job); return this.mapCommonData(args); }, 'resume_job': (args) => { this._scheduler.resumeJob(args.group, args.job); return this.mapCommonData(args); }, 'delete_job': (args) => { 
this._scheduler.deleteJob(args.group, args.job); return this.mapCommonData(args); }, 'pause_group': (args) => { this._scheduler.pauseGroup(args.group); return this.mapCommonData(args); }, 'resume_group': (args) => { this._scheduler.resumeGroup(args.group); return this.mapCommonData(args); }, 'delete_group': (args) => { this._scheduler.deleteGroup(args.group); return this.mapCommonData(args); }, 'get_scheduler_details': (args) => ({ _ok: 1, ism: this._scheduler.status === SchedulerStatus.Ready, jsc: false, jsp: false, je: this._scheduler.jobsExecuted, rs: this._scheduler.startedAt, siid: 'IN_BROWSER', sn: this._scheduler.name, isr: false, t: null, isd: this._scheduler.status === SchedulerStatus.Shutdown, ist: this._scheduler.status === SchedulerStatus.Started, tps: 1, tpt: null, v: 'In-Browser Emulation' }), 'get_job_details': (args) => ({ _ok: true, jd: { ced: true, // ConcurrentExecutionDisallowed ds: '', // Description pjd: false, // PersistJobDataAfterExecution d: false, // Durable t: 'SampleJob|Sample|InBrowser', // JobType rr: false // RequestsRecovery }, jdm: { '_': 'object', v: { 'Test1': { '_': 'single', k: 1, v: 'String value'}, 'Test2': { '_': 'object', k: 1, v: { "FirstName": { '_': 'single', v: 'John' }, "LastName": { '_': 'single', v: 'Smith' }, "TestError": { '_': 'error', _err: 'Exception text' } } }, 'Test3': { '_': 'enumerable', v: [ { '_': 'single', v: 'Value 1' }, { '_': 'single', v: 'Value 2' }, { '_': 'single', v: 'Value 3' } ] } } } // todo: take actual from job }), 'start_scheduler': (args) => { this._scheduler.start(); return this.mapCommonData(args); }, 'pause_scheduler': (args) => { this._scheduler.pauseAll(); return this.mapCommonData(args); }, 'resume_scheduler': (args) => { this._scheduler.resumeAll(); return this.mapCommonData(args); }, 'standby_scheduler': (args) => { this._scheduler.standby(); return this.mapCommonData(args); }, 'stop_scheduler': (args) => { this._scheduler.shutdown(); return this.mapCommonData(args); }, 
'add_trigger': (args) => { const triggerType = args.triggerType; let i = 0, errors = null; while (args['jobDataMap[' + i + '].Key']) { if (args['jobDataMap[' + i + '].InputTypeCode'] === 'ErrorTest') { errors = errors || {}; errors[args['jobDataMap[' + i + '].Key']] = 'Testing error message'; } i++; } if (errors) { return { ...this.mapCommonData(args), ve: errors }; } if (triggerType !== 'Simple') { return { _err: 'Only "Simple" trigger type is supported by in-browser fake scheduler implementation' }; } const job = args.job, group = args.group, name = args.name, trigger: ScheduleTrigger = { repeatCount: args.repeatForever ? null : args.repeatCount, repeatInterval: args.repeatInterval }; this._scheduler.triggerJob(group, job, name, trigger); return this.mapCommonData(args); }, 'execute_job': (args) => { this._scheduler.executeNow(args.group, args.job); return this.mapCommonData(args); } }; this._scheduler.init(); this._scheduler.start(); } handleRequest(data) { const handler = this._commandHandlers[data.command]; if (handler) { return handler(data); } return { _err: 'Fake scheduler server does not support command ' + data.command }; } private mapCommonData(args) { const scheduler = this._scheduler, data = scheduler.getData(); return { _ok: 1, sim: scheduler.startedAt, rs: scheduler.startedAt, n: data.name, st: scheduler.status.code, je: scheduler.jobsExecuted, jt: data.jobsCount, ip: __map(scheduler.inProgress, ip => ip.fireInstanceId + '|' + ip.trigger.name), jg: __map(data.groups, g => ({ n: g.name, s: g.getStatus().value, jb: __map(g.jobs, j => ({ n: j.name, s: j.getStatus().value, gn: g.name, _: g.name + '_' + j.name, tr: __map(j.triggers, t => ({ '_': t.name, n: t.name, s: t.getStatus().value, sd: t.startDate, ed: t.endDate, nfd: t.nextFireDate, pfd: t.previousFireDate, tc: 'simple', tb: (t.repeatCount === null ? 
'-1' : t.repeatCount.toString()) + '|' + t.repeatInterval + '|' + t.executedCount })) })) })), ev: __map( scheduler.findEvents(+args.minEventId), ev => { const result: any = { '_': `${ev.id}|${ev.date}|${ev.eventType}|${ev.scope}`, k: ev.itemKey, fid: ev.fireInstanceId }; if (ev.faulted) { result['_err'] = ev.errors ? __map(ev.errors, er => ({ "_": er.text, l: er.level })) : 1 } return result; } //`${ev.id}|${ev.date}|${ev.eventType}|${ev.scope}|${ev.fireInstanceId}|${ev.itemKey}` ) }; } }
the_stack
import * as vscode from 'vscode-languageserver'; import * as ast from '../parsing/ast-types'; import * as parser from '../parsing/coq-parser'; import * as textUtil from '../util/text-util'; // // export enum SymbolKind { // // Definition, // // Class, // // Constructor, // // Module, // // } // // export class SymbolDefinition { // // private range: vscode.Range; // // public constructor( // // /** short name used to declare the symbol */ // // public readonly name: string, // // /** full name of the symbol, considering its context (within a module, etc.) */ // // public readonly fullName: string, // // /** where the symbol was defined */ // // definitionRange: vscode.Range, // // /** what kind of symbol is this? */ // // public readonly kind: SymbolKind, // // ) { // // this.range = definitionRange; // // } // // public get definitionRange() : vscode.Range { // // return this.range; // // } // // public setRange(range: vscode.Range) { // // this.range = range; // // } // // } // // export class SymbolDbState { // // private parent: SymbolDbState | null; // // private children: SymbolDbState[]; // // private symbols: SymbolDefinition[]; // // public lookupIdentifier(ident: string) : SymbolDefinition { // // return undefined; // // } // // } // type QualId = string[]; // /** Determines whether `shortId` is a sub-id of `fullId` */ // function qualIdMatch(fullId: QualId, shortId: QualId) : boolean { // if(shortId.length > fullId.length) // return false; // for(let idx = 1; idx <= shortId.length; ++idx) { // const fPart = fullId[fullId.length - idx]; // const sPart = fullId[fullId.length - idx]; // if(fPart !== sPart) // return false; // } // return true; // } // interface SymbolDefinition { // /** */ // // availabilityScope: Scope, // /** where the definition is named */ // nameRange: vscode.Range, // /** full qualified id */ // fullId: QualId; // /** the sentence that created the definition */ // source: Sentence; // } // abstract class Scope { // public abstract 
lookupDefinition(id: QualId, currentScope: QualId) : SymbolDefinition|null; // } // } // class TopScope extends Scope { // public lookupDefinition(id: QualId, currentScope: QualId) : SymbolDefinition|null { // return undefined; // } // } // class SectionScope extends Scope { // public lookupDefinition(id: QualId, currentScope: QualId) : SymbolDefinition|null { // return undefined; // } // } // class ModuleScope extends Scope { // public constructor(identifier: string, range: vscode.Range) { // super(); // } // public lookupDefinition(id: QualId, currentScope: QualId) : SymbolDefinition|null { // return undefined; // } // } // class DefinitionScope extends Scope { // private definitions : {identifier: string, range: vscode.Range}[]; // public constructor(definitions: {identifier: string, range: vscode.Range}[], private source: Sentence) { // super(); // // We store the defs in reverse order, so that symbols can be searched from the start // this.definitions = definitions.reverse(); // } // public lookupDefinition(id: QualId, currentScope: QualId) : SymbolDefinition|null { // const fullId = [...currentScope, undefined]; // for(let def of this.definitions) { // fullId[fullId.length-1] = def.identifier; // if(qualIdMatch(fullId, id)) { // return { // nameRange: def.range, // fullId: fullId, // source: this.source, // } // } // } // return null; // } // } // // class ScopeStack { // // private parent : Scope; // // private scopes: Scope[]; // // public getParent() { // // return this.parent; // // } // // public lookupDefinition(id: QualId) : {scope: Scope} { // // return undefined; // // } // // } // // export class DocumentScopes { // // private root: SymbolDbState; // // private currentState: SymbolDbState; // // public processSentenceAst(ast: ast.Sentence) : SymbolDbState { // // return this.currentState; // // } // // } export type QualId = string[]; // class QualId extends Array<string> { // public contains(id: QualId|string[]) : boolean { // return 
containsQualId(this,id); // } // public resolve(id: QualId|string[]) : QualId | null { // return resolveQualId(this,id); // } // public match(x: QualId|string[]) : QualId|null { // return matchQualId(this,x); // } // } // export function containsQualId(id1: QualId, id2: QualId) : boolean { // if(id2.length > id1.length) // return false; // for(let idx = 1; idx <= id2.length; ++idx) { // const fPart = id1[id1.length - idx]; // const sPart = id2[id2.length - idx]; // if(fPart !== sPart) // return false; // } // return true; // } export function resolveQualId(id1: QualId, id2: QualId) : QualId | null { if(id2.length > id1.length) return null; let idx = 1; for(/**/; idx <= id2.length; ++idx) { const fPart = id1[id1.length - idx]; const sPart = id2[id2.length - idx]; if(fPart !== sPart) return null; } return [...id1.slice(0,id1.length-idx+1), ...id2]; } export function matchQualId(x: QualId, y: QualId) : {which: 0|1, prefix: QualId, id: QualId}|null { let which: 0|1 = 0; if(x.length > y.length) [which,x,y] = [1,y,x]; // x is now the shortest let idx = 1; for(/**/; idx <= x.length; ++idx) { const partX = x[x.length - idx]; const partY = y[y.length - idx]; if(partX !== partY) return null; } return { which: which, prefix: y.slice(0, y.length - idx + 1), id: x }; } // function matchQualId(prefix: QualId|string[], ident: string, id: QualId|string[]) : {assumedPrefix: QualId, id: QualId}|null { // if(id.length < prefix.length+1) { // if(id.length <= prefix.length+1) { // const result = resolveQualId([...prefix, ident], id); // if(result) // return {assumedPrefix: emptyQualId, id: result} // else // return null; // } else { // const result = resolveQualId(id, [...prefix, ident]); // if(result) // return {assumedPrefix: id.slice(), id: result} // else // return null; // } // x is now the shortest export interface SymbolInformation<S> { /** */ // availabilityScope: Scope, /** where the definition is named */ symbol: Symbol, /** full qualified id */ id: QualId; /** the sentence 
that created the definition */ source: S; /** */ assumedPrefix: QualId; } export function qualIdEqual(x: QualId, y: QualId) { if(x.length !== y.length) return false; for(let idx = 0; idx < x.length; ++idx) { if(x[idx] !== y[idx]) return false; } return true; } export enum SymbolKind { Definition, Class, Inductive, Constructor, Module, Let, Section, Ltac, Assumption, } export interface Symbol { identifier: string, range: vscode.Range, kind: SymbolKind, } export enum ScopeFlags { Private = 1 << 0, Local = 1 << 1, Export = 1 << 2, All = Private | Local | Export, } export class ScopeDeclaration<S extends {prev: S, next: S, getScope() : ScopeDeclaration<S>|null}> { /** Symbols that are available within this scope. */ private privateSymbols : Symbol[] = []; /** Symbols that are made available to this scope's subsequent siblings. */ private localSymbols : Symbol[] = []; /** Symbols that are made available outside of this scope's parent. */ private exportSymbols : Symbol[] = []; public constructor( /** The sentence which defined this scope */ private source: S, private myId : QualId, protected node: {kind: "begin", name: string, exports: boolean}|{kind: "end", name: string}|null, ) { } public static createSection<S extends {prev: S, next: S, getScope() : ScopeDeclaration<S>|null}>(source: S, name: string, range: vscode.Range) { const result = new ScopeDeclaration(source, [], {kind: "begin", name: name, exports: true}); result.privateSymbols.push({identifier: name, range: range, kind: SymbolKind.Section}); return result; } public static createModule<S extends {prev: S, next: S, getScope() : ScopeDeclaration<S>|null}>(source: S, name: string, exports: boolean, range: vscode.Range) { const result = new ScopeDeclaration(source, [name], {kind: "begin", name: name, exports: exports}); result.exportSymbols.push({identifier: name, range: range, kind: SymbolKind.Module}); return result; } public static createEnd<S extends {prev: S, next: S, getScope() : 
ScopeDeclaration<S>|null}>(source: S, name: string) { const result = new ScopeDeclaration(source, [], {kind: "end", name: name}); return result; } public static createDefinition<S extends {prev: S, next: S, getScope() : ScopeDeclaration<S>|null}>(source: S, name: string, range: vscode.Range) { const result = new ScopeDeclaration(source, [], null); result.exportSymbols.push({identifier: name, range: range, kind: SymbolKind.Module}); return result; } public addPrivateSymbol(s: Symbol) : void { this.privateSymbols.push(s); } public addLocalSymbol(s: Symbol) : void { this.localSymbols.push(s); } public addExportSymbol(s: Symbol) : void { this.exportSymbols.push(s); } private lookupSymbolInList(id: QualId, symbols: Symbol[]) : SymbolInformation<S>|null { const matchedId = matchQualId(id.slice(0,id.length-1), this.myId); if(!matchedId) return null; let assumedPrefix : QualId = []; if(matchedId.which===1) assumedPrefix = matchedId.prefix; for(let s of symbols) { if(id[id.length-1] === s.identifier) return { symbol: s, assumedPrefix: assumedPrefix, id: matchedId.id.concat(s.identifier), source: this.source, } } return null; } private lookupHere(id: QualId, flags: ScopeFlags) : SymbolInformation<S>|null { if(flags & ScopeFlags.Private) { const result = this.lookupSymbolInList(id, this.privateSymbols); if(result) return result; } if(flags & ScopeFlags.Local) { const result = this.lookupSymbolInList(id, this.localSymbols); if(result) return result; } if(flags & ScopeFlags.Export) { const result = this.lookupSymbolInList(id, this.exportSymbols); if(result) return result; } return null; } private getPreviousSentence() : ScopeDeclaration<S>|null { if(this.source.prev) return this.source.prev.getScope() else return null; } public isBegin(name?: string) : this is ScopeDeclaration<S>&{node: {kind:"begin",name:string,exports:boolean}} { return (this.node && this.node.kind==="begin" && (!name || name===this.node.name)) ? 
true : false } public isEnd(name?: string) : this is ScopeDeclaration<S>&{node: {kind:"end",name:string}} { return (this.node && this.node.kind==="end" && (!name || name===this.node.name)) ? true : false } private getParentScope() : ScopeDeclaration<S>|null{ let nesting = 0; let scope : ScopeDeclaration<S> = this.getPreviousSentence(); while(scope) { if(scope.isEnd()) ++nesting; else if(scope.isBegin() && nesting > 0) --nesting; else if(scope.isBegin() && nesting === 0) return scope; scope = scope.getPreviousSentence(); } return null; } public getPrefixes() : QualId[] { let prefixes : QualId[] = []; let scope = this.getParentScope(); while(scope) { if(scope.isBegin() && scope.node.exports) prefixes = [...prefixes, ...prefixes.map((p) => [...scope.myId, ...p])] else prefixes = prefixes.map((p) => [...scope.myId, ...p]); scope = scope.getParentScope(); } if(prefixes.length === 0) return [[]]; else return prefixes; } private resolveSymbol(s: SymbolInformation<S>|null, idPrefixes: QualId[]) : SymbolInformation<S>|null { if(!s) return null; const myPrefixes = this.getPrefixes(); const prefix = myPrefixes.find(p1 => idPrefixes.some(p2 => qualIdEqual(p1,p2))); if(prefix) { s.assumedPrefix = []; s.id = [...prefix, ...s.id]; return s; } else return null; // const fullId = resolveQualId([...prefixes, s.id[s.id.length-1]], [...s.assumedPrefix, ...s.id]); // if(fullId) { // s.assumedPrefix = []; // s.id = fullId; // return s; // } else // return null } private resolveId(id: QualId, idPrefixes: QualId[], flags: ScopeFlags) : SymbolInformation<S>|null { return this.resolveSymbol(this.lookupHere(id, flags), idPrefixes); } // public lookup(id: QualId, flags: ScopeFlags) : SymbolInformation<S>|null { // let scope : ScopeDeclaration<S> = this; // const results: SymbolInformation<S>[] = []; // const flagStack : ScopeFlags[] = []; // while(scope) { // const result = scope.lookupHere(id,flags); // if(result) { // results.push(result); // scope = scope.getParentScope(); // } // // Only 
check the internals of the first declaration // flags &= ~ScopeFlags.Private; // if(scope.isEnd()) { // flagStack.push(flags); // flags &= ~ScopeFlags.Local; // } else if(scope.isBegin() && flagStack.length > 0) // flags = flagStack.pop(); // scope = scope.getPreviousSentence(); // } // return null; // } public lookup(id: QualId, flags: ScopeFlags) : SymbolInformation<S>[] { let idPrefixes = this.getPrefixes(); let results : SymbolInformation<S>[] = []; let scope : ScopeDeclaration<S> = this; const flagStack : ScopeFlags[] = []; while(scope) { const result = scope.resolveId(id,idPrefixes,flags); if(result) results.push(result); // Only check the internals of the first declaration flags &= ~ScopeFlags.Private; if(scope.isEnd()) { flagStack.push(flags); flags &= ~ScopeFlags.Local; } else if(scope.isBegin() && flagStack.length > 0) flags = flagStack.pop(); scope = scope.getPreviousSentence(); } return results; } } namespace parseAstSymbols { function identToSymbol(ident: ast.Identifier, kind: SymbolKind, pos: vscode.Position) : Symbol { return { identifier: ident.text, kind: kind, range: textUtil.rangeTranslateRelative(pos,parser.locationRangeToRange(ident.loc)) }; } export function definition<S extends {prev: S, next: S, getScope() : ScopeDeclaration<S>|null}>(ast: ast.SDefinition, sent: S, pos: vscode.Position) : ScopeDeclaration<S> { const result = new ScopeDeclaration(sent, [], null); result.addExportSymbol(identToSymbol(ast.ident, SymbolKind.Definition, pos)); return result; } export function inductive<S extends {prev: S, next: S, getScope() : ScopeDeclaration<S>|null}>(ast: ast.SInductive, sent: S, pos: vscode.Position) : ScopeDeclaration<S> { const result = new ScopeDeclaration(sent, [], null); ast.bodies.forEach(body => { result.addExportSymbol(identToSymbol(body.ident, SymbolKind.Inductive, pos)); body.constructors.forEach(c => { result.addExportSymbol(identToSymbol(c.ident, SymbolKind.Constructor, pos)); }); }); return result; } export function ltacDef<S 
extends {prev: S, next: S, getScope() : ScopeDeclaration<S>|null}>(ast: ast.SLtacDef, sent: S, pos: vscode.Position) : ScopeDeclaration<S> { const result = new ScopeDeclaration(sent, [], null); result.addExportSymbol(identToSymbol(ast.ident, SymbolKind.Ltac, pos)); return result; } export function assumptions<S extends {prev: S, next: S, getScope() : ScopeDeclaration<S>|null}>(ast: ast.SAssumptions, sent: S, pos: vscode.Position) : ScopeDeclaration<S> { const result = new ScopeDeclaration(sent, [], null); ast.idents.forEach(a => { result.addLocalSymbol(identToSymbol(a, SymbolKind.Assumption, pos)); }); return result; } export function section<S extends {prev: S, next: S, getScope() : ScopeDeclaration<S>|null}>(ast: ast.SSection, sent: S, pos: vscode.Position) : ScopeDeclaration<S> { const result = new ScopeDeclaration(sent, [], {kind: "begin", name: ast.ident.text, exports: true}); return result; } export function module<S extends {prev: S, next: S, getScope() : ScopeDeclaration<S>|null}>(ast: ast.SModule, sent: S, pos: vscode.Position) : ScopeDeclaration<S> { const result = new ScopeDeclaration(sent, [ast.ident.text], {kind: "begin", name: ast.ident.text, exports: ast.intro==="Export"}); result.addExportSymbol(identToSymbol(ast.ident, SymbolKind.Module, pos)); // [ ast.ident, ...Array.prototype.concat(...ast.bindings.map((b) => b.idents)) ] // .map((id) => identToSymbol(id, vscode.SymbolKind.Module, pos)) return result; } export function moduleType<S extends {prev: S, next: S, getScope() : ScopeDeclaration<S>|null}>(ast: ast.SModuleType, sent: S, pos: vscode.Position) : ScopeDeclaration<S> { const result = new ScopeDeclaration(sent, [ast.ident.text], {kind: "begin", name: ast.ident.text, exports: false}); result.addExportSymbol(identToSymbol(ast.ident, SymbolKind.Module, pos)); return result; // return [ ast.ident, ...Array.prototype.concat(...ast.bindings.map((b) => b.idents)) ] // .map((id) => identToSymbol(id, vscode.SymbolKind.Module, pos)) } export function 
moduleBind<S extends {prev: S, next: S, getScope() : ScopeDeclaration<S>|null}>(ast: ast.SModuleBind, sent: S, pos: vscode.Position) : ScopeDeclaration<S> { const result = new ScopeDeclaration(sent, [], null); result.addExportSymbol(identToSymbol(ast.ident, SymbolKind.Module, pos)); return result; } export function moduleTypeBind<S extends {prev: S, next: S, getScope() : ScopeDeclaration<S>|null}>(ast: ast.SModuleTypeBind, sent: S, pos: vscode.Position) : ScopeDeclaration<S> { const result = new ScopeDeclaration(sent, [], null); result.addExportSymbol(identToSymbol(ast.ident, SymbolKind.Module, pos)); return result; } } export function parseAstForScopeDeclarations<S extends {prev: S, next: S, getScope() : ScopeDeclaration<S>|null}>(ast: ast.Sentence, sent: S, pos: vscode.Position) : ScopeDeclaration<S> { try { switch(ast.type) { case "assumptions": return parseAstSymbols.assumptions(ast,sent,pos); case "definition": return parseAstSymbols.definition(ast,sent,pos); case "inductive": return parseAstSymbols.inductive(ast,sent,pos); case "ltacdef": return parseAstSymbols.ltacDef(ast,sent,pos); case "section": return parseAstSymbols.section(ast,sent,pos); case "module": return parseAstSymbols.module(ast,sent,pos); case "module-bind": return parseAstSymbols.moduleBind(ast,sent,pos); case "module-type": return parseAstSymbols.moduleType(ast,sent,pos); case "module-type-bind": return parseAstSymbols.moduleTypeBind(ast,sent,pos); default: return new ScopeDeclaration(sent,[], null); } } catch(err) { // debugger; return new ScopeDeclaration(sent,[], null); } }
the_stack
import {ExportNs} from '../../esl-utils/environment/export-ns';
import {ESLBaseElement, attr, boolAttr} from '../../esl-base-element/core';
import {bind} from '../../esl-utils/decorators/bind';
import {CSSClassUtils} from '../../esl-utils/dom/class';
import {rafDecorator} from '../../esl-utils/async/raf';
import {debounce} from '../../esl-utils/async/debounce';
import {EventUtils} from '../../esl-utils/dom/events';
import {parseAspectRatio} from '../../esl-utils/misc/format';
import {ESLMediaQuery} from '../../esl-media-query/core';
import {TraversingQuery} from '../../esl-traversing-query/core';
import {SPACE, PAUSE} from '../../esl-utils/dom/keys';

import {getIObserver} from './esl-media-iobserver';
import {PlayerStates} from './esl-media-provider';
import {ESLMediaProviderRegistry} from './esl-media-registry';
import {MediaGroupRestrictionManager} from './esl-media-manager';

import type {BaseProvider} from './esl-media-provider';

export type ESLMediaFillMode = 'cover' | 'inscribe' | '';

/**
 * ESLMedia - custom element, that provides an ability to add and configure media (video / audio)
 * using a single tag as well as work with external providers using simple native-like API.
 *
 * @author Alexey Stsefanovich (ala'n), Yuliya Adamskaya
 */
@ExportNs('Media')
export class ESLMedia extends ESLBaseElement {
  public static is = 'esl-media';
  // Prefix for all events dispatched through $$fire (e.g. 'esl:media:ready')
  public static eventNs = 'esl:media:';

  /** Media resource identifier */
  @attr() public mediaId: string;
  /** Media resource src/url path */
  @attr() public mediaSrc: string;
  /** Media resource type. 'auto' (auto detection from src) by default */
  @attr() public mediaType: string;
  /** Media elements group name */
  @attr() public group: string;
  /** Media resource rendering strategy relative to the element area: 'cover', 'inscribe' or not defined */
  @attr() public fillMode: ESLMediaFillMode;
  /** Strict aspect ratio definition */
  @attr() public aspectRatio: string;

  /** Disabled marker to prevent rendering */
  @boolAttr() public disabled: boolean;
  /** Autoplay resource marker */
  @boolAttr() public autoplay: boolean;
  /** Autofocus on play marker */
  @boolAttr() public autofocus: boolean;
  /** Mute resource marker */
  @boolAttr() public muted: boolean;
  /** Loop resource play */
  @boolAttr() public loop: boolean;
  /** Marker to show controls for resource player */
  @boolAttr() public controls: boolean;
  /** Allow media to play inline (see HTML video/audio spec) */
  @boolAttr() public playsinline: boolean;
  /** Allows play resource only in viewport area */
  @boolAttr() public playInViewport: boolean;
  /** Preload resource */
  @attr({defaultValue: 'auto'}) public preload: string;

  /** Ready state class/classes */
  @attr() public readyClass: string;
  /** Ready state class/classes target */
  @attr() public readyClassTarget: string;

  /** Class / classes to add when media is accepted */
  @attr() public loadClsAccepted: string;
  /** Class / classes to add when media is declined */
  @attr() public loadClsDeclined: string;
  /** Target element {@link TraversingQuery} select to add accepted/declined classes */
  @attr({defaultValue: '::parent'}) public loadClsTarget: string;

  /** @readonly Ready state marker */
  @boolAttr({readonly: true}) public ready: boolean;
  /** @readonly Active state marker */
  @boolAttr({readonly: true}) public active: boolean;
  /** @readonly Resource played marker */
  @boolAttr({readonly: true}) public played: boolean;
  /** @readonly Error state marker */
  @boolAttr({readonly: true}) public error: boolean;

  // Current playback provider instance; null until (re)initialized
  private _provider: BaseProvider | null;
  // Lazily-built 'load-condition' query; see the conditionQuery getter
  private _conditionQuery: ESLMediaQuery | null;

  // Resize handling is throttled to one call per animation frame
  private deferredResize = rafDecorator(() => this._onResize());
  // Re-initialization is debounced so attribute bursts rebuild the provider once
  private deferredReinitialize = debounce(() => this.reinitInstance());

  /**
   * Map object with possible Player States, values:
   * BUFFERING, ENDED, PAUSED, PLAYING, UNSTARTED, VIDEO_CUED, UNINITIALIZED
   */
  static get PLAYER_STATES() {
    return PlayerStates;
  }

  static get observedAttributes() {
    return [
      'disabled',
      'media-type',
      'media-id',
      'media-src',
      'fill-mode',
      'aspect-ratio',
      'play-in-viewport',
      'muted',
      'loop',
      'controls'
    ];
  }

  /** @returns whether a provider is registered for the given media type name */
  static supports(name: string): boolean {
    return ESLMediaProviderRegistry.instance.has(name);
  }

  protected connectedCallback() {
    super.connectedCallback();
    if (!this.hasAttribute('role')) {
      this.setAttribute('role', 'application');
    }
    this.innerHTML += '<!-- Inner Content, do not modify it manually -->';
    this.bindEvents();
    this.attachViewportConstraint();
    this.deferredReinitialize();
  }

  protected disconnectedCallback() {
    super.disconnectedCallback();
    this.unbindEvents();
    this.detachViewportConstraint();
    this._provider && this._provider.unbind();
  }

  protected attributeChangedCallback(attrName: string, oldVal: string, newVal: string) {
    if (!this.connected || oldVal === newVal) return;
    switch (attrName) {
      // Source/type changes require a full provider rebuild (debounced)
      case 'disabled':
      case 'media-id':
      case 'media-src':
      case 'media-type':
        this.deferredReinitialize();
        break;
      // These can be forwarded to the live provider without a rebuild
      case 'loop':
      case 'muted':
      case 'controls':
        this._provider && this._provider.onSafeConfigChange(attrName, newVal !== null);
        break;
      case 'fill-mode':
      case 'aspect-ratio':
        this.deferredResize();
        break;
      case 'play-in-viewport':
        this.playInViewport ? this.attachViewportConstraint() : this.detachViewportConstraint();
        break;
    }
  }

  protected bindEvents() {
    ESLMediaProviderRegistry.instance.addListener(this._onRegistryStateChange);
    if (this.conditionQuery) {
      this.conditionQuery.addListener(this.deferredReinitialize);
    }
    if (this.fillModeEnabled) {
      window.addEventListener('resize', this.deferredResize);
    }
    window.addEventListener('esl:refresh', this._onRefresh);
    this.addEventListener('keydown', this._onKeydown);
  }

  protected unbindEvents() {
    ESLMediaProviderRegistry.instance.removeListener(this._onRegistryStateChange);
    if (this.conditionQuery) {
      this.conditionQuery.removeListener(this.deferredReinitialize);
    }
    if (this.fillModeEnabled) {
      window.removeEventListener('resize', this.deferredResize);
    }
    window.removeEventListener('esl:refresh', this._onRefresh);
    this.removeEventListener('keydown', this._onKeydown);
  }

  /** @returns whether the element is allowed to render/play (not disabled and the load condition matches) */
  public canActivate() {
    if (this.disabled) return false;
    if (this.conditionQuery) return this.conditionQuery.matches;
    return true;
  }

  // Tears down the current provider and, when activation is allowed,
  // creates and binds a fresh one from the registry.
  private reinitInstance() {
    console.debug('[ESL] Media reinitialize ', this);
    this._provider && this._provider.unbind();
    this._provider = null;
    if (this.canActivate()) {
      this._provider = ESLMediaProviderRegistry.instance.createFor(this);
      if (this._provider) {
        this._provider.bind();
        console.debug('[ESL] Media provider bound', this._provider);
      } else {
        this._onError();
      }
    }
    this.updateContainerMarkers();
  }

  /** Reflects the accepted/declined activation state onto the loadClsTarget element */
  public updateContainerMarkers() {
    const targetEl = TraversingQuery.first(this.loadClsTarget, this) as HTMLElement;
    if (!targetEl) return;
    const active = this.canActivate();
    CSSClassUtils.toggle(targetEl, this.loadClsAccepted, active);
    CSSClassUtils.toggle(targetEl, this.loadClsDeclined, !active);
  }

  /** Seek to given position of media */
  public seekTo(pos: number) {
    return this._provider && this._provider.safeSeekTo(pos);
  }

  /**
   * Start playing media
   * @param allowActivate - allows to remove disabled marker
   */
  public play(allowActivate: boolean = false): Promise<void> | null {
    if (this.disabled && allowActivate) {
      this.disabled = false;
      // Re-init synchronously so playback isn't delayed by the debounce
      this.deferredReinitialize.cancel();
      this.reinitInstance();
    }
    if (!this.canActivate()) return null;
    return this._provider && this._provider.safePlay();
  }

  /** Pause playing media */
  public pause(): Promise<void> | null {
    return this._provider && this._provider.safePause();
  }

  /** Stop playing media */
  public stop(): Promise<void> | null {
    return this._provider && this._provider.safeStop();
  }

  /** Toggle play/pause state of the media */
  public toggle(): Promise<void> | null {
    return this._provider && this._provider.safeToggle();
  }

  /** Focus inner player **/
  public focusPlayer(): void {
    this._provider && this._provider.focus();
  }

  // media live-cycle handlers (invoked by the active provider)

  public _onReady() {
    this.toggleAttribute('ready', true);
    this.toggleAttribute('error', false);
    this.updateReadyClass();
    this.deferredResize();
    this.$$fire('ready');
  }

  public _onError(detail?: any, setReadyState = true) {
    // Errors still mark the element 'ready' (load attempt is finished)
    this.toggleAttribute('ready', true);
    this.toggleAttribute('error', true);
    this.$$fire('error', {detail});
    setReadyState && this.$$fire('ready');
  }

  public _onDetach() {
    this.removeAttribute('active');
    this.removeAttribute('ready');
    this.removeAttribute('played');
    this.updateReadyClass();
    this.$$fire('detach');
  }

  public _onPlay() {
    if (this.autofocus) this.focus();
    this.deferredResize();
    this.setAttribute('active', '');
    this.setAttribute('played', '');
    this.$$fire('play');
    // Group manager may pause other members of the same group
    MediaGroupRestrictionManager.registerPlay(this);
  }

  public _onPaused() {
    this.removeAttribute('active');
    this.$$fire('paused');
    MediaGroupRestrictionManager.unregister(this);
  }

  public _onEnded() {
    this.removeAttribute('active');
    this.$$fire('ended');
    MediaGroupRestrictionManager.unregister(this);
  }

  // Resizes the provider viewport to satisfy the fill mode / aspect ratio
  public _onResize() {
    if (!this._provider) return;
    if (this.fillModeEnabled && this.actualAspectRatio > 0) {
      // 'cover': stretch along the axis that would otherwise leave a gap
      let stretchVertically = this.offsetWidth / this.offsetHeight < this.actualAspectRatio;
      if (this.fillMode === 'inscribe') stretchVertically = !stretchVertically; // Inscribe behaves inversely
      stretchVertically ?
        this._provider.setSize(this.actualAspectRatio * this.offsetHeight, this.offsetHeight) : // h
        this._provider.setSize(this.offsetWidth, this.offsetWidth / this.actualAspectRatio); // w
    } else {
      this._provider.setSize('auto', 'auto');
    }
  }

  @bind
  protected _onRefresh(e: Event) {
    // React only to refresh events raised by our ancestors
    const {target} = e;
    if (target instanceof HTMLElement && target.contains(this)) {
      this._onResize();
    }
  }

  @bind
  protected _onRegistryStateChange(name: string) {
    // Rebuild when the provider matching our type (or any, for 'auto') registers
    const type = this.mediaType.toLowerCase() || 'auto';
    if (name === type || (!this.providerType && type === 'auto')) {
      this.reinitInstance();
    }
  }

  @bind
  protected _onKeydown(e: KeyboardEvent) {
    // Only handle keys targeted at the host element itself
    if (e.target !== this) return;
    if ([SPACE, PAUSE].includes(e.key)) {
      e.preventDefault();
      e.stopPropagation();
      this.toggle();
    }
  }

  /** Update ready class state */
  protected updateReadyClass() {
    const target = TraversingQuery.first(this.readyClassTarget, this) as HTMLElement;
    target && CSSClassUtils.toggle(target, this.readyClass, this.ready);
  }

  /** Applied provider */
  public get providerType() {
    return this._provider ? this._provider.name : '';
  }

  /** Current player state, see {@link ESLMedia.PLAYER_STATES} values */
  public get state() {
    return this._provider ? this._provider.state : PlayerStates.UNINITIALIZED;
  }

  /** Duration of the media resource */
  public get duration() {
    return this._provider ? this._provider.duration : 0;
  }

  /** Current time of media resource */
  public get currentTime() {
    return this._provider ? this._provider.currentTime : 0;
  }

  /** Set current time of media resource */
  public set currentTime(time: number) {
    (this._provider) && this._provider.safeSeekTo(time);
  }

  /** ESLMediaQuery to limit ESLMedia loading */
  public get conditionQuery() {
    // Parsed once on first access; null is cached too (undefined = not yet parsed)
    if (!this._conditionQuery && this._conditionQuery !== null) {
      const query = this.getAttribute('load-condition');
      this._conditionQuery = query ? ESLMediaQuery.for(query) : null;
    }
    return this._conditionQuery;
  }

  /** Fill mode should be handled for element */
  public get fillModeEnabled() {
    return this.fillMode === 'cover' || this.fillMode === 'inscribe';
  }

  /** Used resource aspect ratio forced by attribute or returned by provider */
  public get actualAspectRatio() {
    if (this.aspectRatio && this.aspectRatio !== 'auto') return parseAspectRatio(this.aspectRatio);
    return this._provider ? this._provider.defaultAspectRatio : 0;
  }

  protected attachViewportConstraint() {
    if (this.playInViewport) {
      getIObserver().observe(this);
    }
  }

  protected detachViewportConstraint() {
    const observer = getIObserver(true);
    observer && observer.unobserve(this);
  }

  /** Dispatches a namespaced component event; see {@link ESLMedia.eventNs} */
  public $$fire(eventName: string, eventInit?: CustomEventInit): boolean {
    const ns = (this.constructor as typeof ESLMedia).eventNs;
    return EventUtils.dispatch(this, ns + eventName, eventInit);
  }
}

declare global {
  export interface ESLLibrary {
    Media: typeof ESLMedia;
  }
  export interface HTMLElementTagNameMap {
    'esl-media': ESLMedia;
  }
}
the_stack
// This is a genuinely hard & interesting space, and this code is very much a // first pass solution. import * as I from '../interfaces' import err from '../err' import {queryTypes, resultTypes} from '../qrtypes' import {vIntersectMut, vEq, vSparse} from '../version' import sel from '../sel' import {vMax, vMin, vCmp} from '../version' import iterGuard from '../iterGuard' import { bitSet } from '../bit' import streamToIter from 'ministreamiterator' // import {inspect} from 'util' // const ins = (x: any) => inspect(x, {depth: null, colors: true}) const assert = (v: any, msg?: string) => { if (!v) throw Error('assertion failed: ' + msg) } // Selector utilities. type Sel = I.StaticKeySelector export type Router<Val> = I.Store<Val> & { mount<V2 extends Val>(store: I.Store<V2>, fPrefix: string, range: [Sel, Sel] | null, bPrefix: string, isOwned: boolean): void, } type RangeRes = {inputs: [number, number][], reverse: boolean}[] type Route = { store: I.Store<any>, // This maps from backend source id to frontend source (eg sourceMap[0] = 4) sourceMap: number[], // Frontend prefix (eg movies/) fPrefix: string, // Frontend range (eg ['movies/m', 'movies/q']). fRange: [Sel, Sel], // Backend prefix (eg imdb/) bPrefix: string, // Are we the exclusive owner of the backend DB? Aka, when the router is // closed, should we chain close the backend? 
isOwned: boolean, } let nextId = 1000 // For debugging export const prefixToRange = (prefix: string): [Sel, Sel] => [sel(prefix), sel(prefix+'~')] export const ALL = prefixToRange('') const changePrefix = (k: I.Key, fromPrefix: string, toPrefix: string) => { assert(k.startsWith(fromPrefix)) return toPrefix + k.slice(fromPrefix.length) } const changeSelPrefix = (s: Sel, fromPrefix: string, toPrefix: string): Sel => { return sel(changePrefix(s.k, fromPrefix, toPrefix), s.isAfter) } // const mapKeysInto = <T>(dest: Map<I.Key, T> | null, oldMap: Map<I.Key, T> | null, keyRoutes: Map<I.Key, Route>) => { // if (dest == null) dest = new Map<I.Key, T>() // if (oldMap == null) return dest // const r = resultTypes['kv'] // return r.copyInto!(dest, r.mapEntries(oldMap, (bk, v) => { // const route = keyRoutes.get(bk!) // return route == null ? null : [changePrefix(bk!, route.bPrefix, route.fPrefix), v] // })) // } // Convert frontend versions into backend versions for a particular route const mapVersionsFrontToBack = <V>(versions: (V | null)[], sourceMap: number[]): (V | null)[] => ( sourceMap.map(m => versions[m]) ) const mapVersionsBackToFront = <V>(versions: (V | null)[], sourceMap: number[]): (V | null)[] => { const result: (V | null)[] = [] versions.forEach((v, i) => {result[sourceMap[i]] = v}) return result } const mapKVResults = <T>(from: Map<I.Key, T>, routes: Map<I.Key, Route>): Map<I.Key, T> => ( resultTypes[I.ResultType.KV].mapEntries(from, (bk, v) => { const route = (routes as Map<I.Key, Route>).get(bk!) return route == null ? 
null : [changePrefix(bk!, route.bPrefix, route.fPrefix), v] }) ) const mapRangeResults = <T>(from: [I.Key, T][][], routes: Route[]): [I.Key, T][][] => ( from.map((rr, i) => { const route = (routes as Route[])[i] return rr.map( ([k, v]) => ([changePrefix(k, route.bPrefix, route.fPrefix), v] as I.KVPair<T>) ) }) ) const mapResults = <T>(qtype: I.QueryType, from: Map<I.Key, T> | [I.Key, T][][], routes: Map<I.Key, Route> | Route[]) => ( qtype === I.QueryType.KV ? mapKVResults(from as Map<I.Key, T>, routes as Map<I.Key, Route>) : mapRangeResults(from as [I.Key, T][][], routes as Route[]) ) const mergeKVResults = <T>(from: (Map<I.Key, T> | null)[]): Map<I.Key, T> => { const results = new Map() for (let i = 0; i < from.length; i++) { const innerMap = from[i] if (innerMap != null) for (const [k, v] of innerMap.entries()) results.set(k, v) } return results } const mergeRangeResults = <T>(from: (T[][] | null)[], res: RangeRes): T[][] => ( res.map(({reverse, inputs}, i) => { const r = ([] as T[]).concat( ...inputs .filter(([bsIdx]) => from[bsIdx] != null) .map(([bsIdx, outIdx]) => from[bsIdx]![outIdx]) ) return reverse ? r.reverse() : r }) ) // const mergeResults = <T>(qtype: I.QueryType, from: (Map<I.Key, T> | null)[] | ([I.Key, T][][] | null)[], res: any, q: I.QueryData) => ( const mergeResults = <T>(qtype: I.QueryType, from: (Map<I.Key, T> | [I.Key, T][][] | null)[], res: any) => ( qtype === I.QueryType.KV ? mergeKVResults(from as (Map<I.Key, T> | null)[]) : mergeRangeResults(from as ([I.Key, T][][] | null)[], res) ) const mapCRKeys = (qtype: I.QueryType, data: I.ReplaceQueryData, mapFn: (k: I.Key, i: number) => I.Key | null) => ( qtype === I.QueryType.KV ? queryTypes[I.QueryType.KV].mapKeys!(data, mapFn) : (data as I.StaticRangeQuery[]).map(r => queryTypes[I.QueryType.StaticRange].mapKeys!(r, mapFn)) ) // Consumes data. 
(It rewrites it in-place) const mapCatchupMut = <Val>(data: I.CatchupData<Val>, qtype: I.QueryType, routes: Map<I.Key, Route> | Route[], sourceMap: number[]) => { // const qr = queryTypes[qtype] if (data.replace) { // This is super gross. The data format for KVs matches the format for // fetches, but the data format for ranges is subtly different. What a // mess. data.replace.q.q = mapCRKeys(qtype, data.replace.q.q, (bk, i) => { const route = qtype === I.QueryType.KV ? (routes as Map<I.Key, Route>).get(bk) : (routes as Route[])[i] // TODO: Why would route be unset here? assert(route) return route ? changePrefix(bk, route.bPrefix, route.fPrefix) : null }) data.replace.with = qtype === I.QueryType.KV ? mapResults(qtype, data.replace.with, routes) : (data.replace.with as [I.Key, Val][][][]).map(d => mapResults(qtype, d, routes)) data.toVersion = mapVersionsBackToFront(data.toVersion, sourceMap) } for (let i = 0; i < data.txns.length; i++) { data.txns[i].txn = mapResults(qtype, data.txns[i].txn as I.KVTxn<Val>, routes) data.txns[i].versions = mapVersionsBackToFront(data.txns[i].versions, sourceMap) } data.toVersion = mapVersionsBackToFront(data.toVersion, sourceMap) return data } // Wrap a subscription with the route mapping. const mapSub = async function*<Val>(sub: I.Subscription<Val>, qtype: I.QueryType, routes: Map<I.Key, Route> | Route[], sourceMap: number[]) { for await (const c of sub) { yield mapCatchupMut(c, qtype, routes, sourceMap) } } const mergeVersionsInto = (dest: I.FullVersion, src: I.FullVersion) => { for (let i = 0; i < src.length; i++) dest[i] = src[i] } // Consumes both. Returns a. 
const composeCatchupsMut = <Val>(qtype: I.QueryType, a: I.CatchupData<Val>, b: I.CatchupData<Val>) => { const qt = queryTypes[qtype] if (b.replace) { if (b.replace.q.type !== qtype) throw new err.InvalidDataError() if (a.replace) { if (a.replace.q.type !== qtype) throw new err.InvalidDataError() const {q, with: w} = qt.composeCR(a.replace, b.replace) a.replace.q = q a.replace.with = w mergeVersionsInto(a.replace.versions, b.replace.versions) } else { a.replace = b.replace } // Trim out any transaction data in a that has been replaced by b. if (qtype === I.QueryType.KV) { let i = 0 while (i < a.txns.length) { const txn = (a.txns[i].txn as I.KVTxn<Val>) for (const k in txn.values()) if ((<Set<I.Key>>b.replace.q.q).has(k)) txn.delete(k) if (txn.size == 0) a.txns.splice(i, 1) else ++i } } else if (a.txns.length) throw Error('not implemented') } // Then merge all remaining transactions into a. a.txns.push(...b.txns) mergeVersionsInto(a.toVersion, b.toVersion) return a } // Its sad that I need this - its only used for subscription replace queries // at the moment. Though it will also be used for baked queries in fetch. const mergeKVQueries = <T>(from: (Set<I.Key> | null)[]): Set<I.Key> => { const results = new Set<I.Key>() for (let i = 0; i < from.length; i++) { const innerMap = from[i] if (innerMap != null) for (const k of innerMap) results.add(k) } return results } const mergeRangeQueries = <T>(from: (I.StaticRange[][] | null)[], res: RangeRes): I.StaticRangeQuery[] => ( // TODO: This is almost identical to mergeRangeResults. Consider merging the // two functions. res.map(({inputs}, i) => ([] as I.StaticRangeQuery).concat( ...inputs .filter(([bsIdx]) => from[bsIdx] != null) .map(([bsIdx, outIdx]) => from[bsIdx]![outIdx]) )) ) const mergeQueries = <T>(qtype: I.QueryType, from: (Set<I.Key> | I.StaticRange[][] | null)[], res: any) => ( qtype === I.QueryType.KV ? 
mergeKVQueries(from as (Set<I.Key> | null)[]) : mergeRangeQueries(from as (I.StaticRange[][] | null)[], res) ) // const noopRange = (): I.StaticRange => ({from: sel(''), to: sel('')}) const mergeCatchups = <Val>(qtype: I.QueryType, cd: I.CatchupData<Val>[], res: any): I.CatchupData<Val> => { const result: I.CatchupData<Val> = {txns: [], toVersion: [], caughtUp: false} // Check if any of the catchup data contains a replace block if (cd.reduce((v, cd) => !!cd.replace || v, false)) { // What a mess. const q = { type: qtype, q: mergeQueries(qtype, cd.map( c => c.replace == null ? null : c.replace.q.q as Set<I.Key> | I.StaticRange[][] ), res) } as I.ReplaceQuery const w = mergeResults(qtype, cd.map(c => c.replace == null ? null : c.replace.with), res) // So the replace versions might not match up. I'm not really sure what to // do in that case - it shouldn't really matter; but its technically // incorrect. const versions = cd.reduce((acc, src) => { if (src.replace) { const srv = src.replace.versions for (let i = 0; i < srv.length; i++) if (srv[i] != null) { acc[i] = acc[i] == null ? srv[i] : vMax(acc[i]!, srv[i]!) } } return acc }, [] as I.FullVersion) result.replace = {q, with: w, versions} } for (let i = 0; i < cd.length; i++) { const src = cd[i] // The downside of doing it this way is that the results won't // be strictly ordered by version. They should still be // correct if applied in sequence.. but its a lil' janky. result.txns.push(...src.txns) for (let i = 0; i < src.toVersion.length; i++) if (src.toVersion[i] != null) { const dv = result.toVersion[i] // For some reason when this error happens, it takes a couple seconds for the server to actually crash out. 
// if (dv != null && !vEq(dv, src.toVersion[s])) { // console.log('misaligned', s, dv, src.toVersion[s]) // process.exit(1) // } if (dv == null) result.toVersion[i] = src.toVersion[i] else if (!vEq(dv, src.toVersion[i]!)) throw Error('Subscription ops misaligned') } if (src.caughtUp) result.caughtUp = true } return result } // TODO: Change this API to have the routes passed in at construction time. export default function router<Val>(): Router<Val> { // The routes list is kept sorted in order of frontend ranges const routes: Route[] = [] const sources: string[] = [] const sourceIsMonotonic: true[] = [] const getRoute = (k: I.Key) => ( // This could be rewritten to use binary search. It won't make a huge // difference in practice though - the number of routes will usually // stay pretty small. routes.find(r => sel.kGt(k, r.fRange[0]) && sel.kLt(k, r.fRange[1])) || null ) type ByStore = { store: I.Store<Val>, q: I.QueryData, routes: Map<I.Key, Route> | Route[], /** Map from each backend source -> frontend source index. 
Eg [3,5] */ sourceMap: number[] } const splitKeysByStore = (keys: Set<I.Key>) => { // const result: [I.Store<Val>, Set<I.Key>, Map<I.Key, Route>, number[]][] = [] const result: ByStore[] = [] for (const fKey of keys) { const route = getRoute(fKey) if (route) { const bKey = changePrefix(fKey, route.fPrefix, route.bPrefix) let pair = result.find(({store}) => store === route.store) let q, routeForKey if (pair == null) { [q, routeForKey] = [new Set<I.Key>(), new Map()] result.push({store: route.store, q, routes: routeForKey, sourceMap: route.sourceMap}) } else { q = pair.q as I.KVQuery routeForKey = pair.routes as Map<I.Key, Route> } q.add(bKey) routeForKey.set(bKey, route) } } return result } const splitRangeByRoutes = (r: I.StaticRange) => { const result: [Sel, Sel, Route][] = [] for (let i = 0; i < routes.length; i++) { const {fRange, fPrefix, bPrefix} = routes[i] const activeRange = sel.intersect(r.low, r.high, fRange[0], fRange[1]) if (activeRange != null) { result.push([ changeSelPrefix(activeRange[0], fPrefix, bPrefix), changeSelPrefix(activeRange[1], fPrefix, bPrefix), routes[i] ]) } } return result } const splitRangeQuery = (q: I.StaticRangeQuery) => { // For each store, we'll generate the range query that applies to that // store and the list of routes that each part of the range query feeds // from, to map the query back. const byStore: ByStore[] = [] // When we run the queries we'll have a list of [store, results[][]] // that we need to map back into a list of results corresponding to our // input range. For each input range we'll produce some data describing // how the corresponding output will be generated. // res will mirror the format of query.q. For each range in the query, // we'll have a list of [byStore index, output index] pairs from which // the actual result will be aggregated. 
const res = q.map((r, i) => ({ inputs: splitRangeByRoutes(r).map(([from, to, route], k) => { // console.log('srr', r, from, to, route.fPrefix) const {store} = route let byStoreIdx = byStore.findIndex(x => x.store === store) if (byStoreIdx < 0) { byStoreIdx = byStore.push({store, q: [], routes: [], sourceMap: route.sourceMap}) - 1 } const outIdx = (byStore[byStoreIdx].q as I.StaticRangeQuery).push({low:from, high:to}) - 1 ;(byStore[byStoreIdx].routes as Route[])[outIdx] = route return [byStoreIdx, outIdx] as [number, number] }), reverse: r.reverse || false, } as RangeRes[0])) return [byStore, res] as [typeof byStore, typeof res] } const splitQueryByStore = (q: I.Query): [ByStore[], any] => { if (q.type === I.QueryType.KV) return [splitKeysByStore(q.q), null] else if (q.type === I.QueryType.StaticRange) return splitRangeQuery(q.q) else throw new err.UnsupportedTypeError('Router only supports kv queries') } const isBefore = (v: I.FullVersion, other: I.FullVersion) => { for (let i = 0; i < v.length; i++) if (v[i] != null) { assert(other[i] != null) if (vCmp(v[i]!, other[i]!) < 0) return true } return false } const storeInfo = { uid: `router()`, sources, sourceIsMonotonic, capabilities: { queryTypes: bitSet(I.QueryType.KV, I.QueryType.StaticRange), mutationTypes: bitSet(I.ResultType.KV), } } return { // Note the storeinfo will change as more stores are added. storeInfo, // NOTE: You must mount all stores before the router is used. // TODO: Enforce that. mount(store, fPrefix, range, bPrefix, isOwned) { if (range == null) range = ALL assert(sel.ltSel(range[0], range[1]), 'Range start must be before range end') // TODO: Consider keeping this list sorted. const sourceMap = store.storeInfo.sources.map((s, i) => { const si = sources.indexOf(s) if (si >= 0) return si const idx = sources.push(s) - 1 sourceIsMonotonic.push(true) return idx }) // Filter the advertised capabilities to only be those which all our // routes support. 
// const {queryTypes, mutationTypes} = this.storeInfo.capabilities // const {queryTypes: oqt, mutationTypes: omt} = store.storeInfo.capabilities // for (const type of queryTypes) if (!oqt.has(type)) queryTypes.delete(type) // for (const type of mutationTypes) if (!omt.has(type)) mutationTypes.delete(type) // Ok, now create & add the route to the list. const [a, b] = [sel.addPrefix(fPrefix, range[0]), sel.addPrefix(fPrefix, range[1])] const route: Route = { store, sourceMap, fPrefix, fRange: [a, b], bPrefix, isOwned } // Check that the new route doesn't overlap an existing route let pos = 0 for(; pos < routes.length; pos++) { const r = routes[pos].fRange if (sel.ltSel(a, r[1])) { assert(sel.LtESel(b, r[0]), 'Routes overlap') break } } // Routes are kept sorted routes.splice(pos, 0, route) // This ends up being quite long, though I'm not sure if that actually matters. storeInfo.uid = `router(${routes.map(({store}) => store.storeInfo.uid).join(',')})` }, async fetch(query, frontOpts = {}) { // Range support here is a red hot mess. I have a sticky situation in // that the implementation for range reads and the implementation for KV // reads is similar, but definitely not the same. I could write this in // a generic way by specializing a bunch of functionality, but the // effect would make it much less readable and I'm not sure its worth // the effort. For now I'm just going to maintain two separate // implementations. 
const {type: qtype, q} = query if (qtype !== I.QueryType.KV && qtype !== I.QueryType.StaticRange) { throw new err.UnsupportedTypeError('Invalid type in range query ' + qtype) } const [byStore, res] = splitQueryByStore(query) let versions: I.FullVersionRange = [] const innerResults = await Promise.all(byStore.map(async ({store, q, routes, sourceMap}) => { const backOpts = {...frontOpts} if (frontOpts.atVersion) { backOpts.atVersion = mapVersionsFrontToBack(frontOpts.atVersion, sourceMap) } const r = await store.fetch({type: qtype, q} as I.Query, backOpts) const newVersions = vIntersectMut(versions, r.versions) if (newVersions == null) throw Error('Incompatible versions in results not yet implemented') versions = newVersions return mapResults(qtype, r.results as I.RangeResult<Val>, routes) })) return { results: mergeResults(qtype, innerResults, res), versions, } }, async getOps(query, queryVersions, opts) { const qtype = query.type if (qtype !== I.QueryType.KV && qtype !== I.QueryType.StaticRange) throw new err.UnsupportedTypeError() const [byStore, res] = splitQueryByStore(query) let validRange: I.FullVersionRange = [] // Output valid version range // For now this function requires that the underlying stores each only // contain a single source. This is sort of disappointing, but the fully // general version of this function requires some fancy graph data // structures, and its simply not important enough at the moment. type SingleTxnData = {txn: I.KVTxn<Val> | I.RangeTxn<Val>, v: I.Version, meta: I.Metadata} // const opsForSource = new Map<I.Source, SingleTxnData[]>() const frontSourcesUsed: (true | undefined)[] = [] // frontend sources. const opsByStore = await Promise.all(byStore.map(async ({store, q, routes, sourceMap}) => { const {ops, versions} = await store.getOps( {type:query.type, q} as I.Query, mapVersionsFrontToBack(queryVersions, sourceMap), opts ) // We'll just map the keys back here. 
const newValidRange = vIntersectMut(validRange, versions) // I think this should never happen in real life. It probably // indicates a bug in the backend stores. if (newValidRange == null) throw Error('Conflicting valid version ranges in getOps') validRange = newValidRange let backSourceIdx: number | null = null return ops.map(op => { // assert(op.txn instanceof Map) const sourceIdxs = op.versions.map((v, i) => v == null ? -1 : i).filter(i => i >= 0) assert(sourceIdxs.length) if (sourceIdxs.length > 1) throw Error('Multiple backend sources in router not implemented yet') if (backSourceIdx == null) backSourceIdx = sourceIdxs[0] else if (backSourceIdx !== sourceIdxs[0]) throw Error('Multiple backend sources in router not implemented yet') frontSourcesUsed[sourceMap[backSourceIdx]] = true // Destructively rewrite the keys in each op return <SingleTxnData>{ txn: qtype === I.QueryType.KV ? mapKVResults(op.txn as I.KVTxn<Val>, routes as Map<I.Key, Route>) : mapRangeResults(op.txn as I.RangeTxn<Val>, routes as Route[]), v: op.versions[backSourceIdx], meta: op.meta, } }) })) // There's two transformations we need to do before returning the // results: // // - Merge all the ops from the same source together, with respect to the query // - Flatmap the results const resultsBySource = frontSourcesUsed.map((hasOps, frontSourceIdx) => { if (!hasOps) return [] // mergeResults(qtype, opsByStore. const filtered = opsByStore .map((data, i) => (data.length && byStore[i].sourceMap[0] === frontSourceIdx) ? data : null) .filter(x => x != null) as SingleTxnData[][] // This is all pretty inefficient, but it should be correct enough for // now. I can optimize later. const merged: I.TxnWithMeta<Val>[] = [] while (true) { // First find the minimum version let meta: I.Metadata = null as any const versions = filtered.filter(item => item != null && item.length > 0) .map(item => { meta = item![0].meta // Dodgy. return item![0].v }) if (versions.length === 0) break // We're done. 
const minVersion = versions.reduce(vMin) const toMerge = filtered.map(item => item != null && item.length > 0 && vEq(item[0].v, minVersion) ? item.shift()!.txn : null) const txn = mergeResults(qtype, toMerge, res) // TODO: reverse? merged.push({ txn, versions: vSparse(frontSourceIdx, minVersion), meta, }) } return merged }) return { ops: ([] as I.TxnWithMeta<Val>[]).concat(...resultsBySource), versions: validRange, } }, subscribe(q, opts = {}) { const id = nextId++ // console.log(id, 'q', q) const qtype = q.type if (qtype !== I.QueryType.KV && qtype !== I.QueryType.StaticRange) { throw new err.UnsupportedTypeError('Router only supports kv and static range queries') } let {fromVersion} = opts if (fromVersion === 'current') throw new Error('opts.fromVersion current not supported by router') // Subscribe here merges a view over a number of child subscriptions. // When advancing the catchup iterators, we have to group the // subscriptions by their sources and merge catchup data const [byStore, res] = splitQueryByStore(q) // console.log('qbs', byStore) const childOpts = { ...opts, alwaysNotify: true } const childSubs = byStore.map(({store, q, routes, sourceMap}) => { const rawSub = store.subscribe({type:qtype, q} as I.Query, childOpts) return { store, sub: iterGuard(mapSub(rawSub, qtype, routes, sourceMap), rawSub.return) as I.AsyncIterableIteratorWithRet<I.CatchupData<Val>>, } }) const stream = streamToIter<I.CatchupData<Val>>(() => { // We're done reading. Close parents. // console.log(id, 'router stream return') for (const sub of childSubs) sub.sub.return() }) ;(async () => { let finished = false // Either opts.fromVersion is set, and we're going to subscribe to all // stores from the version specified in the options. Or its not set // (its null), and the underlying subscribe functions will do a whole // catchup first. Either way by convention we'll get an initial // catchup op from the stores; it might just be empty. We forward that // on. 
const catchupIterItem = await Promise.all(childSubs.map(({sub}) => sub.next())) // Figure out the starting version for subscriptions, which is the // max version of everything that was returned. fromVersion = [] const catchups: I.CatchupData<Val>[] = [] for (let i = 0; i < catchupIterItem.length; i++) { const catchup = catchupIterItem[i].value if (catchup == null) { // One of the child subscriptions ended before it started. Bubble up! console.warn('In router child subscription ended before catchup!') stream.end() for (const sub of childSubs) sub.sub.return() return } for (let si = 0; si < catchup.toVersion.length; si++) { const v = catchup.toVersion[si] if (v != null) fromVersion[si] = (fromVersion[si] == null) ? v : vMax(fromVersion[si]!, v) } catchups.push(catchup) } // console.log('catchups', catchups) // Ok, now we need to wait for everyone to catch up to fromVersion! const waiting = [] for (let i = 0; i < catchups.length; i++) { if (isBefore(catchups[i].toVersion, fromVersion)) waiting.push((async () => { // I could use for-await here, but I want a while loop and its // pretty twisty. while (!finished && isBefore(catchups[i].toVersion, fromVersion)) { const {value, done} = await childSubs[i].sub.next() if (done) { finished = true; return } composeCatchupsMut(qtype, catchups[i], value) for (let si = 0; si < catchups[i].toVersion.length; si++) { const tv = catchups[i].toVersion[si] if (tv != null && fromVersion[si] != null && vCmp(tv, fromVersion[si]!) > 0) { throw new Error('Skipped target version. Handling this is NYI') } } } })()) } await Promise.all(waiting) // We'll send all initial catchups in one big first update. This is // consistent with the behaviour of stores and will let the router // self-compose cleaner. // // Note we're sending the catchup even if there's no data - if the // input query maps to an empty query, we'll still generate an empty // catchup which returns nothing. 
// console.log(id, 'initial emitting', mergeCatchups(qtype, catchups, res)) stream.append(mergeCatchups(qtype, catchups, res)) // ***** Ok we have our initial data and fromVersion is set now. // Next up subscriptions are grouped based on their source. This is // needed because the subscription API produces an entire source's // updates at once. So if there are two subs with the same source, // we need to wait for both subs to tick before returning the merged // catchup data to the consumer. // // And if there are three subs - one with source [a], one with source // [a,b] and one with source [b], we'll need to put all of them into a // group together. // // We should be able to have more granularity based on the actual query // subscription, but this is fine for now. const subGroups: { store: I.Store<Val>, // routes: Route[] | Map<string, Route>, sub: I.Subscription<Val>, }[][] = [] { // I could convert this to a set, but it should be a really small list // anyway. This is all kind of gross - we need childSubs for the // cursor functions below, and this code was written to consume them // into subGroups. Bleh. POC. const subsCopy = childSubs.slice() while (subsCopy.length) { const groupSources = new Set<I.Source>() // local sources for this group const group = [subsCopy.pop()!] group[0]!.store.storeInfo.sources.forEach(s => groupSources.add(s)) let i = 0; while (i < subsCopy.length) { const store = subsCopy[i].store let hasNew = false let hasCommon = false store.storeInfo.sources.forEach(s => { if (groupSources.has(s)) hasCommon = true else hasNew = true }) if (hasCommon) { // Put the sub+store in the group group.push(subsCopy[i]) subsCopy[i] = subsCopy[subsCopy.length-1] subsCopy.length-- if (hasNew) { store.storeInfo.sources.forEach(s => groupSources.add(s)) // Try again with all the new sources. This is potentially n^2 // with the number of routes / sources; but the number will // usually be small so it shouldn't matter. 
This could be sped // up; but its not important to do so yet. i = 0 } } else i++ } subGroups.push(group) } } // Ok now we'll start streaming the subscriptions for real. for (let i = 0; i < subGroups.length; i++) { assert(subGroups[i].length > 0) const subs = subGroups[i].map(ss => ss.sub) // const routes = subGroups[i].map(ss => ss.routes) ;(async () => { // First while (true) { const nexts = await Promise.all(subs.map(sub => sub.next())) const updates = nexts.map(({value, done}) => { if (done) finished = true return value }) // Locally or from another sub group. if (finished) { // console.log(id, 'finished!') break } // console.warn(id, 'merge catchups - updates', updates) // I could update fromVersions, but I don't need to. Its not // used for anything at this point. const merged = mergeCatchups<Val>(qtype, updates, res) if (opts.alwaysNotify || merged.replace || merged.txns.length) { stream.append(merged) } } // We're done. Propogate upwards subs.forEach(sub => sub.return()) })() } // This is a little inelegant, but we'll throw any errors up to the client. })().catch(err => stream.throw(err)) return stream.iter }, // Mutation can work through the router, but doing multi-store // transactions is too hard / impossible with the current architecture. // We'll only allow transactions which hit one store. async mutate(type, fTxn, versions, opts) { if (type !== I.ResultType.KV) throw new err.UnsupportedTypeError('Only kv mutations supported') fTxn = fTxn as I.KVTxn<Val> let store = null const bTxn = new Map<I.Key, I.Op<Val>>() for (const [fk, op] of fTxn) { const route = getRoute(fk) if (route == null) throw new err.InvalidDataError('Mutation on unmounted key') if (store == null) store = route.store else if (store !== route.store) { // Consider adding a {nonAtomic} flag to allow mutation splitting here. 
throw new err.InvalidDataError('Mutation txn in router cannot span multiple stores') } const bk = changePrefix(fk, route.fPrefix, route.bPrefix) bTxn.set(bk, op) } if (store == null) return [] else return store.mutate(I.ResultType.KV, bTxn, versions, opts) }, close() { const stores = Array.from(new Set(routes.filter(r => r.isOwned).map(r => r.store))) stores.forEach(s => s.close()) }, } }
the_stack
import { logError } from 'common-utils/log.js';
import { BaseSetting, Glob, GlobDef } from 'cspell-lib';
import * as os from 'os';
import * as Path from 'path';
import { WorkspaceFolder } from 'vscode-languageserver/node';
import { URI as Uri } from 'vscode-uri';
import { CSpellUserSettings } from '../config/cspellConfig';
import { extractDictionaryDefinitions, extractDictionaryList } from './customDictionaries';

export type WorkspaceGlobResolverFn = (glob: Glob) => GlobDef;
export type WorkspacePathResolverFn = (path: string) => string;

/** Pair of resolvers used to rewrite `${workspaceFolder}`-style references in settings. */
interface WorkspacePathResolver {
    /** Resolves workspace references inside a plain file path. */
    resolveFile: WorkspacePathResolverFn;
    /** Given a glob root, returns a resolver for individual globs. */
    resolveGlob: (globRoot: string | undefined) => WorkspaceGlobResolverFn;
}

/** A workspace folder reduced to its name plus filesystem path/uri forms. */
interface FolderPath {
    name: string;
    path: string;
    uri: Uri;
}

/**
 * Resolve all workspace-relative references (`${workspaceFolder}`, `~`, `.`, …)
 * inside a settings object, section by section.
 *
 * @param settings - the raw user settings to resolve
 * @param resolver - the workspace path/glob resolver to apply
 * @returns the resolved settings with `undefined` fields removed
 */
export function resolveSettings<T extends CSpellUserSettings>(settings: T, resolver: WorkspacePathResolver): T {
    // Sections
    // - imports
    // - dictionary definitions (also nested in language settings)
    // - globs (ignorePaths and Override filenames)
    // - override dictionaries
    // - custom dictionaries
    // There is a more elegant way of doing this, but for now just change each section.
    const newSettings = resolveCoreSettings(settings, resolver);
    newSettings.import = resolveImportsToWorkspace(newSettings.import, resolver);
    newSettings.overrides = resolveOverrides(newSettings, resolver);

    // Merge custom dictionaries
    const dictionaryDefinitions = resolveDictionaryPathReferences(extractDictionaryDefinitions(newSettings), resolver);
    newSettings.dictionaryDefinitions = dictionaryDefinitions.length ? dictionaryDefinitions : undefined;
    // By default all custom dictionaries are enabled
    const dictionaries = extractDictionaryList(newSettings);
    newSettings.dictionaries = dictionaries.length ? dictionaries : undefined;

    // Remove unwanted settings.
    // The custom-dictionary shorthands were folded into dictionaryDefinitions /
    // dictionaries above, so drop the raw fields.
    delete newSettings.customUserDictionaries;
    delete newSettings.customWorkspaceDictionaries;
    delete newSettings.customFolderDictionaries;
    return shallowCleanObject(newSettings);
}

/**
 * Build a {@link WorkspacePathResolver} for a workspace.
 *
 * @param folder - Workspace folder to be considered the active folder
 * @param folders - all folders including the active folder
 * @param root - optional file path
 */
export function createWorkspaceNamesResolver(
    folder: WorkspaceFolder,
    folders: WorkspaceFolder[],
    root: string | undefined
): WorkspacePathResolver {
    return {
        resolveFile: createWorkspaceNamesFilePathResolver(folder, folders, root),
        resolveGlob: createWorkspaceNamesGlobPathResolver(folder, folders, root),
    };
}

/** Convert an LSP WorkspaceFolder (uri string) into a {@link FolderPath}. */
function toFolderPath(w: WorkspaceFolder): FolderPath {
    const uri = Uri.parse(w.uri);
    return {
        name: w.name,
        path: uri.fsPath,
        uri: uri,
    };
}

/** Adapter: map WorkspaceFolders to FolderPaths and build a file-path resolver. */
function createWorkspaceNamesFilePathResolver(
    folder: WorkspaceFolder,
    folders: WorkspaceFolder[],
    root: string | undefined
): WorkspacePathResolverFn {
    return createWorkspaceNameToPathResolver(toFolderPath(folder), folders.map(toFolderPath), root);
}

/** Adapter: map WorkspaceFolders to FolderPaths and build a glob resolver factory. */
function createWorkspaceNamesGlobPathResolver(
    folder: WorkspaceFolder,
    folders: WorkspaceFolder[],
    root: string | undefined
): (globRoot: string | undefined) => WorkspaceGlobResolverFn {
    const rootFolder = toFolderPath(folder);
    return createWorkspaceNameToGlobResolver(rootFolder, folders.map(toFolderPath), root);
}

/**
 * Build a glob resolver that rewrites leading `${workspaceFolder}` /
 * `${workspaceFolder:<name>}` prefixes in glob patterns and glob roots into
 * absolute folder paths.
 *
 * @param folder - the active folder; also the fallback for `${workspaceFolder}`
 * @param folders - all folders, used for named `${workspaceFolder:<name>}` lookups
 * @param workspaceRoot - optional explicit root path
 */
function createWorkspaceNameToGlobResolver(
    folder: FolderPath,
    folders: FolderPath[],
    workspaceRoot: string | undefined
): (globRoot: string | undefined) => WorkspaceGlobResolverFn {
    // Copy inputs so later mutation by the caller cannot affect the closures.
    const _folder = { ...folder };
    const _folders = [...folders];
    return (globRoot: string | undefined) => {
        const folderPairs = [['${workspaceFolder}', _folder.path] as [string, string]].concat(
            _folders.map((folder) => [`\${workspaceFolder:${folder.name}}`, folder.path])
        );
        // NOTE(review): this assignment appears dead — `resolveRoot` below
        // declares its own `const workspaceRoot`, and the parameter is never
        // read after this point. Confirm before removing.
        workspaceRoot = workspaceRoot || _folder.path;
        const map = new Map(folderPairs);
        // Matches a leading `${workspaceFolder...}` reference (optionally named).
        const regEx = /^\$\{workspaceFolder(?:[^}]*)\}/i;

        // Default root when none was supplied: the active workspace folder.
        const root = resolveRoot(globRoot || '${workspaceFolder}');

        // Look up a `${workspaceFolder...}` reference; logs and returns the
        // match unchanged when it cannot be resolved.
        function lookUpWorkspaceFolder(match: string): string {
            const r = map.get(match);
            if (r !== undefined) return r;
            logError(`Failed to resolve ${match}`);
            return match;
        }

        // Resolve a leading workspace reference in a glob root to an absolute path.
        function resolveRoot(globRoot: string | undefined): string | undefined {
            const matchRoot = globRoot?.match(regEx);
            if (matchRoot && globRoot) {
                const workspaceRoot = lookUpWorkspaceFolder(matchRoot[0]);
                return Path.join(workspaceRoot, globRoot.slice(matchRoot[0].length));
            }
            return globRoot;
        }

        return (glob: Glob) => {
            // Normalize string globs to the object form, attaching the default root.
            if (typeof glob == 'string') {
                glob = {
                    glob,
                    root,
                };
            }
            const matchGlob = glob.glob.match(regEx);
            if (matchGlob) {
                // The glob itself starts with a workspace reference: that
                // reference becomes the root and is stripped from the pattern.
                const root = lookUpWorkspaceFolder(matchGlob[0]);
                return {
                    ...glob,
                    glob: glob.glob.slice(matchGlob[0].length),
                    root,
                };
            }
            return {
                ...glob,
                root: resolveRoot(glob.root),
            };
        };
    };
}

/**
 * Build a resolver that rewrites a leading `.`, `~`, `${workspaceFolder}`,
 * `${workspaceFolder:<name>}`, `${workspaceRoot}` or `${root}` prefix of a
 * path into the corresponding absolute folder path.
 *
 * @param currentFolder - the active folder (target of `.` and `${workspaceFolder}`)
 * @param folders - all folders, used for named lookups and as `${root}` fallback
 * @param root - optional file path to consider the root
 */
function createWorkspaceNameToPathResolver(
    currentFolder: FolderPath,
    folders: FolderPath[],
    root: string | undefined
): WorkspacePathResolverFn {
    const folderPairs = ([] as [string, string][])
        .concat([
            ['.', currentFolder.path],
            ['~', os.homedir()],
            ['${workspaceFolder}', currentFolder.path],
            ['${root}', root || folders[0]?.path || currentFolder.path],
            ['${workspaceRoot}', root || folders[0]?.path || currentFolder.path],
        ])
        .concat(folders.map((folder) => [`\${workspaceFolder:${folder.name}}`, folder.path]));
    const map = new Map(folderPairs);
    // Matches only a LEADING reference; anything later in the path is untouched.
    const regEx = /^(?:\.|~|\$\{(?:workspaceFolder|workspaceRoot|root)(?:[^}]*)\})/i;

    // Replacement callback; logs and keeps the original text on a failed lookup.
    function replacer(match: string): string {
        const r = map.get(match);
        if (r) return r;
        logError(`Failed to resolve ${match}`);
        return match;
    }

    return (path: string): string => {
        return path.replace(regEx, replacer);
    };
}

/**
 * Resolve the non-custom-dictionary sections of a settings object:
 * dictionary definitions, language settings, glob lists and the workspace root.
 */
function resolveCoreSettings<T extends CSpellUserSettings>(settings: T, resolver: WorkspacePathResolver): T {
    // Sections
    // - imports
    // - dictionary definitions (also nested in language settings)
    // - globs (ignorePaths and Override filenames)
    // - override dictionaries
    const newSettings: CSpellUserSettings = resolveCustomAndBaseSettings(settings, resolver);
    // There is a more elegant way of doing this, but for now just change each section.
    newSettings.dictionaryDefinitions = resolveDictionaryPathReferences(newSettings.dictionaryDefinitions, resolver);
    newSettings.languageSettings = resolveLanguageSettings(newSettings.languageSettings, resolver);
    newSettings.ignorePaths = resolveGlobArray(newSettings.ignorePaths, resolver.resolveGlob(newSettings.globRoot));
    newSettings.files = resolveGlobArray(newSettings.files, resolver.resolveGlob(newSettings.globRoot));
    newSettings.workspaceRootPath = newSettings.workspaceRootPath
        ? resolver.resolveFile(newSettings.workspaceRootPath)
        : undefined;
    return shallowCleanObject(newSettings) as T;
}

/** Resolve the dictionary-definition paths shared by all BaseSetting shapes. */
function resolveBaseSettings<T extends BaseSetting>(settings: T, resolver: WorkspacePathResolver): T {
    const newSettings = { ...settings };
    newSettings.dictionaryDefinitions = resolveDictionaryPathReferences(newSettings.dictionaryDefinitions, resolver);
    return shallowCleanObject(newSettings);
}

/** Currently just the base-settings resolution; kept as a seam for custom settings. */
function resolveCustomAndBaseSettings<T extends CSpellUserSettings>(settings: T, resolver: WorkspacePathResolver): T {
    const newSettings = resolveBaseSettings(settings, resolver);
    return newSettings;
}

/** Resolve workspace references in the `import` field (string or string[]). */
function resolveImportsToWorkspace(imports: CSpellUserSettings['import'], resolver: WorkspacePathResolver): CSpellUserSettings['import'] {
    if (!imports) return imports;
    const toImport = typeof imports === 'string' ? [imports] : imports;
    return toImport.map(resolver.resolveFile);
}

/** Apply a glob resolver to each entry of an optional glob list. */
function resolveGlobArray(globs: Glob[] | undefined, resolver: WorkspaceGlobResolverFn): undefined | Glob[] {
    if (!globs) return globs;
    return globs.map(resolver);
}

/** Anything that carries an optional `path` (e.g. dictionary definitions). */
interface PathRef {
    path?: string | undefined;
}

/**
 * Resolve workspace references inside the `path` of each definition.
 * Overloaded so the return type mirrors the (possibly undefined) input.
 */
function resolveDictionaryPathReferences<T extends PathRef>(dictDefs: T[], resolver: WorkspacePathResolver): T[];
function resolveDictionaryPathReferences<T extends PathRef>(dictDefs: T[] | undefined, resolver: WorkspacePathResolver): T[] | undefined;
function resolveDictionaryPathReferences<T extends PathRef>(dictDefs: T[] | undefined, resolver: WorkspacePathResolver): T[] | undefined {
    if (!dictDefs) return dictDefs;
    return dictDefs.map((def) => (def.path ? { ...def, path: resolver.resolveFile(def.path) } : def));
}

/** Resolve the nested BaseSetting portion of each language-settings entry. */
function resolveLanguageSettings(
    langSettings: CSpellUserSettings['languageSettings'],
    resolver: WorkspacePathResolver
): CSpellUserSettings['languageSettings'] {
    if (!langSettings) return langSettings;
    return langSettings.map((langSetting) => {
        return shallowCleanObject({ ...resolveBaseSettings(langSetting, resolver) });
    });
}

/** Resolve each override entry: its core settings plus its `filename` glob(s). */
function resolveOverrides(settings: CSpellUserSettings, resolver: WorkspacePathResolver): CSpellUserSettings['overrides'] {
    const { overrides } = settings;
    if (!overrides) return overrides;

    const resolveGlob = resolver.resolveGlob(settings.globRoot);

    // `filename` may be a single glob or a list; resolve either shape.
    function resolve(glob: Glob | Glob[]) {
        if (!glob) return glob;
        return Array.isArray(glob) ? glob.map(resolveGlob) : resolveGlob(glob);
    }

    return overrides.map((src) => {
        const dest = { ...resolveCoreSettings(src, resolver) };
        dest.filename = resolve(dest.filename);
        return shallowCleanObject(dest);
    });
}

/**
 * Delete own keys whose value is `undefined`.
 * Note: mutates `obj` in place and returns the same object.
 */
function shallowCleanObject<T>(obj: T): T {
    if (typeof obj !== 'object') return obj;
    const objMap = obj as { [key: string]: any };
    for (const key of Object.keys(objMap)) {
        if (objMap[key] === undefined) {
            delete objMap[key];
        }
    }
    return obj;
}

// Internal helpers exposed for unit tests only.
export const debugExports = {
    shallowCleanObject,
};
the_stack
import { Component, ViewChild } from '@angular/core';
import { ComponentFixture, TestBed, waitForAsync } from '@angular/core/testing';
import { FormsModule, ReactiveFormsModule } from '@angular/forms';
import { MatIconModule } from '@angular/material/icon';
import { MatSelectModule } from '@angular/material/select';
import { NoopAnimationsModule } from '@angular/platform-browser/animations';
import { DropdownOption, FilterDropdownComponent } from './filter-dropdown.component';

/**
 * Wrapper component that allows passing parameters via data binding, so we can
 * accurately test the behavior in ngOnChanges, which doesn't get triggered by
 * directly setting variables.
 */
@Component({
  selector: 'app-dropdown-test',
  template: `
    <app-filter-dropdown
      [options]="options"
      [selectFirstOption]="selectFirstOption"
      [multi]="multi"
      [selectAll]="selectAll"
      (selectedOptionsChange)="incrementSelectedOptionsChangeEventCount()"
      (customOptionSelected)="incrementCustomOptionCount()"
    >
    </app-filter-dropdown>
  `,
})
class DropdownTestComponent {
  @ViewChild(FilterDropdownComponent)
  dropdownComponent!: FilterDropdownComponent;

  // Inputs forwarded to the dropdown under test.
  options: DropdownOption[] = [];
  selectFirstOption = false;
  multi = false;
  selectAll = false;

  // Counters incremented by the dropdown's output events; asserted in tests.
  customOptionEventCount = 0;
  selectedOptionsChangeEventCount = 0;

  incrementCustomOptionCount() {
    this.customOptionEventCount++;
  }

  incrementSelectedOptionsChangeEventCount() {
    this.selectedOptionsChangeEventCount++;
  }
}

describe('FilterDropdownComponent', () => {
  let testComponent: DropdownTestComponent;
  let component: FilterDropdownComponent;
  let fixture: ComponentFixture<DropdownTestComponent>;

  beforeEach(waitForAsync(() => {
    TestBed.configureTestingModule({
      declarations: [DropdownTestComponent, FilterDropdownComponent],
      imports: [
        FormsModule,
        MatIconModule,
        MatSelectModule,
        NoopAnimationsModule,
        ReactiveFormsModule,
      ],
    }).compileComponents();
  }));

  /**
   * Create the wrapper fixture, assign only the inputs the caller supplied,
   * run change detection, and capture the inner dropdown component.
   */
  function createComponent(
    options?: DropdownOption[],
    selectFirstOption?: boolean,
    multiple?: boolean,
    selectAll?: boolean
  ) {
    fixture = TestBed.createComponent(DropdownTestComponent);
    testComponent = fixture.componentInstance;
    if (options !== undefined) {
      testComponent.options = options;
    }
    if (selectFirstOption !== undefined) {
      testComponent.selectFirstOption = selectFirstOption;
    }
    if (multiple !== undefined) {
      testComponent.multi = multiple;
    }
    if (selectAll !== undefined) {
      testComponent.selectAll = selectAll;
    }
    fixture.detectChanges();
    component = testComponent.dropdownComponent;
  }

  it('should create', () => {
    createComponent();
    expect(component).toBeTruthy();
  });

  it('Emits event for custom option selected', async () => {
    const options = [
      { displayText: 'puppies' },
      { displayText: 'kittens' },
      { displayText: 'bunnies' },
      { displayText: 'your favorite animal', customOption: true },
      { displayText: 'dragons' },
    ];
    createComponent(options, true);
    fixture.detectChanges();
    expect(component.selectedOption.value).toEqual(options[0]);
    expect(testComponent.customOptionEventCount).toEqual(0);

    // Single-select mode: the event fires when the panel closes with the
    // custom option selected, so open, select, then close.
    component.selectForTesting.open();
    fixture.detectChanges();
    // Wait for everything to update before continuing, otherwise the open event
    // will not fire with the correct value.
    await fixture.whenStable();
    fixture.detectChanges();

    component.selectedOption.setValue(component.options[3]);
    fixture.detectChanges();
    component.selectForTesting.close();
    fixture.detectChanges();
    await fixture.whenStable();
    fixture.detectChanges();
    expect(testComponent.customOptionEventCount).toEqual(1);

    // Select the custom option again, and make sure the event still fires.
    component.selectForTesting.open();
    fixture.detectChanges();
    await fixture.whenStable();
    fixture.detectChanges();

    component.selectedOption.setValue(component.options[3]);
    fixture.detectChanges();
    component.selectForTesting.close();
    fixture.detectChanges();
    await fixture.whenStable();
    fixture.detectChanges();
    expect(testComponent.customOptionEventCount).toEqual(2);
  });

  it('Initializes with selectFirstOption = true', () => {
    const options = [
      { displayText: 'puppies' },
      { displayText: 'kittens' },
      { displayText: 'bunnies' },
      { displayText: 'your favorite animal', customOption: true },
      { displayText: 'dragons' },
    ];
    createComponent(options, true);
    expect(component.selectedOption.value).toEqual(options[0]);
  });

  it('Initializes with setFirstOption = false', () => {
    const options = [
      { displayText: 'puppies' },
      { displayText: 'kittens' },
      { displayText: 'bunnies' },
      { displayText: 'your favorite animal', customOption: true },
      { displayText: 'dragons' },
    ];
    createComponent(options, false);
    // No initial selection expected in this mode.
    expect(component.selectedOption.value).toEqual(null);
  });

  it('Multiple select: Initializes with selectFirstOption = true', () => {
    const options = [
      { displayText: 'puppies' },
      { displayText: 'kittens' },
      { displayText: 'bunnies' },
      { displayText: 'your favorite animal', customOption: true },
      { displayText: 'dragons' },
    ];
    createComponent(options, true, true);
    expect(component.selectedOptions.value).toEqual([options[0]]);
  });

  it('Multiple select: Initializes with setFirstOption = false', () => {
    const options = [
      { displayText: 'puppies' },
      { displayText: 'kittens' },
      { displayText: 'bunnies' },
      { displayText: 'your favorite animal', customOption: true },
      { displayText: 'dragons' },
    ];
    createComponent(options, false, true);
    expect(component.selectedOptions.value).toEqual([]);
  });

  it('Multiple select: Initializes with selectAll = true', () => {
    const options = [
      { displayText: 'puppies' },
      { displayText: 'kittens' },
      { displayText: 'bunnies' },
      { displayText: 'your favorite animal', customOption: true },
      { displayText: 'dragons' },
    ];
    createComponent(options, false, true, true);
    // Every non-custom option (indices 0-2 and 4) should be pre-selected.
    expect(component.selectedOptions.value).toEqual([
      options[0],
      options[1],
      options[2],
      options[4],
    ]);
  });

  it('Multiple select: Initializes with selectAll = true overrides selectFirstOption = true', () => {
    const options = [
      { displayText: 'puppies' },
      { displayText: 'kittens' },
      { displayText: 'bunnies' },
      { displayText: 'your favorite animal', customOption: true },
      { displayText: 'dragons' },
    ];
    createComponent(options, true, true, true);
    expect(component.selectedOptions.value).toEqual([
      options[0],
      options[1],
      options[2],
      options[4],
    ]);
  });

  it('Multiple select: Deselects non-custom option when custom option selected', async () => {
    const options = [
      { displayText: 'puppies' },
      { displayText: 'kittens' },
      { displayText: 'bunnies' },
      { displayText: 'your favorite animal', customOption: true },
      { displayText: 'dragons' },
    ];
    createComponent(options, true, true);
    // Check that the first option is selected.
    expect(component.selectedOptions.value).toEqual([options[0]]);

    // Set values to the already selected first option plus the custom option to
    // mimic clicking on the custom option in addition to the first option in a
    // multi select.
    component.selectedOptions.setValue([
      component.options[0],
      component.options[3],
    ]);
    // Only the custom option should be selected.
    expect(component.selectedOptions.value).toEqual([component.options[3]]);
  });

  it('Multiple select: Deselects custom option when non custom option selected', async () => {
    const options = [
      { displayText: 'puppies' },
      { displayText: 'kittens' },
      { displayText: 'bunnies' },
      { displayText: 'your favorite animal', customOption: true },
      { displayText: 'dragons' },
    ];
    createComponent(options, false, true);
    fixture.detectChanges();
    expect(component.selectedOptions.value).toEqual([]);

    // Set values to the custom option.
    component.selectedOptions.setValue([component.options[3]]);
    // The custom option should be selected.
    expect(component.selectedOptions.value).toEqual([component.options[3]]);

    // Set values to the already selected custom option plus the first option to
    // mimic clicking on the first option in addition to the custom option in a
    // multi select.
    component.selectedOptions.setValue([
      component.options[0],
      component.options[3],
    ]);
    // Only the first option should be selected.
    expect(component.selectedOptions.value).toEqual([component.options[0]]);
  });

  it('Multiple select: Allows selecting multiple non custom options', async () => {
    const options = [
      { displayText: 'puppies' },
      { displayText: 'kittens' },
      { displayText: 'bunnies' },
      { displayText: 'your favorite animal', customOption: true },
      { displayText: 'dragons' },
    ];
    createComponent(options, true, true);
    fixture.detectChanges();
    // Check that the first option is selected.
    expect(component.selectedOptions.value).toEqual([options[0]]);

    // Set values to the already selected first option plus another option to
    // mimic clicking on another option in addition to the first option in a
    // multi select.
    component.selectedOptions.setValue([
      component.options[0],
      component.options[1],
    ]);
    fixture.detectChanges();
    // Check that the first two options are selected.
    expect(component.selectedOptions.value).toEqual([
      component.options[0],
      component.options[1],
    ]);
  });

  it('Multiple select: Emits event for custom option selected', async () => {
    const options = [
      { displayText: 'puppies' },
      { displayText: 'kittens' },
      { displayText: 'bunnies' },
      { displayText: 'your favorite animal', customOption: true },
      { displayText: 'dragons' },
    ];
    createComponent(options, true, true);
    fixture.detectChanges();
    // Check that the first option is selected.
    expect(component.selectedOptions.value).toEqual([options[0]]);
    expect(testComponent.customOptionEventCount).toEqual(0);

    // Set values to the already selected option plus the custom option to mimic
    // clicking on the custom option in addition to the first option in a multi
    // select.
    component.selectedOptions.setValue([
      component.options[0],
      component.options[3],
    ]);
    fixture.detectChanges();
    // Only the custom option should be selected.
    expect(component.selectedOptions.value).toEqual([component.options[3]]);
    expect(testComponent.customOptionEventCount).toEqual(1);

    // Select the custom option again, and make sure the event still fires.
    component.selectedOptions.setValue([component.options[3]]);
    expect(testComponent.customOptionEventCount).toEqual(2);
  });

  it('Multiple select: Emits event for non custom option selected', async () => {
    const options = [
      { displayText: 'puppies' },
      { displayText: 'kittens' },
      { displayText: 'bunnies' },
      { displayText: 'your favorite animal', customOption: true },
      { displayText: 'dragons' },
    ];
    createComponent(options, true, true);
    fixture.detectChanges();
    // Check that the first option is selected.
    expect(component.selectedOptions.value).toEqual([options[0]]);
    // The initial selection for selectFirstOption = true counts as one event.
    expect(testComponent.selectedOptionsChangeEventCount).toEqual(1);

    // Set values to the already selected option plus the custom option to mimic
    // clicking on the custom option in addition to the first option in a multi
    // select.
    component.selectedOptions.setValue([
      component.options[0],
      component.options[3],
    ]);
    fixture.detectChanges();
    // Only the custom option should be selected.
    expect(component.selectedOptions.value).toEqual([component.options[3]]);
    // No additional events from the custom option.
    expect(testComponent.selectedOptionsChangeEventCount).toEqual(1);

    // Select a non-custom option.
    component.selectedOptions.setValue([component.options[0]]);
    expect(component.selectedOptions.value).toEqual([component.options[0]]);
    expect(testComponent.selectedOptionsChangeEventCount).toEqual(2);

    // Select another non-custom option.
    component.selectedOptions.setValue([
      component.options[0],
      component.options[1],
    ]);
    expect(component.selectedOptions.value).toEqual([
      component.options[0],
      component.options[1],
    ]);
    expect(testComponent.selectedOptionsChangeEventCount).toEqual(3);
  });
});
the_stack
namespace com.keyman.osk {
  // Partial rectangle spec accepted by setRect(); all fields optional.
  type OSKRect = {'left'?: number, 'top'?: number, 'width'?: number, 'height'?: number, 'nosize'?: boolean, 'nomove'?: boolean};
  // Partial position spec used by getPos()/setPos().
  type OSKPos = {'left'?: number, 'top'?: number};

  /**
   * Desktop-style on-screen keyboard view that floats over the page in an
   * absolutely-positioned box. Persists its size, position and visibility in
   * the 'KeymanWeb_OnScreenKeyboard' cookie, and supports user drag/resize via
   * a TargetedFloatLayout (title bar + resize bar).
   *
   * Methods declared with computed names (e.g. ['setRect']) are exported
   * verbatim for legacy external callers — do not rename them.
   */
  export class FloatingOSKView extends OSKView {
    readonly desktopLayout: layouts.TargetedFloatLayout;

    // OSK positioning fields
    userPositioned: boolean = false;      // true once the user has dragged the OSK
    specifiedPosition: boolean = false;   // transient flag for presentAtPosition()
    x: number;                            // persisted left coordinate (px)
    y: number;                            // persisted top coordinate (px)
    noDrag: boolean = false;              // true disables user dragging (setRect 'nomove')
    dfltX: string;                        // default CSS left set by setRect, e.g. '10px'
    dfltY: string;                        // default CSS top set by setRect

    // Key code definition aliases for legacy keyboards (They expect window['keyman']['osk'].___)
    modifierCodes = text.Codes.modifierCodes;
    modifierBitmasks = text.Codes.modifierBitmasks;
    stateBitmasks = text.Codes.stateBitmasks;
    keyCodes = text.Codes.keyCodes;

    public constructor(modeledDevice: utils.DeviceSpec) {
      super(modeledDevice);

      // The floating OSK lives directly under <body>.
      document.body.appendChild(this._Box);
      // Restore persisted size/position/visibility before building the layout.
      this.loadCookie();

      // Add header element to OSK only for desktop browsers
      const layout = this.desktopLayout = new layouts.TargetedFloatLayout();
      this.headerView = layout.titleBar;
      layout.titleBar.attachHandlers(this);
    }

    /**
     * Function     _Unload
     * Scope        Private
     * Description  Clears OSK variables prior to exit (JMD 1.9.1 - relocation of local variables 3/9/10)
     */
    _Unload() {
      this.keyboardView = null;
      this.bannerView = null;
      this._Box = null;
    }

    /** Applies the fixed CSS baseline for the floating box (hidden, absolute, topmost). */
    protected setBoxStyling() {
      const s = this._Box.style;
      s.zIndex   = '9999';
      s.display  = 'none';
      s.width    = 'auto';
      s.position = 'absolute';
    }

    /**
     * Rebuilds the header/footer chrome after a keyboard (re)load, restores the
     * persisted geometry, and re-presents the OSK if it should be visible.
     */
    protected postKeyboardLoad() {
      this._Visible = false;  // I3363 (Build 301)

      this._Box.onmouseover = this._VKbdMouseOver;
      this._Box.onmouseout = this._VKbdMouseOut;

      // Add header element to OSK only for desktop browsers
      const layout = this.desktopLayout;
      layout.attachToView(this);
      this.desktopLayout.titleBar.setTitleFromKeyboard(this.activeKeyboard);

      // Only a visual keyboard (vkbd) gets the resize bar as a footer.
      if(this.vkbd) {
        this.footerView = layout.resizeBar;
        this._Box.appendChild(this.footerView.element);
      } else {
        if(this.footerView) {
          this._Box.removeChild(this.footerView.element);
        }
        this.footerView = null;
      }

      this.loadCookie();
      this.setNeedsLayout();

      if(this.displayIfActive) {
        this.present();
      }
    }

    /**
     * Function     restorePosition
     * Scope        Public
     * @param       {boolean?}      keepDefaultPosition  If true, does not reset the default x,y set by `setRect`.
     *                                                   If false or omitted, resets the default x,y as well.
     * Description  Move OSK back to default position, floating under active input element
     */
    ['restorePosition']: (keepDefaultPosition?: boolean) => void = function(this: FloatingOSKView, keepDefaultPosition?: boolean) {
      let isVisible = this._Visible;
      if(isVisible && this.activeTarget instanceof dom.targets.OutputTarget) {
        this.activeTarget?.focus();  // I2036 - OSK does not unpin to correct location
      }

      this.loadCookie();
      this.userPositioned=false;
      if(!keepDefaultPosition) {
        delete this.dfltX;
        delete this.dfltY;
      }
      this.saveCookie();

      if(isVisible) {
        this.present();
      }

      this.doResizeMove(); //allow the UI to respond to OSK movements
      this.desktopLayout.titleBar.showPin(false);
    }.bind(this);

    /**
     * Function     enabled
     * Scope        Public
     * @return      {boolean|number}    True if KMW OSK enabled
     * Description  Test if KMW OSK is enabled
     */
    ['isEnabled'](): boolean {
      return this.displayIfActive;
    }

    /**
     * Function     isVisible
     * Scope        Public
     * @return      {boolean|number}    True if KMW OSK visible
     * Description  Test if KMW OSK is actually visible
     * Note that this will usually return false after any UI event that results in (temporary) loss of input focus
     */
    ['isVisible'](): boolean {
      return this._Visible;
    }

    /**
     * Function     _VKbdMouseOver
     * Scope        Private
     * @param       {Object}      e      event
     * Description  Activate the KMW UI on mouse over
     * NOTE(review): the `this: AnchoredOSKView` annotation looks copy-pasted
     * from the anchored variant — the body only touches the singleton, so it is
     * harmless, but FloatingOSKView would be the accurate type. Confirm.
     */
    private _VKbdMouseOver = function(this: AnchoredOSKView, e) {
      com.keyman.singleton.uiManager.setActivatingUI(true);
    }.bind(this);

    /**
     * Function     _VKbdMouseOut
     * Scope        Private
     * @param       {Object}      e      event
     * Description  Cancel activation of KMW UI on mouse out
     * NOTE(review): same `this: AnchoredOSKView` annotation oddity as above.
     */
    private _VKbdMouseOut = function(this: AnchoredOSKView, e) {
      com.keyman.singleton.uiManager.setActivatingUI(false);
    }.bind(this);

    /**
     * Save size, position, font size and visibility of OSK into the
     * 'KeymanWeb_OnScreenKeyboard' cookie. Width/height are only persisted
     * when a visual keyboard is active.
     */
    saveCookie() {
      let util = com.keyman.singleton.util;
      var c = util.loadCookie('KeymanWeb_OnScreenKeyboard');
      var p = this.getPos();

      c['visible'] = this.displayIfActive ? 1 : 0;
      c['userSet'] = this.userPositioned ? 1 : 0;
      c['left'] = p.left;
      c['top'] = p.top;
      // Version stamp lets loadCookie distinguish pre-15.0 cookies whose
      // height excluded the title/resize bars.
      c['_version'] = utils.Version.CURRENT.toString();

      if(this.vkbd) {
        c['width'] = this.width.val;
        c['height'] = this.height.val;
      }

      util.saveCookie('KeymanWeb_OnScreenKeyboard',c);
    }

    /**
     * Restore size, position, font size and visibility of desktop OSK from the
     * cookie, clamping dimensions to sane fractions of the screen. Returns
     * nothing; all results are applied to instance state / setSize / setPos.
     */
    loadCookie(): void {
      let util = com.keyman.singleton.util;
      var c = util.loadCookie('KeymanWeb_OnScreenKeyboard');

      // Missing cookie fields fall back to: visible, not user-positioned, x/y = -1.
      this.displayIfActive = util.toNumber(c['visible'], 1) == 1;
      this.userPositioned = util.toNumber(c['userSet'], 0) == 1;
      this.x = util.toNumber(c['left'],-1);
      this.y = util.toNumber(c['top'],-1);

      let cookieVersionString = c['_version'];

      // Restore OSK size - font size now fixed in relation to OSK height, unless overridden (in em) by keyboard
      let dfltWidth=0.3*screen.width;
      let dfltHeight=0.15*screen.height;
      //if(util.toNumber(c['width'],0) == 0) dfltWidth=0.5*screen.width;
      let newWidth = parseInt(c['width'], 10);
      let newHeight = parseInt(c['height'], 10);
      // A cookie with no stored height is treated as brand-new.
      let isNewCookie = isNaN(newHeight);

      newWidth = isNaN(newWidth) ? dfltWidth : newWidth;
      newHeight = isNaN(newHeight) ? dfltHeight : newHeight;

      // Limit the OSK dimensions to reasonable values
      if(newWidth < 0.2*screen.width) {
        newWidth = 0.2*screen.width;
      }
      if(newHeight < 0.1*screen.height) {
        newHeight = 0.1*screen.height;
      }
      if(newWidth > 0.9*screen.width) {
        newWidth=0.9*screen.width;
      }
      if(newHeight > 0.5*screen.height) {
        newHeight=0.5*screen.height;
      }

      // if(!cookieVersionString) - this component was not tracked until 15.0.
      // Before that point, the OSK's title bar and resize bar heights were not included
      // in the OSK's cookie-persisted height.
      if(isNewCookie || !cookieVersionString) {
        // Adds some space to account for the OSK's header and footer, should they exist.
        if(this.headerView && this.headerView.layoutHeight.absolute) {
          newHeight += this.headerView.layoutHeight.val;
        }

        if(this.footerView && this.footerView.layoutHeight.absolute) {
          newHeight += this.footerView.layoutHeight.val;
        }
      }

      this.setSize(newWidth, newHeight);

      // and OSK position if user located
      if(this.x == -1 || this.y == -1 || (!this._Box)) {
        this.userPositioned = false;
      }

      // Keep at least 20% of the OSK on-screen horizontally.
      if(this.x < window.pageXOffset-0.8*newWidth) {
        this.x=window.pageXOffset-0.8*newWidth;
      }
      // NOTE(review): resetting this.x here (inside the y<0 branch) looks
      // intentional — an off-screen-top OSK discards the whole saved position —
      // but confirm this is not a typo for this.y only.
      if(this.y < 0) {
        this.x=-1;
        this.y=-1;
        this.userPositioned=false;
      }

      if(this.userPositioned && this._Box) {
        this.setPos({'left': this.x, 'top': this.y});
      }
    }

    /**
     * Get the wanted height of the OSK for touch devices (does not include banner height)
     * @return  {number}    height in pixels
     **/
    getDefaultKeyboardHeight(): number {
      let keymanweb = com.keyman.singleton;
      let device = keymanweb.util.device;

      // KeymanTouch - get OSK height from device
      if(typeof(keymanweb['getOskHeight']) == 'function') {
        return keymanweb['getOskHeight']();
      }

      var oskHeightLandscapeView=Math.floor(Math.min(screen.availHeight,screen.availWidth)/2), height=oskHeightLandscapeView;

      if(device.formFactor == 'phone') {
        var sx=Math.min(screen.height,screen.width), sy=Math.max(screen.height,screen.width);
        if(keymanweb.util.portraitView())
          height=Math.floor(Math.max(screen.availHeight,screen.availWidth)/3);
        else
          height=height*(sy/sx)/1.6;  //adjust for aspect ratio, increase slightly for iPhone 5
      }

      // Correct for viewport scaling (iOS - Android 4.2 does not want this, at least on Galaxy Tab 3))
      if(device.OS == 'iOS') {
        height=height/keymanweb.util.getViewportScale();
      }

      return height;
    }

    /**
     * Get the wanted width of the OSK for touch devices
     *
     * @return  {number}    height in pixels
     **/
    getDefaultWidth(): number {
      let keymanweb = com.keyman.singleton;
      let device = keymanweb.util.device;

      // KeymanTouch - get OSK height from device
      if(typeof(keymanweb['getOskWidth']) == 'function') {
        return keymanweb['getOskWidth']();
      }

      var width: number;
      if(device.OS == 'iOS') {
        // iOS does not interchange these values when the orientation changes!
        //width = util.portraitView() ? screen.width : screen.height;
        width = window.innerWidth;
      } else if(device.OS == 'Android') {
        try {
          width=document.documentElement.clientWidth;
        } catch(ex) {
          width=screen.availWidth;
        }
      } else {
        width=screen.width;
      }

      return width;
    }

    /**
     * Allow UI to update OSK position and properties
     *
     * @param       {Object=}     p       object with coordinates and userdefined flag
     *
     */
    doResizeMove(p?) {
      return com.keyman.singleton.util.callEvent('osk.resizemove',p);
    }

    /**
     * Allow the UI or page to set the position and size of the OSK
     * and (optionally) override user repositioning or sizing
     *
     * Desktop-only; a no-op when the box is missing or the form factor is not
     * 'desktop'. Persists the result via saveCookie().
     *
     * @param       {Object.<string,number>}   p  Array object with position and size of OSK container
     **/
    ['setRect'](p: OSKRect) {
      let util = com.keyman.singleton.util;
      if(this._Box == null || util.device.formFactor != 'desktop') {
        return;
      }

      var b = this._Box, bs = b.style;
      if('left' in p) {
        // Convert the requested page coordinate to a CSS offset for the box.
        this.x = p['left'] - dom.Utils.getAbsoluteX(b) + b.offsetLeft;
        bs.left= this.x + 'px';
        this.dfltX=bs.left;
      }
      if('top' in p) {
        this.y = p['top'] - dom.Utils.getAbsoluteY(b) + b.offsetTop;
        bs.top = this.y + 'px';
        this.dfltY=bs.top;
      }

      //Do not allow user resizing for non-standard keyboards (e.g. EuroLatin)
      if(this.vkbd != null) {
        var d=this.vkbd.kbdDiv, ds=d.style;

        // Set width, but limit to reasonable value
        if('width' in p) {
          // Subtract the box chrome so 'width' means the outer width requested.
          var w=(p['width']-(b.offsetWidth-d.offsetWidth));
          if(w < 0.2*screen.width) {
            w=0.2*screen.width;
          }
          if(w > 0.9*screen.width) {
            w=0.9*screen.width;
          }
          ds.width=w+'px';

          // Use of the `computed` variant is here temporary.
          // Shouldn't use `setSize` for this in the long-term.
          this.setSize(w, this.computedHeight, true);
        }

        // Set height, but limit to reasonable value
        // This sets the default font size for the OSK in px, but that
        // can be modified at the key text level by setting
        // the font size in em in the kmw-key-text class
        if('height' in p) {
          var h=(p['height']-(b.offsetHeight-d.offsetHeight));
          if(h < 0.1*screen.height) {
            h=0.1*screen.height;
          }
          if(h > 0.5*screen.height) {
            h=0.5*screen.height;
          }
          ds.height=h+'px';
          // Font size scales with OSK height (fixed 1:8 ratio).
          ds.fontSize=(h/8)+'px';

          // Use of the `computed` variant is here temporary.
          // Shouldn't use `setSize` for this in the long-term.
          this.setSize(this.computedWidth, h, true);
        }

        // Fix or release user resizing
        if('nosize' in p) {
          this.desktopLayout.resizingEnabled = !p['nosize'];
        }
      }

      // Fix or release user dragging
      if('nomove' in p) {
        this.noDrag=p['nomove'];
        this.desktopLayout.movementEnabled = !this.noDrag;
      }

      // Save the user-defined OSK size
      this.saveCookie();
    }

    /**
     * Get position of OSK window
     *
     * When visible, reports the live DOM offsets; otherwise the persisted x/y.
     *
     * @return      {Object.<string,number>}     Array object with OSK window position
     **/
    getPos(): OSKPos {
      var Lkbd=this._Box, p={
        left: this._Visible ? Lkbd.offsetLeft : this.x,
        top: this._Visible ? Lkbd.offsetTop : this.y
      };

      return p;
    }

    /**
     * Function     setPos
     * Scope        Private
     * @param       {Object.<string,number>}    p     Array object with OSK left, top
     * Description  Set position of OSK window, but limit to screen, and ignore if a touch input device
     */
    ['setPos'](p: OSKPos) {
      if(typeof(this._Box) == 'undefined') {
        return; // I3363 (Build 301)
      }

      // Position is only honored once the user has taken control of placement.
      if(this.userPositioned) {
        var Px=p['left'], Py=p['top'];

        if(typeof(Px) != 'undefined') {
          // Clamp so at least 20% of the OSK width remains on-screen.
          if(Px < -0.8*this._Box.offsetWidth) {
            Px = -0.8*this._Box.offsetWidth;
          }
          if(this.userPositioned) {
            this._Box.style.left=Px+'px';
            this.x = Px;
          }
        }
        // May not be needed - vertical positioning is handled differently and defaults to input field if off screen
        if(typeof(Py) != 'undefined') {
          if(Py < 0) {
            Py = 0;
          }
          if(this.userPositioned) {
            this._Box.style.top=Py+'px';
            this.y = Py;
          }
        }
      }

      if(this.desktopLayout) {
        this.desktopLayout.titleBar.showPin(this.userPositioned);
      }
    }

    /**
     * Applies the CSS position for the box: a user/explicitly set x,y when
     * available, otherwise anchored under the active input element or the
     * setRect defaults.
     */
    public setDisplayPositioning() {
      var Ls = this._Box.style;

      Ls.position='absolute';
      Ls.display='block'; //Ls.visibility='visible';
      Ls.left='0px';

      if(this.specifiedPosition || this.userPositioned) {
        Ls.left = this.x+'px';
        Ls.top  = this.y+'px';
      } else {
        let el: HTMLElement = null;
        if(this.activeTarget instanceof dom.targets.OutputTarget) {
          el = this.activeTarget?.getElement();
        }

        if(this.dfltX) {
          Ls.left=this.dfltX;
        } else if(typeof el != 'undefined' && el != null) {
          Ls.left=dom.Utils.getAbsoluteX(el) + 'px';
        }

        if(this.dfltY) {
          Ls.top=this.dfltY;
        } else if(typeof el != 'undefined' && el != null) {
          // Default: float directly below the active input element.
          Ls.top=(dom.Utils.getAbsoluteY(el) + el.offsetHeight)+'px';
        }
      }

      // Unset the flag, keeping 'specified position' specific to single
      // presentAtPosition calls.
      this.specifiedPosition = false;
    }

    /**
     * Display KMW OSK at specified position (returns nothing)
     *
     * @param       {number=}     Px      x-coordinate for OSK rectangle
     * @param       {number=}     Py      y-coordinate for OSK rectangle
     */
    presentAtPosition(Px?: number, Py?: number) {
      if(!this.mayShow()) {
        return;
      }

      this.specifiedPosition = Px >= 0 || Py >= 0; //probably never happens, legacy support only
      if(this.specifiedPosition) {
        this.x = Px;
        this.y = Py;
      }

      // Combines the two paths with set positioning.
      this.specifiedPosition = this.specifiedPosition || this.userPositioned;

      this.present();
    }

    /** Shows the OSK, updates the pin indicator, and notifies the page UI via 'osk.show'. */
    present() {
      if(!this.mayShow()) {
        return;
      }

      this.desktopLayout.titleBar.showPin(this.userPositioned);

      super.present();

      // Allow desktop UI to execute code when showing the OSK
      var Lpos={};
      Lpos['x']=this._Box.offsetLeft;
      Lpos['y']=this._Box.offsetTop;
      Lpos['userLocated']=this.userPositioned;
      this.doShow(Lpos);
    }

    public startHide(hiddenByUser: boolean) {
      super.startHide(hiddenByUser);

      if(hiddenByUser) {
        this.saveCookie();  // Save current OSK state, size and position (desktop only)
      }
    }

    /**
     * Function     userPositioned
     * Scope        Public
     * @return      {(boolean|number)}          true if user located
     * Description  Test if OSK window has been repositioned by user
     */
    ['userLocated']() {
      return this.userPositioned;
    }
  }
}
the_stack
import { IComponent } from '../types';
import { Vnode, TAttributes } from "../vnode";
import { isFromVM, buildProps, argumentsIsReady, getVMFunctionArguments, getValueByValue, getVMVal, getVMFunction, setVMVal, valueIsReady } from './utils';
import { ChangeDetectionStrategy } from '../component';
import { buildPipeScope } from './compiler-utils';

/**
 * compile util for nv-repeat DOM
 *
 * Resolves directive expressions inside an nv-repeat scope. Expressions are
 * classified, in strict order, as: function call, repeat-key reference, VM
 * property, `$index`, string/number/boolean/null/undefined literal, or a
 * reference into another repeat variable. Branch order is load-bearing.
 *
 * @export
 * @class CompileRepeatUtil
 */
export class CompileRepeatUtil {
  // Vnode fragment this util operates on (optional).
  public fragment?: Vnode[];

  /**
   * Creates an instance of CompileRepeatUtil.
   *
   * @param {Vnode[]} [fragment]
   * @memberof CompileRepeatUtil
   */
  constructor(fragment?: Vnode[]) {
    this.fragment = fragment;
  }

  /**
   * get value by repeat value
   *
   * Walks a dotted path (`key.a.b`) starting from the current repeat item
   * `val`; any `(...)` call suffix is stripped before splitting.
   *
   * @param {*} val
   * @param {string} exp
   * @param {string} key
   * @returns {*}
   * @memberof CompileRepeatUtil
   */
  public _getVMRepeatVal(val: any, exp: string, key: string): any {
    let value: any;
    const valueList = exp.replace(/\(.*\)/, '').split('.');
    valueList.forEach((v, index) => {
      // The leading segment equal to the repeat key resolves to the item itself.
      if (v === key && index === 0) {
        value = val;
        return;
      }
      value = value[v];
    });
    return value;
  }

  /**
   * set value by key and anthor value
   *
   * Writes `setValue` at the dotted path `exp` (rooted at the repeat key)
   * inside `vm`.
   *
   * @param {*} vm
   * @param {string} exp
   * @param {string} key
   * @param {*} setValue
   * @returns {*}
   * @memberof CompileRepeatUtil
   */
  public _setValueByValue(vm: any, exp: string, key: string, setValue: any): any {
    const valueList = exp.replace(/\(.*\)/, '').split('.');
    let value = vm;
    let lastKey;
    valueList.forEach((v, index) => {
      if (v === key && index === 0) return lastKey = v;
      // NOTE(review): `index < valueList.length` is always true here, so every
      // segment updates lastKey (the final one wins) — presumably intentional,
      // but confirm the guard wasn't meant to be `length - 1`.
      if (index < valueList.length) lastKey = v;
      if (index < valueList.length - 1) value = value[v];
    });
    if (lastKey) value[lastKey] = setValue;
  }

  /**
   * bind handler for nv irective
   *
   * Resolves `exp` within the repeat scope (see class doc for branch order),
   * then dispatches to the matching updater for `dir`.
   *
   * @param {Vnode} vnode
   * @param {string} [key]
   * @param {string} [dir]
   * @param {string} [exp]
   * @param {number} [index]
   * @param {*} [vm]
   * @param {*} [watchValue]
   * @memberof CompileRepeatUtil
   */
  public bind(vnode: Vnode, key?: string, dir?: string, exp?: string, index?: number, vm?: any, watchValue?: any, val?: any): void {
    const repeatValue = vnode.repeatData[key];
    let value;
    // `exp` may carry pipes: "value | pipeName: arg1: arg2".
    const needCompileStringList = exp.split('|').map(v => v.trim());
    const needCompileValue = needCompileStringList[0];

    if (/^.*\(.*\)$/.test(needCompileValue)) {
      if (dir === 'model') throw new Error(`directive: nv-model can't use ${needCompileValue} as value`);
      // if Function() need function return value
      const fn = getVMFunction(vm, needCompileValue);
      const argsList = getVMFunctionArguments(vm, needCompileValue, vnode);
      value = this.pipeHandler(exp, fn.apply(vm, argsList), needCompileStringList, vm, vnode);
      // repeat value
    } else if (needCompileValue === key || needCompileValue.indexOf(`${key}.`) === 0) {
      value = this.pipeHandler(exp, this._getVMRepeatVal(repeatValue, needCompileValue, key), needCompileStringList, vm, vnode);
    }
    // normal value
    else if (isFromVM(vm, needCompileValue)) value = this.pipeHandler(exp, getVMVal(vm, needCompileValue), needCompileStringList, vm, vnode);
    else if (needCompileValue === '$index') value = index;
    else if (/^\'.*\'$/.test(needCompileValue)) value = this.pipeHandler(exp, needCompileValue.match(/^\'(.*)\'$/)[1], needCompileStringList, vm, vnode);
    else if (/^\".*\"$/.test(needCompileValue)) value = this.pipeHandler(exp, needCompileValue.match(/^\"(.*)\"$/)[1], needCompileStringList, vm, vnode);
    else if (!/^\'.*\'$/.test(needCompileValue) && !/^\".*\"$/.test(needCompileValue) && /(^[-,+]?\d+$)|(^[-, +]?\d+\.\d+$)/g.test(needCompileValue)) value = this.pipeHandler(exp, Number(needCompileValue), needCompileStringList, vm, vnode);
    else if (needCompileValue === 'true' || needCompileValue === 'false') value = this.pipeHandler(exp, (needCompileValue === 'true'), needCompileStringList, vm, vnode);
    else if (needCompileValue === 'null') value = this.pipeHandler(exp, null, needCompileStringList, vm, vnode);
    else if (needCompileValue === 'undefined') value = this.pipeHandler(exp, undefined, needCompileStringList, vm, vnode);
    else if (vnode.repeatData) {
      // Reference into some other repeat variable in scope.
      Object.keys(vnode.repeatData).forEach(data => {
        if (needCompileValue === data || needCompileValue.indexOf(`${data}.`) === 0) value = this.pipeHandler(exp, getValueByValue(vnode.repeatData[data], needCompileValue, data), needCompileStringList, vm, vnode);
      });
    } else throw new Error(`directive: nv-${dir} can't use recognize this value ${needCompileValue}`);

    // Leaf nodes may still carry {{...}} templates to interpolate.
    if (!vnode.childNodes || vnode.childNodes.length === 0) this.templateUpdater(vnode, repeatValue, key, vm);

    switch (dir) {
      case 'model': {
        let watchData;
        // If the observed data comes from the repeat key currently being rendered
        if (needCompileValue === key || needCompileValue.indexOf(`${key}.`) === 0) {
          watchData = watchValue;
          this.modelUpdater(vnode, value, needCompileValue, key, index, watchData, vm);
          // If the observed data comes from the current component instance
        } else if (isFromVM(vm, needCompileValue)) {
          watchData = this.pipeHandler(exp, getVMVal(vm, needCompileValue), needCompileStringList, vm, vnode);
          this.modelUpdater(vnode, value, needCompileValue, key, index, watchData, vm);
        }
        break;
      }
      case 'text': {
        this.textUpdater(vnode, value);
        break;
      }
      case 'if': {
        this.ifUpdater(vnode, value, vm);
        break;
      }
      case 'if-not': {
        this.ifNotUpdater(vnode, value, vm);
        break;
      }
      case 'class': {
        this.classUpdater(vnode, value);
        break;
      }
      case 'key': {
        this.keyUpdater(vnode, value);
        break;
      }
      case 'value': {
        this.valueUpdater(vnode, value);
        break;
      }
      default:
        this.commonUpdater(vnode, value, dir);
    }
  }

  /**
   * update text for {{}}
   *
   * Interpolates every `{{expr}}` occurrence in the node's text, resolving
   * each expression the same way bind() does and replacing it in nodeValue.
   *
   * @param {Vnode} vnode
   * @param {*} [val]
   * @param {string} [key]
   * @param {*} [vm]
   * @memberof CompileRepeatUtil
   */
  public templateUpdater(vnode: Vnode, val?: any, key?: string, vm?: any): void {
    const text = vnode.nodeValue;
    const reg = /\{\{(.*)\}\}/g;
    if (reg.test(text)) {
      const textList = text.match(/(\{\{[^\{\}]+?\}\})/g);
      if (textList && textList.length > 0) {
        for (let i = 0; i < textList.length; i++) {
          const exp = textList[i].replace('{{', '').replace('}}', '');
          const needCompileStringList = exp.split('|').map(v => v.trim());
          const needCompileValue = needCompileStringList[0];
          if (/^.*\(.*\)$/.test(needCompileValue) && argumentsIsReady(needCompileValue, vnode, vm)) {
            const fn = getVMFunction(vm, needCompileValue);
            const argsList = getVMFunctionArguments(vm, needCompileValue, vnode);
            const fromVmValue = this.pipeHandler(textList[i], fn.apply(vm, argsList), needCompileStringList, vm, vnode);
            vnode.nodeValue = vnode.nodeValue.replace(textList[i], fromVmValue);
          } else if (needCompileValue === key || needCompileValue.indexOf(`${key}.`) === 0) {
            const fromVmValue = this.pipeHandler(textList[i], this._getVMRepeatVal(val, needCompileValue, key), needCompileStringList, vm, vnode);
            vnode.nodeValue = vnode.nodeValue.replace(textList[i], fromVmValue);
          } else if (isFromVM(vm, needCompileValue)) {
            const fromVmValue = this.pipeHandler(textList[i], getVMVal(vm, needCompileValue), needCompileStringList, vm, vnode);
            vnode.nodeValue = vnode.nodeValue.replace(textList[i], fromVmValue);
          } else if (vnode.repeatData) {
            Object.keys(vnode.repeatData).forEach(data => {
              if (needCompileValue === data || needCompileValue.indexOf(`${data}.`) === 0) {
                const fromVmValue = this.pipeHandler(textList[i], getValueByValue(vnode.repeatData[data], needCompileValue, data), needCompileStringList, vm, vnode);
                vnode.nodeValue = vnode.nodeValue.replace(textList[i], fromVmValue);
              }
            });
          } else throw new Error(`directive: {{${needCompileValue}}} can\'t use recognize ${needCompileValue}`);
        }
      }
    }
  }

  /**
   * update value of input for nv-model
   *
   * Sets the vnode's current value and (re)installs an 'input' event handler
   * that writes the edited value back into the VM or the repeat data.
   *
   * @param {Vnode} vnode
   * @param {*} value
   * @param {string} exp
   * @param {string} key
   * @param {number} index
   * @param {*} watchData
   * @param {*} vm
   * @memberof CompileRepeatUtil
   */
  public modelUpdater(vnode: Vnode, value: any, exp: string, key: string, index: number, watchData: any, vm: any): void {
    vnode.value = typeof value === 'undefined' ? '' : value;
    const findAttribute = vnode.attributes.find(attr => attr.name === 'nv-model');
    findAttribute.nvValue = (typeof value === 'undefined' ? '' : value);
    const utilVm = this;
    const func = function (event: Event): void {
      event.preventDefault();
      if (isFromVM(vm, exp)) {
        // Skip the write-back when the input already matches the watched value.
        if ((event.target as HTMLInputElement).value === watchData) return;
        setVMVal(vm, exp, (event.target as HTMLInputElement).value);
      } else if (exp === key || exp.indexOf(`${key}.`) === 0) {
        if (typeof watchData[index] !== 'object') watchData[index] = (event.target as HTMLInputElement).value;
        if (typeof watchData[index] === 'object') {
          let vals = getValueByValue(watchData[index], exp, key);
          vals = (event.target as HTMLInputElement).value;
          utilVm._setValueByValue(watchData[index], exp, key, vals);
        }
      } else throw new Error(`directive: nv-model can\'t use recognize this prop ${exp}`);
      // OnPush mode must still be allowed to trigger an update here
      if ((vm as IComponent).$nvChangeDetection === ChangeDetectionStrategy.OnPush) {
        if ((vm as IComponent).nvDoCheck) (vm as IComponent).nvDoCheck();
        (vm as IComponent).render();
      }
    };
    const sameEventType = vnode.eventTypes.find(_eventType => _eventType.type === 'input');
    // Replace an existing 'input' handler in place; otherwise register a new one.
    if (sameEventType) sameEventType.handler = func;
    if (!sameEventType) vnode.eventTypes.push({
      type: 'input',
      handler: func,
      token: value,
    });
  }

  /**
   * update text for nv-text
   *
   * Replaces the node's children with a single text child holding `value`.
   *
   * @param {Vnode} vnode
   * @param {*} value
   * @returns {void}
   * @memberof CompileRepeatUtil
   */
  public textUpdater(vnode: Vnode, value: any): void {
    const findAttribute = vnode.attributes.find(attr => attr.name === 'nv-text');
    findAttribute.nvValue = (typeof value === 'undefined' ? '' : value);
    vnode.nodeValue = typeof value === 'undefined' ? '' : value;
    // NOTE(review): this resets childNodes unless it is already an empty
    // array (missing OR non-empty both reset) — confirm that's the intent.
    if (!vnode.childNodes || (vnode.childNodes && vnode.childNodes.length > 0)) vnode.childNodes = [];
    vnode.childNodes.push(new Vnode({
      type: 'text',
      nodeValue: typeof value === 'undefined' ? '' : value,
      parentVnode: vnode,
      template: typeof value === 'undefined' ? '' : value,
      voidElement: true,
    }));
    vnode.voidElement = true;
  }

  /**
   * remove or show DOM for nv-if
   *
   * Falsy value: detaches the vnode from its parent. Truthy: records the
   * boolean on the nv-if attribute.
   *
   * @param {Vnode} vnode
   * @param {*} value
   * @param {*} vm
   * @memberof CompileRepeatUtil
   */
  public ifUpdater(vnode: Vnode, value: any, vm: any): void {
    const valueOfBoolean = Boolean(value);
    if (!valueOfBoolean && vnode.parentVnode.childNodes.indexOf(vnode) !== -1) vnode.parentVnode.childNodes.splice(vnode.parentVnode.childNodes.indexOf(vnode), 1);
    if (valueOfBoolean) {
      const findAttribute = vnode.attributes.find(attr => attr.name === 'nv-if');
      findAttribute.nvValue = valueOfBoolean;
    }
  }

  /**
   * remove or show DOM for nv-if-not
   *
   * Inverse of ifUpdater: truthy value detaches the vnode.
   *
   * @param {Vnode} vnode
   * @param {*} value
   * @param {*} vm
   * @memberof CompileRepeatUtil
   */
  public ifNotUpdater(vnode: Vnode, value: any, vm: any): void {
    const valueOfBoolean = !Boolean(value);
    if (!valueOfBoolean && vnode.parentVnode.childNodes.indexOf(vnode) !== -1) vnode.parentVnode.childNodes.splice(vnode.parentVnode.childNodes.indexOf(vnode), 1);
    if (valueOfBoolean) {
      const findAttribute = vnode.attributes.find(attr => attr.name === 'nv-if-not');
      findAttribute.nvValue = valueOfBoolean;
    }
  }

  /**
   * update class for nv-class
   *
   * @param {Vnode} vnode
   * @param {*} value
   * @returns {void}
   * @memberof CompileRepeatUtil
   */
  public classUpdater(vnode: Vnode, value: any): void {
    const findAttribute = vnode.attributes.find(attr => attr.name === 'nv-class');
    findAttribute.nvValue = value;
  }

  /**
   * update value of repeat node for nv-key
   *
   * Also stamps the resolved value as the vnode's diff key.
   *
   * @param {Vnode} vnode
   * @param {*} value
   * @memberof CompileRepeatUtil
   */
  public keyUpdater(vnode: Vnode, value: any): void {
    const findAttribute = vnode.attributes.find(attr => attr.name === 'nv-key');
    findAttribute.nvValue = value;
    vnode.key = value;
  }

  /**
   * update value of repeat node for nv-value
   *
   * @param {Vnode} vnode
   * @param {*} value
   * @memberof CompileRepeatUtil
   */
  public valueUpdater(vnode: Vnode, value: any): void {
    const findAttribute = vnode.attributes.find(attr => attr.name === 'nv-value');
    findAttribute.nvValue = value;
    vnode.value = value;
  }

  /**
   * commonUpdater for nv directive except repeat model text if class
   *
   * Generic fallback: writes the resolved value onto the `nv-<dir>` attribute.
   *
   * @param {Vnode} vnode
   * @param {*} value
   * @param {string} dir
   * @memberof CompileUtil
   */
  public commonUpdater(vnode: Vnode, value: any, dir: string): void {
    const findAttribute = vnode.attributes.find(attr => attr.name === `nv-${dir}`);
    findAttribute.nvValue = value;
  }

  /**
   * compile event and build eventType in DOM
   *
   * Parses `nv-on:<type>` bindings, resolves each call argument at event time
   * (special names `$event`/`$element`, literals, VM values, repeat values),
   * and registers/replaces the handler on the vnode.
   *
   * @param {Vnode} vnode
   * @param {*} vm
   * @param {string} exp
   * @param {string} eventName
   * @param {string} key
   * @param {*} val
   * @memberof CompileRepeatUtil
   */
  public eventHandler(vnode: Vnode, vm: any, exp: string, eventName: string, key: string, val: any): void {
    const eventType = eventName.split(':')[1];
    const fn = getVMFunction(vm, exp);
    const args = exp.match(/\((.*)\)/)[1].replace(/\s+/g, '').split(',');
    const utilVm = this;
    const func = function (event: Event): any {
      const argsList: any[] = [];
      args.forEach(arg => {
        if (arg === '') return false;
        if (arg === '$event') return argsList.push(event);
        if (arg === '$element') return argsList.push(event.target);
        if (arg === 'true' || arg === 'false') return argsList.push(arg === 'true');
        if (arg === 'null') return argsList.push(null);
        if (arg === 'undefined') return argsList.push(undefined);
        if (isFromVM(vm, arg)) return argsList.push(getVMVal(vm, arg));
        if (/^\'.*\'$/.test(arg)) return argsList.push(arg.match(/^\'(.*)\'$/)[1]);
        if (/^\".*\"$/.test(arg)) return argsList.push(arg.match(/^\"(.*)\"$/)[1]);
        if (!/^\'.*\'$/.test(arg) && !/^\".*\"$/.test(arg) && /(^[-,+]?\d+$)|(^[-, +]?\d+\.\d+$)/.test(arg)) return argsList.push(Number(arg));
        if (arg === key || arg.indexOf(`${key}.`) === 0) return argsList.push(utilVm._getVMRepeatVal(val, arg, key));
        if (vnode.repeatData) {
          // $index in this
          Object.keys(vnode.repeatData).forEach(data => {
            if (arg === data || arg.indexOf(`${data}.`) === 0) return argsList.push(getValueByValue(vnode.repeatData[data], arg, data));
          });
        }
      });
      // Suspend watching while the handler runs, then flush a deferred render
      // if one was requested during execution.
      const saveWatchStatus = (vm as IComponent).$watchStatus;
      if (saveWatchStatus === 'available') (vm as IComponent).$watchStatus = 'pending';
      fn.apply(vm, argsList);
      if (saveWatchStatus === 'available') {
        (vm as IComponent).$watchStatus = 'available';
        if ((vm as IComponent).$isWaitingRender && (vm as IComponent).nvDoCheck) (vm as IComponent).nvDoCheck();
        if ((vm as IComponent).$isWaitingRender) {
          (vm as IComponent).render();
          (vm as IComponent).$isWaitingRender = false;
        }
      }
    };
    if (eventType && fn) {
      const sameEventType = vnode.eventTypes.find(_eventType => _eventType.type === eventType);
      if (sameEventType) {
        sameEventType.handler = func;
        sameEventType.token = fn;
      }
      if (!sameEventType) vnode.eventTypes.push({
        type: eventType,
        handler: func,
        token: fn,
      });
    }
  }

  /**
   * handle prop
   *
   * Resolves a `{expression}` component prop: function call, VM value, repeat
   * value, or literal — and stores the result on the attribute's nvValue.
   *
   * @param {Vnode} vnode
   * @param {*} vm
   * @param {TAttributes} attr
   * @param {string} prop
   * @memberof CompileRepeatUtil
   */
  public propHandler(vnode: Vnode, vm: any, attr: TAttributes): void {
    const prop = /^\{(.+)\}$/.exec(attr.value);
    if (prop) {
      const propValue = prop[1];
      let _prop = null;
      if (/^.*\(.*\)$/.test(propValue)) {
        const fn = getVMFunction(vm, propValue);
        const args = propValue.match(/\((.*)\)/)[1].replace(/\s+/g, '').split(',');
        const argsList: any[] = [];
        args.forEach(arg => {
          if (arg === '') return false;
          if (arg === '$element') return argsList.push(vnode.nativeElement);
          if (arg === 'true' || arg === 'false') return argsList.push(arg === 'true');
          if (arg === 'null') return argsList.push(null);
          if (arg === 'undefined') return argsList.push(undefined);
          if (isFromVM(vm, arg)) return argsList.push(getVMVal(vm, arg));
          if (/^\'.*\'$/.test(arg)) return argsList.push(arg.match(/^\'(.*)\'$/)[1]);
          if (/^\".*\"$/.test(arg)) return argsList.push(arg.match(/^\"(.*)\"$/)[1]);
          if (!/^\'.*\'$/.test(arg) && !/^\".*\"$/.test(arg) && /(^[-,+]?\d+$)|(^[-, +]?\d+\.\d+$)/g.test(arg)) return argsList.push(Number(arg));
          if (vnode.repeatData) {
            // $index in this
            Object.keys(vnode.repeatData).forEach(data => {
              if (arg === data || arg.indexOf(`${data}.`) === 0) return argsList.push(getValueByValue(vnode.repeatData[data], arg, data));
            });
          }
        });
        const value = fn.apply(vm, argsList);
        attr.nvValue = value;
        return;
      }
      const valueList = propValue.split('.');
      const key = valueList[0];
      if (isFromVM(vm, propValue)) {
        _prop = getVMVal(vm, propValue);
        attr.nvValue = buildProps(_prop, vm);
        return;
      }
      if (vnode.repeatData && vnode.repeatData.hasOwnProperty(key)) {
        _prop = getValueByValue(vnode.repeatData[key], propValue, key);
        attr.nvValue = buildProps(_prop, vm);
        return;
      }
      if (/^\'.*\'$/.test(propValue)) {
        attr.nvValue = propValue.match(/^\'(.*)\'$/)[1];
        return;
      }
      if (/^\".*\"$/.test(propValue)) {
        attr.nvValue = propValue.match(/^\"(.*)\"$/)[1];
        return;
      }
      if (!/^\'.*\'$/.test(propValue) && !/^\".*\"$/.test(propValue) && /(^[-,+]?\d+$)|(^[-, +]?\d+\.\d+$)/.test(propValue)) {
        attr.nvValue = Number(propValue);
        return;
      }
      if (propValue === 'true' || propValue === 'false') {
        attr.nvValue = (propValue === 'true');
        return;
      }
      if (propValue === 'null') {
        attr.nvValue = null;
        return;
      }
      if (propValue === 'undefined') {
        attr.nvValue = undefined;
        return;
      }
    }
  }

  /**
   * Applies any pipes in a split expression ("value | pipe: a: b") to `value`.
   * If some pipe argument can't be resolved yet, compilation is skipped and
   * the original expression string `oldExp` is returned unchanged.
   *
   * @param {string} oldExp original (uncompiled) expression text
   * @param {*} value value the pipes transform
   * @param {string[]} needCompileStringList expression segments split on '|'
   * @param {*} vm component instance
   * @param {Vnode} vnode current vnode (for repeat-scope lookups)
   * @returns {*} transformed value, or oldExp when not compilable yet
   */
  public pipeHandler(oldExp: string, value: any, needCompileStringList: string[], vm: any, vnode: Vnode): any {
    let canCompileFlag = true;
    let fromVmValue = value;
    if (needCompileStringList.length > 1) {
      needCompileStringList.forEach((need, index) => {
        if (index !== 0) {
          const pipeArgusList: string[] = [];
          let pipeName = '';
          // need: "test-pipe: 1:2" — separate the pipe name from its arguments
          need.split(':').forEach((v, i) => {
            if (i === 0) pipeName = v.trim();
            else pipeArgusList.push(v.trim());
          });
          const argList: any[] = [];
          pipeArgusList.forEach(pipeArgu => {
            // Argument isn't ready yet: compilation is not allowed
            if (!valueIsReady(pipeArgu, vnode, vm)) {
              canCompileFlag = false;
              return;
            }
            let pipeArguValue = null;
            if (isFromVM(vm, pipeArgu)) pipeArguValue = getVMVal(vm, pipeArgu);
            else if (/^\'.*\'$/.test(pipeArgu)) pipeArguValue = pipeArgu.match(/^\'(.*)\'$/)[1];
            else if (/^\".*\"$/.test(pipeArgu)) pipeArguValue = pipeArgu.match(/^\"(.*)\"$/)[1];
            else if (!/^\'.*\'$/.test(pipeArgu) && !/^\".*\"$/.test(pipeArgu) && /(^[-,+]?\d+$)|(^[-, +]?\d+\.\d+$)/g.test(pipeArgu)) pipeArguValue = Number(pipeArgu);
            else if (pipeArgu === 'true' || pipeArgu === 'false') pipeArguValue = (pipeArgu === 'true');
            else if (pipeArgu === 'null') pipeArguValue = null;
            else if (pipeArgu === 'undefined') pipeArguValue = undefined;
            else if (vnode.repeatData) {
              Object.keys(vnode.repeatData).forEach(data => {
                if (pipeArgu === data || pipeArgu.indexOf(`${data}.`) === 0) pipeArguValue = getValueByValue(vnode.repeatData[data], pipeArgu, data);
              });
            }
            argList.push(pipeArguValue);
          })
          // If all pipe arguments resolved, run the pipe and take its result
          if (canCompileFlag) {
            // Look up the pipe class via the component's $declarationMap
            const PipeClass = vm.$declarationMap.get(pipeName);
            // Instantiate the pipe in the component's scope
            const pipeInstance = buildPipeScope(PipeClass, vm.$nativeElement, vm);
            // Invoke the pipe's transform method
            if (!pipeInstance.transform) throw Error(`Pipe ${pipeName} don't implement the method 'transform'`);
            fromVmValue = pipeInstance.transform(value, ...argList);
          }
        }
      });
    }
    if (canCompileFlag) return fromVmValue;
    else return oldExp;
  }
}
the_stack
import { Buffer, ObjectRenderer, Renderer, Geometry, Shader, State, ViewableBuffer } from '@pixi/core'; import { TYPES, DRAW_MODES } from '@pixi/constants'; import { log2, nextPow2 } from '@pixi/utils'; const _instanceID = 0; let _instanceRendererID = 0; /** * {@code InstancedRenderer} is an object-renderer for drawing meshes/shapes/display-objects * that have a common geometry and some "instanced" attributes. * * @class * @extends PIXI.ObjectRenderer */ export class InstancedRenderer extends ObjectRenderer { public renderer: Renderer;// @pixi/core doesn't have types yet :< public readonly instanceRendererID: string; protected _instanceBuilder: Record<string, string>; protected _geometry: Geometry; protected _shader: Shader; protected _state: State; protected _objectBuffer: Array<{[id: string]: string}>; protected _objectCount: number; protected _instanceBuffer: Buffer; protected _instanceBufferHash: number; protected readonly _instanceAttribSizes: Record<string, number>; protected readonly _instanceSize: number; private _aBuffers: ViewableBuffer[] = []; private _instanceAttribViews: Record<string, string> ; /** * @param {PIXI.Renderer} renderer - the WebGL renderer to attach to * @param {PIXI.IInstancedRendererOptions} options - the pipeline description */ constructor(renderer: Renderer, options: IInstancedRendererOptions) { super(renderer); /** * Unique ID for this instance renderer. * * @protected * @readonly * @member {number} */ this.instanceRendererID = `instanceRenderer-${_instanceRendererID++}-ID`; /** * Maps display-object property names holding instanced attribute data to their attribute * names. * * @protected * @member {Object<string, string>} */ this._instanceBuilder = options.instanceBuilder; /** * The reference geometry specifying the "attribute style". * * @protected * @member {PIXI.Geometry} */ this._geometry = options.geometry; /** * The shader used to draw all instances. 
* * @member {PIXI.Shader} */ this._shader = options.shader; /** * The WebGL state required for using the shader. * * @default PIXI.State.for2d() * @member {PIXI.State} */ this._state = options.state || State.for2d(); /** * Object mapping (instanced) attribute IDs to their sizes in bytes. * * @protected * @readonly * @member {Object<string, number>} */ this._instanceAttribSizes = {}; /** * Object mapping (instanced) attribute IDs to their data type views (i.e. `uint32View`, * `float32View`, `uint8View`, etc. in `PIXI.ViewableBuffer`). * * @protected * @readonly * @member {Object<string, string>} */ this._instanceAttribViews = {}; /** * The bytes used per instance/display-object. * * @protected * @readonly * @member {number} */ this._instanceSize = this._calculateInstanceSizesAndViews(); /** * Buffered display-objects * * @protected * @member {PIXI.DisplayObject[]} */ this._objectBuffer = []; /** * The number of display-objects buffered. This is different from the buffer's capacity * {@code this._objectBuffer.length}. * * @protected * @member {number} */ this._objectCount = 0; // NOTE: _initInstanceBuffer() also clones this._geometry and replaces it. this._initInstanceBuffer(); } /** * @override */ start() { this._objectCount = 0; } /** * @override * @param {PIXI.DisplayObject} displayObject */ render(displayObject: { [id: string]: string }): void { this._objectBuffer[this._objectCount] = displayObject; ++this._objectCount; } /** * Flushes/draws all pending display-objects. 
* * @override */ flush(): void { const instanceBuilder = this._instanceBuilder; const instanceSize = this._instanceSize; const instanceBuffer = this._getBuffer(this._objectCount * this._instanceSize); // TODO: Optimize this by compiling a function that doesn't loop through each attribute // by rolling the loop for (let i = 0; i < this._objectCount; i++) { let rsize = 0; const object = this._objectBuffer[i]; for (const attribID in this._instanceBuilder) { const attribute = this._geometry.attributes[attribID]; if (!attribute.instance) { continue; } const attribSize = attribute.size; const view = instanceBuffer[this._instanceAttribViews[attribID]]; const size = this._instanceAttribSizes[attribID]; const index = (i * instanceSize + rsize) / size; const prop = instanceBuilder[attribID]; if (attribSize === 1) { view[index] = object[prop]; } else { for (let j = 0; j < attribSize; j++) { view[index + j] = object[prop][j]; } } rsize += size; } } this._instanceBuffer.update(instanceBuffer.rawBinaryData); const renderer = this.renderer; renderer.shader.bind(this._shader); renderer.geometry.bind(this._geometry); renderer.state.set(this._state); renderer.geometry.draw(DRAW_MODES.TRIANGLES, undefined, undefined, this._objectCount); this._objectCount = 0; } /** * Returns a (cached) buffer that can hold {@code size} bytes. * * @param {number} size - required capacity in bytes * @return {ViewableBuffer} - buffer than can hold atleast `size` floats * @private */ protected _getBuffer(size: number): ViewableBuffer { const roundedP2 = nextPow2(Math.ceil(size)); const roundedSizeIndex = log2(roundedP2); const roundedSize = roundedP2; if (this._aBuffers.length <= roundedSizeIndex) { this._aBuffers.length = roundedSizeIndex + 1; } let buffer = this._aBuffers[roundedSize]; if (!buffer) { this._aBuffers[roundedSize] = buffer = new ViewableBuffer(roundedSize); } return buffer; } /** * Returns the no. of bytes used for each instance. 
* * @private * @returns {number} */ private _calculateInstanceSizesAndViews(): number { let totalSize = 0; for (const attribID in this._geometry.attributes) { const attribute = this._geometry.attributes[attribID]; if (!attribute.instance) { continue; } let typeSize = 0; let view; switch (attribute.type) { case TYPES.UNSIGNED_BYTE: typeSize = 1; view = 'uint8View'; break; case TYPES.UNSIGNED_SHORT: case TYPES.UNSIGNED_SHORT_5_6_5: case TYPES.UNSIGNED_SHORT_4_4_4_4: case TYPES.UNSIGNED_SHORT_5_5_5_1: case TYPES.HALF_FLOAT: typeSize = 2; view = 'uint16View';// TODO: HALF_FLOAT will not work break; case TYPES.FLOAT: typeSize = 4; view = 'float32View'; break; } const byteSize = attribute.size * typeSize; this._instanceAttribViews[attribID] = view; this._instanceAttribSizes[attribID] = byteSize; totalSize += byteSize; } return totalSize; } /** * Replaces {@code this._geometry} with a new geometry such that each instanced attribute * points to the same buffer. Uninstanced attributes refer to their original buffers. */ private _initInstanceBuffer(): void { /** * The instance buffer holds all instanced attributes in an interleaved fashion. * * @member {PIXI.Buffer} */ this._instanceBuffer = new Buffer(); const clonedGeometry = new Geometry(); for (const attribID in this._geometry.attributes) { const attribute = this._geometry.attributes[attribID]; const instance = attribute.instance; console.log(attribID); console.log(this._geometry.buffers[attribute.buffer]); clonedGeometry.addAttribute( attribID, instance ? this._instanceBuffer : this._geometry.buffers[attribute.buffer], attribute.size, attribute.normalized, attribute.type, instance ? attribute.start : undefined, instance ? 
attribute.stride : undefined, attribute.instance, ); } this._geometry = clonedGeometry; } } export interface IInstancedRendererOptions { instanceBuilder: Record<string, string>; geometry: Geometry; shader: Shader; state?: State; } /** * This options define how display-objects are rendered by the instanced renderer. * * NOTE: * * + Make sure your instanceBuilder is in the order you want attributes to be packed * in the same buffer. Also, make sure that floats are aligned at 4-byte boundaries and * shorts are aligned at 2-byte boundaries. * * + PixiJS Bug: Make sure the first attribute is **not** instanced. * * @memberof PIXI * @interface IInstancedRendererOptions * @property {Object<string, string>} instanceBuilder - an object mapping display-object * properties holding "instance attributes" from their attribute name. * @property {Geometry} geometry - the geometry style used to render the display-objects * @property {Shader} shader - the shader used to render display-objects * @property {State}[state] - the WebGL state used to run the shader * @example * { * instanceBuilder: { * aVertexPosition: '_vertexData' * }, * geometry: new PIXI.Geometry() * .addAttribute('aVertexPosition', null, 2, false, TYPES.FLOAT, 0, 0, false) * .addAttribute('aWorldTransform', null, 2, false, TYPES.FLOAT, 0, 0, true) * shader: new PIXI.Shader(<vertexShaderSrc>, <fragmentShaderSrc>, <uniformData>), * state: PIXI.State.for2d() // that's the default * } */
the_stack
//@ts-check ///<reference path="devkit.d.ts" /> declare namespace DevKit { namespace FormLead { interface Header extends DevKit.Controls.IHeader { /** Select a rating value to indicate the lead's potential to become a customer. */ LeadQualityCode: DevKit.Controls.OptionSet; /** Select the primary marketing source that prompted the lead to contact you. */ LeadSourceCode: DevKit.Controls.OptionSet; /** Owner Id */ OwnerId: DevKit.Controls.Lookup; /** Select the lead's status. */ StatusCode: DevKit.Controls.OptionSet; } interface tab_details_tab_Sections { contact_methods: DevKit.Controls.Section; lead_information: DevKit.Controls.Section; marketing_information: DevKit.Controls.Section; } interface tab_documents_sharepoint_Sections { documents_sharepoint_section: DevKit.Controls.Section; } interface tab_Summary_Sections { BusinessCard: DevKit.Controls.Section; company: DevKit.Controls.Section; Contact: DevKit.Controls.Section; MapSection: DevKit.Controls.Section; RELATED_TAB: DevKit.Controls.Section; SOCIAL_PANE: DevKit.Controls.Section; } interface tab_details_tab extends DevKit.Controls.ITab { Section: tab_details_tab_Sections; } interface tab_documents_sharepoint extends DevKit.Controls.ITab { Section: tab_documents_sharepoint_Sections; } interface tab_Summary extends DevKit.Controls.ITab { Section: tab_Summary_Sections; } interface Tabs { details_tab: tab_details_tab; documents_sharepoint: tab_documents_sharepoint; Summary: tab_Summary; } interface Body { Tab: Tabs; /** Shows the complete primary address. */ Address1_Composite: DevKit.Controls.String; /** Stores Image of the Business Card */ BusinessCard: DevKit.Controls.String; /** Choose the campaign that the lead was generated from to track the effectiveness of marketing campaigns and identify communications received by the lead. */ CampaignId: DevKit.Controls.Lookup; /** Type the name of the company associated with the lead. 
This becomes the account name when the lead is qualified and converted to a customer account. */ CompanyName: DevKit.Controls.String; /** Type additional information to describe the lead, such as an excerpt from the company's website. */ Description: DevKit.Controls.String; /** Select whether the lead accepts bulk email sent through marketing campaigns or quick campaigns. If Do Not Allow is selected, the lead can be added to marketing lists, but will be excluded from the email. */ DoNotBulkEMail: DevKit.Controls.Boolean; /** Select whether the lead allows direct email sent from Microsoft Dynamics 365. */ DoNotEMail: DevKit.Controls.Boolean; /** Select whether the lead allows phone calls. */ DoNotPhone: DevKit.Controls.Boolean; /** Select whether the lead allows direct mail. */ DoNotPostalMail: DevKit.Controls.Boolean; /** Select whether the lead accepts marketing materials, such as brochures or catalogs. Leads that opt out can be excluded from marketing initiatives. */ DoNotSendMM: DevKit.Controls.Boolean; /** Type the primary email address for the lead. */ EMailAddress1: DevKit.Controls.String; /** Combines and shows the lead's first and last names so the full name can be displayed in views and reports. */ FullName: DevKit.Controls.String; /** Select the primary industry in which the lead's business is focused, for use in marketing segmentation and demographic analysis. */ IndustryCode: DevKit.Controls.OptionSet; /** Type the job title of the primary contact for this lead to make sure the prospect is addressed correctly in sales calls, email, and marketing campaigns. */ JobTitle: DevKit.Controls.String; /** Shows the date when the lead was last included in a marketing campaign or quick campaign. */ LastUsedInCampaign: DevKit.Controls.Date; mapcontrol: DevKit.Controls.Map; /** Type the mobile phone number for the primary contact for the lead. 
*/ MobilePhone: DevKit.Controls.String; /** Whether the Opportunity created when qualifying this Lead is for an Item- based or a Work-based sale */ msdyn_ordertype: DevKit.Controls.OptionSet; /** Whether the Opportunity created when qualifying this Lead is for an Item- based or a Work-based sale */ msdyn_ordertype_1: DevKit.Controls.OptionSet; notescontrol: DevKit.Controls.Note; /** Type the number of employees that work at the company associated with the lead, for use in marketing segmentation and demographic analysis. */ NumberOfEmployees: DevKit.Controls.Integer; /** Select the preferred method of contact. */ PreferredContactMethodCode: DevKit.Controls.OptionSet; /** Type the annual revenue of the company associated with the lead to provide an understanding of the prospect's business. */ Revenue: DevKit.Controls.Money; /** Type the Standard Industrial Classification (SIC) code that indicates the lead's primary industry of business for use in marketing segmentation and demographic analysis. */ SIC: DevKit.Controls.String; /** Type a subject or descriptive name, such as the expected order, company name, or marketing source list, to identify the lead. */ Subject: DevKit.Controls.String; /** Type the work phone number for the primary contact for the lead. */ Telephone1: DevKit.Controls.String; /** Choose the local currency for the record to make sure budgets are reported in the correct currency. */ TransactionCurrencyId: DevKit.Controls.Lookup; /** Type the website URL for the company associated with this lead. 
*/ WebSiteUrl: DevKit.Controls.String; } interface Navigation { navActivities: DevKit.Controls.NavigationItem, navAsyncOperations: DevKit.Controls.NavigationItem, navAudit: DevKit.Controls.NavigationItem, navCampaignsInSFA: DevKit.Controls.NavigationItem, navConnections: DevKit.Controls.NavigationItem, navDocument: DevKit.Controls.NavigationItem, navLeadCompetitor: DevKit.Controls.NavigationItem, navProcessSessions: DevKit.Controls.NavigationItem } interface ProcessLead_to_Opportunity_Sales_Process { /** Information about the budget amount of the lead's company or organization. */ BudgetAmount: DevKit.Controls.Money; /** Select whether your notes include information about who makes the purchase decisions at the lead's company. */ DecisionMaker: DevKit.Controls.Boolean; /** Type additional information to describe the lead, such as an excerpt from the company's website. */ Description: DevKit.Controls.String; /** Choose an account to connect this lead to, so that the relationship is visible in reports and analytics. */ ParentAccountId: DevKit.Controls.Lookup; /** Choose a contact to connect this lead to, so that the relationship is visible in reports and analytics. */ ParentContactId: DevKit.Controls.Lookup; /** Choose whether an individual or a committee will be involved in the purchase process for the lead. */ PurchaseProcess: DevKit.Controls.OptionSet; /** Choose how long the lead will likely take to make the purchase, so the sales team will be aware. */ PurchaseTimeFrame: DevKit.Controls.OptionSet; } interface Process extends DevKit.Controls.IProcess { Lead_to_Opportunity_Sales_Process: ProcessLead_to_Opportunity_Sales_Process; } interface Grid { Stakeholders: DevKit.Controls.Grid; Competitors: DevKit.Controls.Grid; DocumentsSubGrid: DevKit.Controls.Grid; } } class FormLead extends DevKit.IForm { /** * DynamicsCrm.DevKit form Lead * @param executionContext the execution context * @param defaultWebResourceName default resource name. 
E.g.: "devkit_/resources/Resource" */ constructor(executionContext: any, defaultWebResourceName?: string); /** Utility functions/methods/objects for Dynamics 365 form */ Utility: DevKit.Utility; /** The Body section of form Lead */ Body: DevKit.FormLead.Body; /** The Header section of form Lead */ Header: DevKit.FormLead.Header; /** The Navigation of form Lead */ Navigation: DevKit.FormLead.Navigation; /** The Process of form Lead */ Process: DevKit.FormLead.Process; /** The Grid of form Lead */ Grid: DevKit.FormLead.Grid; } namespace FormLead_Quick_Create { interface tab_tab_1_Sections { tab_1_column_1_section_1: DevKit.Controls.Section; tab_1_column_2_section_1: DevKit.Controls.Section; tab_1_column_3_section_1: DevKit.Controls.Section; } interface tab_tab_1 extends DevKit.Controls.ITab { Section: tab_tab_1_Sections; } interface Tabs { tab_1: tab_tab_1; } interface Body { Tab: Tabs; /** Information about the budget amount of the lead's company or organization. */ BudgetAmount: DevKit.Controls.Money; /** Stores Image of the Business Card */ BusinessCard: DevKit.Controls.String; /** Type the name of the company associated with the lead. This becomes the account name when the lead is qualified and converted to a customer account. */ CompanyName: DevKit.Controls.String; /** Type additional information to describe the lead, such as an excerpt from the company's website. */ Description: DevKit.Controls.String; /** Type the primary email address for the lead. */ EMailAddress1: DevKit.Controls.String; /** Type the first name of the primary contact for the lead to make sure the prospect is addressed correctly in sales calls, email, and marketing campaigns. */ FirstName: DevKit.Controls.String; /** Type the job title of the primary contact for this lead to make sure the prospect is addressed correctly in sales calls, email, and marketing campaigns. 
*/ JobTitle: DevKit.Controls.String; /** Type the last name of the primary contact for the lead to make sure the prospect is addressed correctly in sales calls, email, and marketing campaigns. */ LastName: DevKit.Controls.String; /** Select the primary marketing source that prompted the lead to contact you. */ LeadSourceCode: DevKit.Controls.OptionSet; /** Type the mobile phone number for the primary contact for the lead. */ MobilePhone: DevKit.Controls.String; /** Choose how long the lead will likely take to make the purchase, so the sales team will be aware. */ PurchaseTimeFrame: DevKit.Controls.OptionSet; /** Type a subject or descriptive name, such as the expected order, company name, or marketing source list, to identify the lead. */ Subject: DevKit.Controls.String; } } class FormLead_Quick_Create extends DevKit.IForm { /** * DynamicsCrm.DevKit form Lead_Quick_Create * @param executionContext the execution context * @param defaultWebResourceName default resource name. E.g.: "devkit_/resources/Resource" */ constructor(executionContext: any, defaultWebResourceName?: string); /** Utility functions/methods/objects for Dynamics 365 form */ Utility: DevKit.Utility; /** The Body section of form Lead_Quick_Create */ Body: DevKit.FormLead_Quick_Create.Body; } class LeadApi { /** * DynamicsCrm.DevKit LeadApi * @param entity The entity object */ constructor(entity?: any); /** * Get the value of alias * @param alias the alias value * @param isMultiOptionSet true if the alias is multi OptionSet */ getAliasedValue(alias: string, isMultiOptionSet?: boolean): any; /** * Get the formatted value of alias * @param alias the alias value * @param isMultiOptionSet true if the alias is multi OptionSet */ getAliasedFormattedValue(alias: string, isMultiOptionSet?: boolean): string; /** The entity object */ Entity: any; /** The entity name */ EntityName: string; /** The entity collection name */ EntityCollectionName: string; /** The @odata.etag is then used to build a cache of the 
response that is dependant on the fields that are retrieved */ "@odata.etag": string; /** Unique identifier of the account with which the lead is associated. */ AccountId: DevKit.WebApi.LookupValueReadonly; /** Unique identifier for address 1. */ Address1_AddressId: DevKit.WebApi.GuidValue; /** Select the primary address type. */ Address1_AddressTypeCode: DevKit.WebApi.OptionSetValue; /** Type the city for the primary address. */ Address1_City: DevKit.WebApi.StringValue; /** Shows the complete primary address. */ Address1_Composite: DevKit.WebApi.StringValueReadonly; /** Type the country or region for the primary address. */ Address1_Country: DevKit.WebApi.StringValue; /** Type the county for the primary address. */ Address1_County: DevKit.WebApi.StringValue; /** Type the fax number associated with the primary address. */ Address1_Fax: DevKit.WebApi.StringValue; /** Type the latitude value for the primary address for use in mapping and other applications. */ Address1_Latitude: DevKit.WebApi.DoubleValue; /** Type the first line of the primary address. */ Address1_Line1: DevKit.WebApi.StringValue; /** Type the second line of the primary address. */ Address1_Line2: DevKit.WebApi.StringValue; /** Type the third line of the primary address. */ Address1_Line3: DevKit.WebApi.StringValue; /** Type the longitude value for the primary address for use in mapping and other applications. */ Address1_Longitude: DevKit.WebApi.DoubleValue; /** Type a descriptive name for the primary address, such as Corporate Headquarters. */ Address1_Name: DevKit.WebApi.StringValue; /** Type the ZIP Code or postal code for the primary address. */ Address1_PostalCode: DevKit.WebApi.StringValue; /** Type the post office box number of the primary address. */ Address1_PostOfficeBox: DevKit.WebApi.StringValue; /** Select a shipping method for deliveries sent to this address. */ Address1_ShippingMethodCode: DevKit.WebApi.OptionSetValue; /** Type the state or province of the primary address. 
*/ Address1_StateOrProvince: DevKit.WebApi.StringValue; /** Type the main phone number associated with the primary address. */ Address1_Telephone1: DevKit.WebApi.StringValue; /** Type a second phone number associated with the primary address. */ Address1_Telephone2: DevKit.WebApi.StringValue; /** Type a third phone number associated with the primary address. */ Address1_Telephone3: DevKit.WebApi.StringValue; /** Type the UPS zone of the primary address to make sure shipping charges are calculated correctly and deliveries are made promptly, if shipped by UPS. */ Address1_UPSZone: DevKit.WebApi.StringValue; /** Select the time zone, or UTC offset, for this address so that other people can reference it when they contact someone at this address. */ Address1_UTCOffset: DevKit.WebApi.IntegerValue; /** Unique identifier for address 2. */ Address2_AddressId: DevKit.WebApi.GuidValue; /** Select the secondary address type. */ Address2_AddressTypeCode: DevKit.WebApi.OptionSetValue; /** Type the city for the secondary address. */ Address2_City: DevKit.WebApi.StringValue; /** Shows the complete secondary address. */ Address2_Composite: DevKit.WebApi.StringValueReadonly; /** Type the country or region for the secondary address. */ Address2_Country: DevKit.WebApi.StringValue; /** Type the county for the secondary address. */ Address2_County: DevKit.WebApi.StringValue; /** Type the fax number associated with the secondary address. */ Address2_Fax: DevKit.WebApi.StringValue; /** Type the latitude value for the secondary address for use in mapping and other applications. */ Address2_Latitude: DevKit.WebApi.DoubleValue; /** Type the first line of the secondary address. */ Address2_Line1: DevKit.WebApi.StringValue; /** Type the second line of the secondary address. */ Address2_Line2: DevKit.WebApi.StringValue; /** Type the third line of the secondary address. 
*/ Address2_Line3: DevKit.WebApi.StringValue; /** Type the longitude value for the secondary address for use in mapping and other applications. */ Address2_Longitude: DevKit.WebApi.DoubleValue; /** Type a descriptive name for the secondary address, such as Corporate Headquarters. */ Address2_Name: DevKit.WebApi.StringValue; /** Type the ZIP Code or postal code for the secondary address. */ Address2_PostalCode: DevKit.WebApi.StringValue; /** Type the post office box number of the secondary address. */ Address2_PostOfficeBox: DevKit.WebApi.StringValue; /** Select a shipping method for deliveries sent to this address. */ Address2_ShippingMethodCode: DevKit.WebApi.OptionSetValue; /** Type the state or province of the secondary address. */ Address2_StateOrProvince: DevKit.WebApi.StringValue; /** Type the main phone number associated with the secondary address. */ Address2_Telephone1: DevKit.WebApi.StringValue; /** Type a second phone number associated with the secondary address. */ Address2_Telephone2: DevKit.WebApi.StringValue; /** Type a third phone number associated with the secondary address. */ Address2_Telephone3: DevKit.WebApi.StringValue; /** Type the UPS zone of the secondary address to make sure shipping charges are calculated correctly and deliveries are made promptly, if shipped by UPS. */ Address2_UPSZone: DevKit.WebApi.StringValue; /** Select the time zone, or UTC offset, for this address so that other people can reference it when they contact someone at this address. */ Address2_UTCOffset: DevKit.WebApi.IntegerValue; /** Information about the budget amount of the lead's company or organization. */ BudgetAmount: DevKit.WebApi.MoneyValue; /** Value of the Budget Amount in base currency. */ BudgetAmount_Base: DevKit.WebApi.MoneyValueReadonly; /** Information about the budget status of the lead's company or organization. 
*/ BudgetStatus: DevKit.WebApi.OptionSetValue; /** Stores Image of the Business Card */ BusinessCard: DevKit.WebApi.StringValue; /** Stores Business Card Control Properties. */ BusinessCardAttributes: DevKit.WebApi.StringValue; /** Choose the campaign that the lead was generated from to track the effectiveness of marketing campaigns and identify communications received by the lead. */ CampaignId: DevKit.WebApi.LookupValue; /** Type the name of the company associated with the lead. This becomes the account name when the lead is qualified and converted to a customer account. */ CompanyName: DevKit.WebApi.StringValue; /** Select whether the lead confirmed interest in your offerings. This helps in determining the lead quality. */ ConfirmInterest: DevKit.WebApi.BooleanValue; /** Unique identifier of the contact with which the lead is associated. */ ContactId: DevKit.WebApi.LookupValueReadonly; /** Shows who created the record. */ CreatedBy: DevKit.WebApi.LookupValueReadonly; /** Date and time when the record was created. */ CreatedOn_UtcDateAndTime: DevKit.WebApi.UtcDateAndTimeValueReadonly; /** Shows who created the record on behalf of another user. */ CreatedOnBehalfBy: DevKit.WebApi.LookupValueReadonly; customerid_account: DevKit.WebApi.LookupValue; customerid_contact: DevKit.WebApi.LookupValue; /** Select whether your notes include information about who makes the purchase decisions at the lead's company. */ DecisionMaker: DevKit.WebApi.BooleanValue; /** Type additional information to describe the lead, such as an excerpt from the company's website. */ Description: DevKit.WebApi.StringValue; /** Select whether the lead accepts bulk email sent through marketing campaigns or quick campaigns. If Do Not Allow is selected, the lead can be added to marketing lists, but will be excluded from the email. */ DoNotBulkEMail: DevKit.WebApi.BooleanValue; /** Select whether the lead allows direct email sent from Microsoft Dynamics 365. 
*/ DoNotEMail: DevKit.WebApi.BooleanValue; /** Select whether the lead allows faxes. */ DoNotFax: DevKit.WebApi.BooleanValue; /** Select whether the lead allows phone calls. */ DoNotPhone: DevKit.WebApi.BooleanValue; /** Select whether the lead allows direct mail. */ DoNotPostalMail: DevKit.WebApi.BooleanValue; /** Select whether the lead accepts marketing materials, such as brochures or catalogs. Leads that opt out can be excluded from marketing initiatives. */ DoNotSendMM: DevKit.WebApi.BooleanValue; /** Type the primary email address for the lead. */ EMailAddress1: DevKit.WebApi.StringValue; /** Type the secondary email address for the lead. */ EMailAddress2: DevKit.WebApi.StringValue; /** Type a third email address for the lead. */ EMailAddress3: DevKit.WebApi.StringValue; /** Shows the default image for the record. */ EntityImage: DevKit.WebApi.StringValue; EntityImage_Timestamp: DevKit.WebApi.BigIntValueReadonly; EntityImage_URL: DevKit.WebApi.StringValueReadonly; EntityImageId: DevKit.WebApi.GuidValueReadonly; /** Type the estimated revenue value that this lead will generate to assist in sales forecasting and planning. */ EstimatedAmount: DevKit.WebApi.MoneyValue; /** Value of the Est. Value in base currency. */ EstimatedAmount_Base: DevKit.WebApi.MoneyValueReadonly; /** Enter the expected close date for the lead, so that the sales team can schedule timely follow-up meetings to move the prospect to the next sales stage. */ EstimatedCloseDate_DateOnly: DevKit.WebApi.DateOnlyValue; /** Type a numeric value of the lead's estimated value, such as a product quantity, if no revenue amount can be specified in the Est. Value field. This can be used for sales forecasting and planning. */ EstimatedValue: DevKit.WebApi.DoubleValue; /** Select whether the fit between the lead's requirements and your offerings was evaluated. */ EvaluateFit: DevKit.WebApi.BooleanValue; /** Shows the conversion rate of the record's currency. 
The exchange rate is used to convert all money fields in the record from the local currency to the system's default currency. */ ExchangeRate: DevKit.WebApi.DecimalValueReadonly; /** Type the fax number for the primary contact for the lead. */ Fax: DevKit.WebApi.StringValue; /** Type the first name of the primary contact for the lead to make sure the prospect is addressed correctly in sales calls, email, and marketing campaigns. */ FirstName: DevKit.WebApi.StringValue; /** Information about whether to allow following email activity like opens, attachment views and link clicks for emails sent to the lead. */ FollowEmail: DevKit.WebApi.BooleanValue; /** Combines and shows the lead's first and last names so the full name can be displayed in views and reports. */ FullName: DevKit.WebApi.StringValueReadonly; /** Sequence number of the import that created this record. */ ImportSequenceNumber: DevKit.WebApi.IntegerValue; /** Select the primary industry in which the lead's business is focused, for use in marketing segmentation and demographic analysis. */ IndustryCode: DevKit.WebApi.OptionSetValue; /** Choose whether someone from the sales team contacted this lead earlier. */ InitialCommunication: DevKit.WebApi.OptionSetValue; /** Information about whether the contact was auto-created when promoting an email or an appointment. */ IsAutoCreate: DevKit.WebApi.BooleanValueReadonly; /** Indicates whether the lead is private or visible to the entire organization. */ IsPrivate: DevKit.WebApi.BooleanValueReadonly; /** Type the job title of the primary contact for this lead to make sure the prospect is addressed correctly in sales calls, email, and marketing campaigns. */ JobTitle: DevKit.WebApi.StringValue; /** Type the last name of the primary contact for the lead to make sure the prospect is addressed correctly in sales calls, email, and marketing campaigns. */ LastName: DevKit.WebApi.StringValue; /** Contains the date and time stamp of the last on hold time. 
*/ LastOnHoldTime_UtcDateAndTime: DevKit.WebApi.UtcDateAndTimeValue; /** Shows the date when the lead was last included in a marketing campaign or quick campaign. */ LastUsedInCampaign_UtcDateOnly: DevKit.WebApi.UtcDateOnlyValue; /** Unique identifier of the lead. */ LeadId: DevKit.WebApi.GuidValue; /** Select a rating value to indicate the lead's potential to become a customer. */ LeadQualityCode: DevKit.WebApi.OptionSetValue; /** Select the primary marketing source that prompted the lead to contact you. */ LeadSourceCode: DevKit.WebApi.OptionSetValue; /** Unique identifier of the master lead for merge. */ MasterId: DevKit.WebApi.LookupValueReadonly; MasterLeadIdName: DevKit.WebApi.StringValueReadonly; /** Tells whether the lead has been merged with another lead. */ Merged: DevKit.WebApi.BooleanValueReadonly; /** Type the middle name or initial of the primary contact for the lead to make sure the prospect is addressed correctly. */ MiddleName: DevKit.WebApi.StringValue; /** Type the mobile phone number for the primary contact for the lead. */ MobilePhone: DevKit.WebApi.StringValue; /** Shows who last updated the record. */ ModifiedBy: DevKit.WebApi.LookupValueReadonly; /** Date and time when the record was modified. */ ModifiedOn_UtcDateAndTime: DevKit.WebApi.UtcDateAndTimeValueReadonly; /** Shows who last updated the record on behalf of another user. */ ModifiedOnBehalfBy: DevKit.WebApi.LookupValueReadonly; /** Describes whether lead is opted out or not */ msdyn_gdproptout: DevKit.WebApi.BooleanValue; /** Whether the Opportunity created when qualifying this Lead is for an Item- based or a Work-based sale */ msdyn_ordertype: DevKit.WebApi.OptionSetValue; /** Choose how high the level of need is for the lead's company. */ Need: DevKit.WebApi.OptionSetValue; /** Type the number of employees that work at the company associated with the lead, for use in marketing segmentation and demographic analysis. 
*/ NumberOfEmployees: DevKit.WebApi.IntegerValue; /** Shows how long, in minutes, that the record was on hold. */ OnHoldTime: DevKit.WebApi.IntegerValueReadonly; /** This attribute is used for Sample Service Business Processes. */ OriginatingCaseId: DevKit.WebApi.LookupValue; /** Date and time that the record was migrated. */ OverriddenCreatedOn_UtcDateOnly: DevKit.WebApi.UtcDateOnlyValue; /** Enter the user who is assigned to manage the record. This field is updated every time the record is assigned to a different user */ OwnerId_systemuser: DevKit.WebApi.LookupValue; /** Enter the team who is assigned to manage the record. This field is updated every time the record is assigned to a different team */ OwnerId_team: DevKit.WebApi.LookupValue; /** Unique identifier for the business unit that owns the record */ OwningBusinessUnit: DevKit.WebApi.LookupValueReadonly; /** Unique identifier for the team that owns the record. */ OwningTeam: DevKit.WebApi.LookupValueReadonly; /** Unique identifier for the user that owns the record. */ OwningUser: DevKit.WebApi.LookupValueReadonly; /** Type the pager number for the primary contact for the lead. */ Pager: DevKit.WebApi.StringValue; /** Choose an account to connect this lead to, so that the relationship is visible in reports and analytics. */ ParentAccountId: DevKit.WebApi.LookupValue; /** Choose a contact to connect this lead to, so that the relationship is visible in reports and analytics. */ ParentContactId: DevKit.WebApi.LookupValue; /** Shows whether the lead participates in workflow rules. */ ParticipatesInWorkflow: DevKit.WebApi.BooleanValue; /** Select the preferred method of contact. */ PreferredContactMethodCode: DevKit.WebApi.OptionSetValue; /** Select the priority so that preferred customers or critical issues are handled quickly. */ PriorityCode: DevKit.WebApi.OptionSetValue; /** Contains the id of the process associated with the entity. 
*/ ProcessId: DevKit.WebApi.GuidValue; /** Choose whether an individual or a committee will be involved in the purchase process for the lead. */ PurchaseProcess: DevKit.WebApi.OptionSetValue; /** Choose how long the lead will likely take to make the purchase, so the sales team will be aware. */ PurchaseTimeFrame: DevKit.WebApi.OptionSetValue; /** Type comments about the qualification or scoring of the lead. */ QualificationComments: DevKit.WebApi.StringValue; /** Choose the opportunity that the lead was qualified on and then converted to. */ QualifyingOpportunityId: DevKit.WebApi.LookupValue; /** Related Campaign Response. */ RelatedObjectId: DevKit.WebApi.LookupValue; /** Type the annual revenue of the company associated with the lead to provide an understanding of the prospect's business. */ Revenue: DevKit.WebApi.MoneyValue; /** Value of the Annual Revenue in base currency. */ Revenue_Base: DevKit.WebApi.MoneyValueReadonly; /** Select the sales stage of this lead to aid the sales team in their efforts to convert this lead to an opportunity. */ SalesStage: DevKit.WebApi.OptionSetValue; /** Select the sales process stage for the lead to help determine the probability of the lead converting to an opportunity. */ SalesStageCode: DevKit.WebApi.OptionSetValue; /** Type the salutation of the primary contact for this lead to make sure the prospect is addressed correctly in sales calls, email messages, and marketing campaigns. */ Salutation: DevKit.WebApi.StringValue; /** Enter the date and time of the prospecting follow-up meeting with the lead. */ ScheduleFollowUp_Prospect_UtcDateOnly: DevKit.WebApi.UtcDateOnlyValue; /** Enter the date and time of the qualifying follow-up meeting with the lead. */ ScheduleFollowUp_Qualify_UtcDateOnly: DevKit.WebApi.UtcDateOnlyValue; /** Type the Standard Industrial Classification (SIC) code that indicates the lead's primary industry of business for use in marketing segmentation and demographic analysis. 
*/ SIC: DevKit.WebApi.StringValue; /** Choose the service level agreement (SLA) that you want to apply to the Lead record. */ SLAId: DevKit.WebApi.LookupValue; /** Last SLA that was applied to this case. This field is for internal use only. */ SLAInvokedId: DevKit.WebApi.LookupValueReadonly; SLAName: DevKit.WebApi.StringValueReadonly; /** Contains the id of the stage where the entity is located. */ StageId: DevKit.WebApi.GuidValue; /** Shows whether the lead is open, qualified, or disqualified. Qualified and disqualified leads are read-only and can't be edited unless they are reactivated. */ StateCode: DevKit.WebApi.OptionSetValue; /** Select the lead's status. */ StatusCode: DevKit.WebApi.OptionSetValue; /** Type a subject or descriptive name, such as the expected order, company name, or marketing source list, to identify the lead. */ Subject: DevKit.WebApi.StringValue; /** Number of users or conversations followed the record */ TeamsFollowed: DevKit.WebApi.IntegerValue; /** Type the work phone number for the primary contact for the lead. */ Telephone1: DevKit.WebApi.StringValue; /** Type the home phone number for the primary contact for the lead. */ Telephone2: DevKit.WebApi.StringValue; /** Type an alternate phone number for the primary contact for the lead. */ Telephone3: DevKit.WebApi.StringValue; /** Total time spent for emails (read and write) and meetings by me in relation to the lead record. */ TimeSpentByMeOnEmailAndMeetings: DevKit.WebApi.StringValueReadonly; /** For internal use only. */ TimeZoneRuleVersionNumber: DevKit.WebApi.IntegerValue; /** Choose the local currency for the record to make sure budgets are reported in the correct currency. */ TransactionCurrencyId: DevKit.WebApi.LookupValue; /** A comma separated list of string values representing the unique identifiers of stages in a Business Process Flow Instance in the order that they occur. 
*/ TraversedPath: DevKit.WebApi.StringValue; /** Time zone code that was in use when the record was created. */ UTCConversionTimeZoneCode: DevKit.WebApi.IntegerValue; /** Version Number */ VersionNumber: DevKit.WebApi.BigIntValueReadonly; /** Type the website URL for the company associated with this lead. */ WebSiteUrl: DevKit.WebApi.StringValue; /** Type the phonetic spelling of the lead's company name, if the name is specified in Japanese, to make sure the name is pronounced correctly in phone calls with the prospect. */ YomiCompanyName: DevKit.WebApi.StringValue; /** Type the phonetic spelling of the lead's first name, if the name is specified in Japanese, to make sure the name is pronounced correctly in phone calls with the prospect. */ YomiFirstName: DevKit.WebApi.StringValue; /** Combines and shows the lead's Yomi first and last names so the full phonetic name can be displayed in views and reports. */ YomiFullName: DevKit.WebApi.StringValueReadonly; /** Type the phonetic spelling of the lead's last name, if the name is specified in Japanese, to make sure the name is pronounced correctly in phone calls with the prospect. */ YomiLastName: DevKit.WebApi.StringValue; /** Type the phonetic spelling of the lead's middle name, if the name is specified in Japanese, to make sure the name is pronounced correctly in phone calls with the prospect. 
*/ YomiMiddleName: DevKit.WebApi.StringValue; } } declare namespace OptionSet { namespace Lead { enum Address1_AddressTypeCode { /** 1 */ Default_Value } enum Address1_ShippingMethodCode { /** 1 */ Default_Value } enum Address2_AddressTypeCode { /** 1 */ Default_Value } enum Address2_ShippingMethodCode { /** 1 */ Default_Value } enum BudgetStatus { /** 2 */ Can_Buy, /** 1 */ May_Buy, /** 0 */ No_Committed_Budget, /** 3 */ Will_Buy } enum IndustryCode { /** 1 */ Accounting, /** 2 */ Agriculture_and_Non_petrol_Natural_Resource_Extraction, /** 3 */ Broadcasting_Printing_and_Publishing, /** 4 */ Brokers, /** 5 */ Building_Supply_Retail, /** 6 */ Business_Services, /** 7 */ Consulting, /** 8 */ Consumer_Services, /** 9 */ Design_Direction_and_Creative_Management, /** 10 */ Distributors_Dispatchers_and_Processors, /** 11 */ Doctors_Offices_and_Clinics, /** 12 */ Durable_Manufacturing, /** 13 */ Eating_and_Drinking_Places, /** 14 */ Entertainment_Retail, /** 15 */ Equipment_Rental_and_Leasing, /** 16 */ Financial, /** 17 */ Food_and_Tobacco_Processing, /** 18 */ Inbound_Capital_Intensive_Processing, /** 19 */ Inbound_Repair_and_Services, /** 20 */ Insurance, /** 21 */ Legal_Services, /** 22 */ Non_Durable_Merchandise_Retail, /** 23 */ Outbound_Consumer_Service, /** 24 */ Petrochemical_Extraction_and_Distribution, /** 25 */ Service_Retail, /** 26 */ SIG_Affiliations, /** 27 */ Social_Services, /** 28 */ Special_Outbound_Trade_Contractors, /** 29 */ Specialty_Realty, /** 30 */ Transportation, /** 31 */ Utility_Creation_and_Distribution, /** 32 */ Vehicle_Retail, /** 33 */ Wholesale } enum InitialCommunication { /** 0 */ Contacted, /** 1 */ Not_Contacted } enum LeadQualityCode { /** 3 */ Cold, /** 1 */ Hot, /** 2 */ Warm } enum LeadSourceCode { /** 1 */ Advertisement, /** 2 */ Employee_Referral, /** 3 */ External_Referral, /** 10 */ Other, /** 4 */ Partner, /** 5 */ Public_Relations, /** 6 */ Seminar, /** 7 */ Trade_Show, /** 8 */ Web, /** 9 */ Word_of_Mouth } enum 
msdyn_ordertype { /** 192350000 */ Item_based, /** 690970002 */ Service_Maintenance_Based, /** 192350001 */ Work_based } enum Need { /** 2 */ Good_to_have, /** 0 */ Must_have, /** 3 */ No_need, /** 1 */ Should_have } enum PreferredContactMethodCode { /** 1 */ Any, /** 2 */ Email, /** 4 */ Fax, /** 5 */ Mail, /** 3 */ Phone } enum PriorityCode { /** 1 */ Default_Value } enum PurchaseProcess { /** 1 */ Committee, /** 0 */ Individual, /** 2 */ Unknown } enum PurchaseTimeFrame { /** 0 */ Immediate, /** 2 */ Next_Quarter, /** 1 */ This_Quarter, /** 3 */ This_Year, /** 4 */ Unknown } enum SalesStage { /** 0 */ Qualify } enum SalesStageCode { /** 1 */ Default_Value } enum StateCode { /** 2 */ Disqualified, /** 0 */ Open, /** 1 */ Qualified } enum StatusCode { /** 7 */ Canceled, /** 5 */ Cannot_Contact, /** 2 */ Contacted, /** 4 */ Lost, /** 1 */ New, /** 6 */ No_Longer_Interested, /** 3 */ Qualified } enum RollupState { /** 0 - Attribute value is yet to be calculated */ NotCalculated, /** 1 - Attribute value has been calculated per the last update time in <AttributeSchemaName>_Date attribute */ Calculated, /** 2 - Attribute value calculation lead to overflow error */ OverflowError, /** 3 - Attribute value calculation failed due to an internal error, next run of calculation job will likely fix it */ OtherError, /** 4 - Attribute value calculation failed because the maximum number of retry attempts to calculate the value were exceeded likely due to high number of concurrency and locking conflicts */ RetryLimitExceeded, /** 5 - Attribute value calculation failed because maximum hierarchy depth limit for calculation was reached */ HierarchicalRecursionLimitReached, /** 6 - Attribute value calculation failed because a recursive loop was detected in the hierarchy of the record */ LoopDetected } } } //{'JsForm':['Lead','Quick Create'],'JsWebApi':true,'IsDebugForm':true,'IsDebugWebApi':true,'Version':'2.12.31','JsFormVersion':'v2'}
the_stack
import { ArrayExt } from '@lumino/algorithm'; import { CommandRegistry } from '@lumino/commands'; import { ReadonlyJSONObject } from '@lumino/coreutils'; import { DisposableDelegate, IDisposable } from '@lumino/disposable'; import { VirtualElement } from '@lumino/virtualdom'; import { Menu } from '@lumino/widgets'; /** * Interface for disposable item menu */ export interface IDisposableMenuItem extends IDisposable, Menu.IItem {} /** * A common interface for extensible JupyterLab application menus. * * Plugins are still free to define their own menus in any way * they like. However, JupyterLab defines a few top-level * application menus that may be extended by plugins as well, * such as "Edit" and "View" */ export interface IRankedMenu extends IDisposable { /** * Add a group of menu items specific to a particular * plugin. * * The rank can be set for all items in the group using the * function argument or per item. * * @param items - the list of menu items to add. * @param rank - the default rank in the menu in which to insert the group. * @returns Disposable of the group */ addGroup(items: Menu.IItemOptions[], rank?: number): IDisposable; /** * Add a menu item to the end of the menu. * * @param options - The options for creating the menu item. * * @returns The disposable menu item added to the menu. */ addItem(options: IRankedMenu.IItemOptions): IDisposable; /** * A read-only array of the menu items in the menu. */ readonly items: ReadonlyArray<Menu.IItem>; /** * Menu rank */ readonly rank?: number; } /** * Namespace for JupyterLabMenu interfaces */ export namespace IRankedMenu { /** * Default menu item rank */ export const DEFAULT_RANK = 100; /** * An options object for creating a menu item. */ export interface IItemOptions extends Menu.IItemOptions { /** * Menu item rank */ rank?: number; } /** * An options object for creating a JupyterLab menu. 
*/ export interface IOptions extends Menu.IOptions { /** * Whether to include separators between the * groups that are added to the menu. * * Default: true */ includeSeparators?: boolean; /** * Menu rank */ rank?: number; } } /** * An extensible menu for JupyterLab application menus. */ export class RankedMenu extends Menu implements IRankedMenu { /** * Construct a new menu. * * @param options - Options for the lumino menu. */ constructor(options: IRankedMenu.IOptions) { super(options); this._rank = options.rank; this._includeSeparators = options.includeSeparators ?? true; } /** * The underlying Lumino menu. * * @deprecated since v3.1 * RankMenu inherits from Menu since v3.1 */ get menu(): Menu { return this; } /** * Menu rank. */ get rank(): number | undefined { return this._rank; } /** * Add a group of menu items specific to a particular * plugin. * * The rank can be set for all items in the group using the * function argument or per item. * * @param items - the list of menu items to add. * @param rank - the default rank in the menu in which to insert the group. * @returns Disposable of the group */ addGroup(items: IRankedMenu.IItemOptions[], rank?: number): IDisposable { if (items.length === 0) { return new DisposableDelegate(() => void 0); } const defaultRank = rank ?? IRankedMenu.DEFAULT_RANK; const sortedItems = items .map(item => { return { ...item, rank: item.rank ?? defaultRank }; }) .sort((a, b) => a.rank - b.rank); // Insert the plugin group into the menu. let insertIndex = this._ranks.findIndex(rank => sortedItems[0].rank < rank); if (insertIndex < 0) { insertIndex = this._ranks.length; // Insert at the end of the menu } // Keep an array of the menu items that have been created. const added: IDisposableMenuItem[] = []; // Insert a separator before the group. // Phosphor takes care of superfluous leading, // trailing, and duplicate separators. 
if (this._includeSeparators) { added.push( this.insertItem(insertIndex++, { type: 'separator', rank: defaultRank }) ); } // Insert the group. added.push( ...sortedItems.map(item => { return this.insertItem(insertIndex++, item); }) ); // Insert a separator after the group. if (this._includeSeparators) { added.push( this.insertItem(insertIndex++, { type: 'separator', rank: defaultRank }) ); } return new DisposableDelegate(() => { added.forEach(i => i.dispose()); }); } /** * Add a menu item to the end of the menu. * * @param options - The options for creating the menu item. * * @returns The menu item added to the menu. */ addItem(options: IRankedMenu.IItemOptions): IDisposableMenuItem { let insertIndex = -1; if (options.rank) { insertIndex = this._ranks.findIndex(rank => options.rank! < rank); } if (insertIndex < 0) { insertIndex = this._ranks.length; // Insert at the end of the menu } return this.insertItem(insertIndex, options); } /** * Remove all menu items from the menu. */ clearItems(): void { this._ranks.length = 0; super.clearItems(); } /** * Dispose of the resources held by the menu. */ dispose(): void { this._ranks.length = 0; super.dispose(); } /** * Get the rank of the item at index. * * @param index Item index. * @returns Rank of the item. */ getRankAt(index: number): number { return this._ranks[index]; } /** * Insert a menu item into the menu at the specified index. * * @param index - The index at which to insert the item. * * @param options - The options for creating the menu item. * * @returns The menu item added to the menu. * * #### Notes * The index will be clamped to the bounds of the items. */ insertItem( index: number, options: IRankedMenu.IItemOptions ): IDisposableMenuItem { const clampedIndex = Math.max(0, Math.min(index, this._ranks.length)); ArrayExt.insert( this._ranks, clampedIndex, options.rank ?? Math.max( IRankedMenu.DEFAULT_RANK, this._ranks[this._ranks.length - 1] ?? 
IRankedMenu.DEFAULT_RANK ) ); const item = super.insertItem(clampedIndex, options); return new DisposableMenuItem(item, this); } /** * Remove the item at a given index from the menu. * * @param index - The index of the item to remove. * * #### Notes * This is a no-op if the index is out of range. */ removeItemAt(index: number): void { ArrayExt.removeAt(this._ranks, index); super.removeItemAt(index); } private _includeSeparators: boolean; private _rank: number | undefined; private _ranks: number[] = []; } /** * Disposable Menu Item */ class DisposableMenuItem implements IDisposableMenuItem { /** * Create a disposable menu item from an item and the menu it belongs to * * @param item Menu item * @param menu Menu */ constructor(item: Menu.IItem, menu: Menu) { this._item = item; this._menu = menu; // dispose this item if the parent menu is disposed const dispose = (menu: Menu): void => { menu.disposed.disconnect(dispose); this.dispose(); }; this._menu.disposed.connect(dispose); } /** * Whether the menu item is disposed or not. */ get isDisposed(): boolean { return this._isDisposed; } /** * The type of the menu item. */ get type(): Menu.ItemType { return this._item.type; } /** * The command to execute when the item is triggered. */ get command(): string { return this._item.command; } /** * The arguments for the command. */ get args(): ReadonlyJSONObject { return this._item.args; } /** * The submenu for a `'submenu'` type item. */ get submenu(): Menu | null { return this._item.submenu; } /** * The display label for the menu item. */ get label(): string { return this._item.label; } /** * The mnemonic index for the menu item. */ get mnemonic(): number { return this._item.mnemonic; } /** * The icon renderer for the menu item. */ get icon(): | VirtualElement.IRenderer | undefined /* <DEPRECATED> */ | string /* </DEPRECATED> */ { return this._item.icon; } /** * The icon class for the menu item. 
*/ get iconClass(): string { return this._item.iconClass; } /** * The icon label for the menu item. */ get iconLabel(): string { return this._item.iconLabel; } /** * The display caption for the menu item. */ get caption(): string { return this._item.caption; } /** * The extra class name for the menu item. */ get className(): string { return this._item.className; } /** * The dataset for the menu item. */ get dataset(): CommandRegistry.Dataset { return this._item.dataset; } /** * Whether the menu item is enabled. */ get isEnabled(): boolean { return this._item.isEnabled; } /** * Whether the menu item is toggled. */ get isToggled(): boolean { return this._item.isToggled; } /** * Whether the menu item is visible. */ get isVisible(): boolean { return this.isVisible; } /** * The key binding for the menu item. */ get keyBinding(): CommandRegistry.IKeyBinding | null { return this._item.keyBinding; } /** * Dispose the menu item by removing it from its menu. */ dispose(): void { this._isDisposed = true; if (this._menu.isDisposed) { // Bail early return; } this._menu.removeItem(this._item); } private _isDisposed: boolean; private _item: Menu.IItem; private _menu: Menu; }
the_stack
import * as _ from 'lodash';

import { callable } from './callable';
import {
  ComponentSlotStyle,
  ComponentSlotStylesInput,
  ComponentSlotStylesPrepared,
  ComponentVariablesInput,
  ComponentVariablesObject,
  ComponentVariablesPrepared,
  FontFace,
  SiteVariablesInput,
  SiteVariablesPrepared,
  StaticStyle,
  ThemeAnimation,
  ThemeComponentStylesInput,
  ThemeComponentStylesPrepared,
  ThemeComponentVariablesInput,
  ThemeComponentVariablesPrepared,
  ThemeInput,
  ThemePrepared,
} from './types';
import { isEnabled as isDebugEnabled } from './debugEnabled';
import { deepmerge } from './deepmerge';
import { objectKeyToValues } from './objectKeysToValues';
import { withDebugId } from './withDebugId';

// NOTE: every merge helper below ships in a __PROD and a __DEV flavor.
// The __DEV flavor additionally threads `_debug` / `_debugId` metadata through
// the merged values (presumably consumed by a <Debug /> panel — see the
// comments referencing it below) and falls back to the __PROD implementation
// when debugging is disabled. The exported name picks the flavor from
// process.env.NODE_ENV at module load time.

export const emptyTheme: ThemePrepared = {
  siteVariables: {
    fontSizes: {},
  },
  componentVariables: {},
  componentStyles: {},
  fontFaces: [],
  staticStyles: [],
  animations: {},
};

// ----------------------------------------
// Component level merge functions
// ----------------------------------------

/**
 * Merges a single component's styles (keyed by component part) with another component's styles.
 */
export const mergeComponentStyles__PROD: typeof mergeComponentStyles = (stylesA, stylesB) => {
  const result = {};

  // First pass: slots that only exist in A (or are identical in both) are
  // normalized to callable (prepared) form. Slots present in both with
  // different values are intentionally left for the second pass to merge.
  if (stylesA) {
    Object.keys(stylesA).forEach(partName => {
      const slotA = stylesA[partName];
      const slotB = stylesB?.[partName];

      // if there is no source, merging is a no-op, skip it
      if (typeof slotA === 'undefined' || slotA === null) {
        return;
      }

      // no target means source doesn't need to merge onto anything
      // just ensure source is callable (prepared format)
      if (typeof slotB === 'undefined' || slotB === null) {
        result[partName] = typeof slotA === 'function' ? slotA : () => slotA;
        return;
      }

      if (slotA === slotB) {
        result[partName] = typeof slotA === 'function' ? slotA : () => slotA;
      }
    });
  }

  // Second pass: slots only in B, plus the actual A+B merge.
  if (stylesB) {
    Object.keys(stylesB).forEach(partName => {
      const slotA = stylesA?.[partName];
      const slotB = stylesB[partName];

      // if there is no source, merging is a no-op, skip it
      if (typeof slotB === 'undefined' || slotB === null) {
        return;
      }

      // no target means source doesn't need to merge onto anything
      // just ensure source is callable (prepared format)
      if (typeof slotA === 'undefined' || slotA === null) {
        result[partName] = typeof slotB === 'function' ? slotB : () => slotB;
        return;
      }

      // Identical slots were already handled by the first pass.
      if (slotA === slotB) {
        return;
      }

      // We have both target and source, replace with merge fn
      result[partName] = function mergedStyleFunction(styleParam) {
        // originalTarget is always prepared, fn is guaranteed
        return _.merge(
          typeof slotA === 'function' ? slotA(styleParam) : slotA,
          typeof slotB === 'function' ? slotB(styleParam) : slotB,
        );
      };
    });
  }

  return result;
};

export const mergeComponentStyles__DEV: typeof mergeComponentStyles = (stylesA, stylesB) => {
  if (!isDebugEnabled) {
    return mergeComponentStyles__PROD(stylesA, stylesB);
  }

  const mergedKeys = [...(stylesA ? Object.keys(stylesA) : []), ...(stylesB ? Object.keys(stylesB) : [])];
  const result = {};

  mergedKeys.forEach(slotName => {
    // Wrap each side so that resolving a slot always yields a `_debug` trail.
    const slotA = styleParam => {
      const { _debug = undefined, ...styles } = callable(stylesA?.[slotName])(styleParam) || {};

      // new object required to prevent circular JSON structure error in <Debug />
      return { ...styles, _debug: _debug || [{ styles: { ...styles }, debugId: stylesA?._debugId }] };
    };
    const slotB = styleParam => {
      const { _debug = undefined, ...styles } = callable(stylesB?.[slotName])(styleParam) || {};

      // new object required to prevent circular JSON structure error in <Debug />
      return { ...styles, _debug: _debug || [{ styles: { ...styles }, debugId: stylesB?._debugId }] };
    };

    if (stylesA?.[slotName] && stylesB?.[slotName]) {
      // We have both, replace with merge fn
      result[slotName] = styleParam => {
        // slot* are always prepared, fn is guaranteed, _debug always exists
        const { _debug: debugA, ...resolvedStylesA } = slotA(styleParam);
        const { _debug: debugB, ...resolvedStylesB } = slotB(styleParam);

        const merged = _.merge(resolvedStylesA, resolvedStylesB);
        // Concatenate both debug trails so the full merge history is visible.
        merged._debug = debugA.concat(debugB || { styles: resolvedStylesB, debugId: resolvedStylesB._debugId });
        return merged;
      };
    } else if (stylesA?.[slotName]) {
      result[slotName] = slotA;
    } else if (stylesB?.[slotName]) {
      result[slotName] = slotB;
    }
  });

  return result;
};

export const mergeComponentStyles: (
  stylesA: ComponentSlotStylesInput | null | undefined,
  stylesB: ComponentSlotStylesInput | null | undefined,
) => ComponentSlotStylesPrepared =
  process.env.NODE_ENV === 'production' ? mergeComponentStyles__PROD : mergeComponentStyles__DEV;

/**
 * Merges a single component's variables with another component's variables.
 */
export const mergeComponentVariables__PROD = (
  variablesA: ComponentVariablesInput | undefined,
  variablesB: ComponentVariablesInput | undefined,
): ComponentVariablesPrepared => {
  if (variablesA && variablesB) {
    // Both sides present: defer resolution until siteVariables are known,
    // then deep-merge B over A.
    return function mergedComponentVariables(
      siteVariables: SiteVariablesPrepared | undefined,
    ): ComponentVariablesObject {
      const resolvedVariablesA = typeof variablesA === 'function' ? variablesA(siteVariables) : variablesA || {};
      const resolvedVariablesB = typeof variablesB === 'function' ? variablesB(siteVariables) : variablesB || {};

      return deepmerge(resolvedVariablesA, resolvedVariablesB);
    };
  }

  // Only one side present: just normalize it to callable (prepared) form.
  if (variablesA) {
    return typeof variablesA === 'function' ? variablesA : () => variablesA || {};
  }

  if (variablesB) {
    return typeof variablesB === 'function' ? variablesB : () => variablesB || {};
  }

  return () => ({});
};

export const mergeComponentVariables__DEV: typeof mergeComponentVariables__PROD = (
  variablesA,
  variablesB,
): ComponentVariablesPrepared => {
  if (!isDebugEnabled) {
    return mergeComponentVariables__PROD(variablesA, variablesB);
  }

  const initial = () => ({});

  // Fold both inputs into a single prepared function that accumulates a
  // `_debug` trail entry per merged input.
  return [variablesA, variablesB].reduce<ComponentVariablesPrepared>((acc, next) => {
    return siteVariables => {
      const { _debug = [], ...accumulatedVariables } = acc(siteVariables);
      const {
        _debug: computedDebug = undefined,
        _debugId = undefined,
        ...computedComponentVariables
      } = callable(next)(siteVariables) || {};

      const merged = deepmerge(accumulatedVariables, computedComponentVariables);
      merged._debug = _debug.concat(
        computedDebug || {
          resolved: computedComponentVariables,
          debugId: _debugId,
          // `_invertedKeys` maps resolved values back to siteVariable paths so
          // the debug panel can show which siteVariable produced each value.
          input: siteVariables
            ? siteVariables._invertedKeys && callable(next)(siteVariables._invertedKeys)
            : callable(next)(),
        },
      );
      return merged;
    };
  }, initial);
};

export const mergeComponentVariables =
  process.env.NODE_ENV === 'production' ? mergeComponentVariables__PROD : mergeComponentVariables__DEV;

// ----------------------------------------
// Theme level merge functions
// ----------------------------------------

/**
 * Site variables can safely be merged at each Provider in the tree.
 * They are flat objects and do not depend on render-time values, such as props.
 */
export const mergeSiteVariables__PROD = (
  siteVariablesA: SiteVariablesInput | undefined,
  siteVariablesB: SiteVariablesInput | undefined,
): SiteVariablesPrepared => {
  const initial: SiteVariablesPrepared = {
    fontSizes: {},
  };

  if (siteVariablesA && siteVariablesB) {
    return deepmerge(initial, siteVariablesA, siteVariablesB);
  }

  if (siteVariablesA) {
    return { ...initial, ...siteVariablesA };
  }

  return { ...initial, ...siteVariablesB };
};

export const mergeSiteVariables__DEV: typeof mergeSiteVariables__PROD = (
  siteVariablesA,
  siteVariablesB,
): SiteVariablesPrepared => {
  if (!isDebugEnabled) {
    return mergeSiteVariables__PROD(siteVariablesA, siteVariablesB);
  }

  const initial: SiteVariablesPrepared = {
    fontSizes: {},
  };

  return [siteVariablesA, siteVariablesB].reduce<SiteVariablesPrepared>((acc, next) => {
    const { _debug = [], ...accumulatedSiteVariables } = acc;
    const {
      _debug: computedDebug = undefined,
      _invertedKeys = undefined,
      _debugId = undefined,
      ...nextSiteVariables
    } = next || {};

    // Drop the stale inverted-key index before merging; it is recomputed below.
    const merged = deepmerge({ ...accumulatedSiteVariables, _invertedKeys: undefined }, nextSiteVariables);
    merged._debug = _debug.concat(computedDebug || { resolved: nextSiteVariables, debugId: _debugId });
    merged._invertedKeys = _invertedKeys || objectKeyToValues(merged, key => `siteVariables.${key}`);
    return merged;
  }, initial);
};

export const mergeSiteVariables =
  process.env.NODE_ENV === 'production' ? mergeSiteVariables__PROD : mergeSiteVariables__DEV;

/**
 * Component variables can be objects, functions, or an array of these.
 * The functions must be called with the final result of siteVariables, otherwise
 * the component variable objects would have no ability to apply siteVariables.
 * Therefore, componentVariables must be resolved by the component at render time.
 * We instead pass down call stack of component variable functions to be resolved later.
 */
export const mergeThemeVariables__PROD = (
  themeComponentVariablesA: ThemeComponentVariablesInput | undefined,
  themeComponentVariablesB: ThemeComponentVariablesInput | undefined,
): ThemeComponentVariablesPrepared => {
  if (themeComponentVariablesA && themeComponentVariablesB) {
    // Union of display names from both themes, merged pairwise.
    const displayNames = _.union(..._.map([themeComponentVariablesA, themeComponentVariablesB], _.keys));
    return displayNames.reduce((componentVariables, displayName) => {
      componentVariables[displayName] = mergeComponentVariables(
        themeComponentVariablesA[displayName],
        themeComponentVariablesB[displayName],
      );
      return componentVariables;
    }, {});
  }

  // Single-sided input: still run each entry through mergeComponentVariables
  // so every value ends up in prepared (callable) form.
  if (themeComponentVariablesA) {
    return Object.fromEntries(
      Object.entries(themeComponentVariablesA).map(([displayName, variables]) => {
        return [displayName, mergeComponentVariables(undefined, variables)];
      }),
    );
  }

  if (themeComponentVariablesB) {
    return Object.fromEntries(
      Object.entries(themeComponentVariablesB).map(([displayName, variables]) => {
        return [displayName, mergeComponentVariables(undefined, variables)];
      }),
    );
  }

  return {};
};

export const mergeThemeVariables__DEV: typeof mergeThemeVariables__PROD = (
  themeComponentVariablesA,
  themeComponentVariablesB,
) => {
  if (!isDebugEnabled) {
    return mergeThemeVariables__PROD(themeComponentVariablesA, themeComponentVariablesB);
  }

  const displayNames = _.union(..._.map([themeComponentVariablesA, themeComponentVariablesB], _.keys));
  return displayNames.reduce((componentVariables, displayName) => {
    // Tag each side with its theme-level _debugId so merge provenance survives.
    componentVariables[displayName] = mergeComponentVariables(
      themeComponentVariablesA && withDebugId(themeComponentVariablesA[displayName], themeComponentVariablesA._debugId),
      themeComponentVariablesB && withDebugId(themeComponentVariablesB[displayName], themeComponentVariablesB._debugId),
    );
    return componentVariables;
  }, {});
};

export const mergeThemeVariables =
  process.env.NODE_ENV === 'production' ? mergeThemeVariables__PROD : mergeThemeVariables__DEV;

/**
 * See mergeThemeVariables() description.
 * Component styles adhere to the same pattern as component variables, except
 * that they return style objects.
 */
const mergeThemeStyles__PROD = (
  themeComponentStylesA: ThemeComponentStylesInput | undefined,
  themeComponentStylesB: ThemeComponentStylesInput | undefined,
): ThemeComponentStylesPrepared => {
  if (themeComponentStylesA && themeComponentStylesB) {
    const displayNames = _.union(..._.map([themeComponentStylesA, themeComponentStylesB], _.keys));
    return displayNames.reduce((themeComponentStyles, displayName) => {
      themeComponentStyles[displayName] = mergeComponentStyles(
        themeComponentStylesA[displayName],
        themeComponentStylesB[displayName],
      );
      return themeComponentStyles;
    }, {});
  }

  if (themeComponentStylesA) {
    return Object.fromEntries(
      Object.entries(themeComponentStylesA).map(([displayName, styles]) => {
        return [displayName, mergeComponentStyles(undefined, styles)];
      }),
    );
  }

  if (themeComponentStylesB) {
    return Object.fromEntries(
      Object.entries(themeComponentStylesB).map(([displayName, styles]) => {
        return [displayName, mergeComponentStyles(undefined, styles)];
      }),
    );
  }

  return {};
};

const mergeThemeStyles__DEV: typeof mergeThemeStyles__PROD = (componentStylesA, componentStylesB) => {
  if (!isDebugEnabled) {
    return mergeThemeStyles__PROD(componentStylesA, componentStylesB);
  }

  const initial: ThemeComponentStylesPrepared = {};

  return [componentStylesA, componentStylesB].reduce<ThemeComponentStylesPrepared>((themeComponentStyles, next) => {
    _.forEach(next, (stylesByPart, displayName) => {
      themeComponentStyles[displayName] = mergeComponentStyles(
        themeComponentStyles[displayName],
        withDebugId(stylesByPart, (next as ThemeComponentStylesPrepared & { _debugId: string })._debugId),
      );
    });

    return themeComponentStyles;
  }, initial);
};

export const mergeThemeStyles = process.env.NODE_ENV === 'production' ? mergeThemeStyles__PROD : mergeThemeStyles__DEV;

// Font faces and static styles accumulate across themes (no de-duplication).
export const mergeFontFaces = (fontFacesA: FontFace[] | undefined, fontFacesB: FontFace[] | undefined): FontFace[] => {
  return [...(fontFacesA || []), ...(fontFacesB || [])];
};

export const mergeStaticStyles = (
  staticStylesA: StaticStyle[] | undefined,
  staticStylesB: StaticStyle[] | undefined,
): StaticStyle[] => {
  return [...(staticStylesA || []), ...(staticStylesB || [])];
};

// Animations merge by key: later themes shallowly override earlier ones.
export const mergeAnimations = (
  animationsA: { [key: string]: ThemeAnimation } | undefined,
  animationsB: { [key: string]: ThemeAnimation } | undefined,
): { [key: string]: ThemeAnimation } => {
  return { ...animationsA, ...animationsB };
};

/**
 * Composes several slot styles into one function that resolves each source
 * (lazily, with the given params) and deep-merges them left to right.
 */
export const mergeStyles = (...sources: ComponentSlotStyle[]) => {
  return (...args) => {
    return sources.reduce((acc, next) => {
      return _.merge(acc, callable(next)(...args));
    }, {});
  };
};

/**
 * Merges two themes field by field using the specialized helpers above,
 * propagating each theme's `_debugId` into the merged debug metadata.
 */
export const mergeThemes = (
  themeA: ThemeInput | ThemePrepared | undefined,
  themeB: ThemeInput | ThemePrepared | undefined,
): ThemePrepared => {
  const debugIdA = themeA?.['_debugId'];
  const debugIdB = themeB?.['_debugId'];

  if (themeA && themeB) {
    return {
      animations: mergeAnimations(themeA.animations, themeB.animations),
      componentVariables: mergeThemeVariables(
        withDebugId(themeA.componentVariables, debugIdA),
        withDebugId(themeB.componentVariables, debugIdB),
      ),
      componentStyles: mergeThemeStyles(
        withDebugId(themeA.componentStyles, debugIdA),
        withDebugId(themeB.componentStyles, debugIdB),
      ),
      fontFaces: mergeFontFaces(themeA.fontFaces, themeB.fontFaces),
      siteVariables: mergeSiteVariables(
        withDebugId(themeA.siteVariables, debugIdA),
        withDebugId(themeB.siteVariables, debugIdB),
      ),
      staticStyles: mergeStaticStyles(themeA.staticStyles, themeB.staticStyles),
    };
  }

  // Single theme: still run everything through the merge helpers so the
  // result is fully prepared (callable variables/styles, initialized fields).
  if (themeA) {
    return {
      animations: mergeAnimations(undefined, themeA.animations),
      componentVariables: mergeThemeVariables(undefined, withDebugId(themeA.componentVariables, debugIdA)),
      componentStyles: mergeThemeStyles(undefined, withDebugId(themeA.componentStyles, debugIdA)),
      fontFaces: mergeFontFaces(undefined, themeA.fontFaces),
      siteVariables: mergeSiteVariables(undefined, withDebugId(themeA.siteVariables, debugIdA)),
      staticStyles: mergeStaticStyles(undefined, themeA.staticStyles),
    };
  }

  if (themeB) {
    return {
      animations: mergeAnimations(undefined, themeB.animations),
      componentVariables: mergeThemeVariables(undefined, withDebugId(themeB.componentVariables, debugIdB)),
      componentStyles: mergeThemeStyles(undefined, withDebugId(themeB.componentStyles, debugIdB)),
      fontFaces: mergeFontFaces(undefined, themeB.fontFaces),
      siteVariables: mergeSiteVariables(undefined, withDebugId(themeB.siteVariables, debugIdB)),
      staticStyles: mergeStaticStyles(undefined, themeB.staticStyles),
    };
  }

  return { ...emptyTheme };
};
the_stack
const DEFAULT_FONT_COLOR = "#505058"; const DEFAULT_SHADOW_COLOR = "#A0A0A8"; const DEFAULT_FONT_SIZE = 24; //=========================================================================== // #region Bitmap, change the text draw function interface Bitmap { initialize(width: number, height: number): void; shadowColor: String; } let Bitmap_initialize = Bitmap.prototype.initialize; Bitmap.prototype.initialize = function(width, height) { Bitmap_initialize.call(this, width, height); this.outlineWidth = 0; }; /** * @description Use multiples text renders to generate a text shadow. */ Bitmap.prototype._drawTextBody = function(text, tx, ty, maxWidth) { var context = this._context; if (this.outlineWidth == 0) { context.fillStyle = this.shadowColor || DEFAULT_SHADOW_COLOR; context.fillText(text, tx, ty + 1, maxWidth); context.fillText(text, tx, ty + 2, maxWidth); context.fillText(text, tx + 1, ty, maxWidth); context.fillText(text, tx + 2, ty, maxWidth); context.fillText(text, tx + 1, ty + 1, maxWidth); context.fillText(text, tx + 2, ty + 2, maxWidth); } context.fillStyle = this.textColor || DEFAULT_FONT_COLOR; context.fillText(text, tx, ty, maxWidth); }; // #endregion //=========================================================================== //=========================================================================================== // #region Window, Overwrite the window creation function, to use the Pokémon's windowskins Object.defineProperty(Window.prototype, "frameskin", { get: function() { return this._frameskin; }, set: function(value) { if (this._frameskin !== value) { this._frameskin = value; this._frameskin.addLoadListener(this._onWindowskinLoad.bind(this)); } }, configurable: true }); interface Window { _onWindowskinLoad(): void; _createAllParts(): void; _refreshBack(): void; _refreshFrame(): void; _refreshContents(): void; _updateContents(): void; _refreshCursor(): void; _updateCursor(): void; _refreshPauseSign(): void; changeFrameSkin(filename: string): void; } 
Window.prototype._onWindowskinLoad = function() { this._frameX1 = 0; this._frameX2 = 0; this._frameX3 = 0; this._frameY1 = 0; this._frameY2 = 0; this._frameY3 = 0; if (this._frameskin.width === 48 && this._frameskin.height === 48) { this._frameX1 = this._frameX2 = this._frameX3 = this._frameY1 = this._frameY2 = this._frameY3 = 16; } if (this._frameskin.width === 96 && this._frameskin.height === 48) { this._frameX1 = 32; this._frameX2 = 16; this._frameX3 = 48; this._frameY1 = 16; this._frameY2 = 16; this._frameY3 = 16; } this._paddingLeft = this._frameX1; this._paddingTop = this._frameY1; this._paddingRight = this._frameX3; this._paddingBottom = this._frameY3; this._refreshAllParts(); }; let _Window_createAllParts = Window.prototype._createAllParts; Window.prototype._createAllParts = function() { _Window_createAllParts.call(this); this._margin = 24; }; Window.prototype._refreshBack = function() { var m = this._margin; var w = this._width - (this._paddingLeft + this._paddingRight); var h = this._height - (this._paddingTop + this._paddingBottom); var bitmap = new Bitmap(w, h); this._windowBackSprite.bitmap = bitmap; this._windowBackSprite.move(this._paddingLeft, this._paddingTop); bitmap.blt(this._frameskin, this._frameX1, this._frameY1, this._frameX2, this._frameY2, 0, 0, w, h); var tone = this._colorTone; bitmap.adjustTone(tone[0], tone[1], tone[2]); }; Window.prototype._refreshFrame = function() { var w = this._width; var h = this._height; var m = 16; var bitmap = new Bitmap(w, h); if (this._frameskin.width === 48 && this._frameskin.height === 48) { var skin = this._frameskin; bitmap.blt(skin, 0, this._frameY1, this._frameX1, this._frameY2, 0, this._frameY1, this._frameX1, h - (this._frameY1 + this._frameY3)); // left bar bitmap.blt(skin, this._frameX1, 0, this._frameX2, this._frameY1, this._frameX1, 0, w - (this._frameX1 + this._frameX3), this._frameY1); // top bar bitmap.blt( skin, this._frameX1 + this._frameX2, this._frameY1, this._frameX3, this._frameY2, w - 
this._frameX3, this._frameY1, this._frameX3, h - (this._frameY1 + this._frameY3) ); // right bar bitmap.blt( skin, this._frameX1, this._frameY1 + this._frameY2, this._frameX2, this._frameY3, this._frameX1, h - this._frameY3, w - (this._frameX1 + this._frameX3), this._frameY3 ); // bottom bar bitmap.blt(skin, 0, 0, this._frameX1, this._frameY1, 0, 0, this._frameX1, this._frameY1); // topleft bitmap.blt(skin, this._frameX1 + this._frameX2, 0, this._frameX3, this._frameY1, w - this._frameX3, 0, this._frameX3, this._frameY1); // topright bitmap.blt( skin, this._frameX1 + this._frameX2, this._frameY1 + this._frameY2, this._frameX3, this._frameY3, w - this._frameX3, h - this._frameY3, this._frameX3, this._frameY3 ); // bottomright bitmap.blt(skin, 0, this._frameY1 + this._frameY2, this._frameX1, this._frameY3, 0, h - this._frameY3, this._frameX1, this._frameY3); // bottomleft } if (this._frameskin.width === 96 && this._frameskin.height === 48) { var skin = this._frameskin; bitmap.blt(skin, 0, this._frameY1, this._frameX1, this._frameY2, 0, this._frameY1, this._frameX1, h - (this._frameY1 + this._frameY3)); // left bar bitmap.blt(skin, this._frameX1, 0, this._frameX2, this._frameY1, this._frameX1, 0, w - (this._frameX1 + this._frameX3), this._frameY1); // top bar bitmap.blt( skin, this._frameX1 + this._frameX2, this._frameY1, this._frameX3, this._frameY2, w - this._frameX3, this._frameY1, this._frameX3, h - (this._frameY1 + this._frameY3) ); // right bar bitmap.blt( skin, this._frameX1, this._frameY1 + this._frameY2, this._frameX2, this._frameY3, this._frameX1, h - this._frameY3, w - (this._frameX1 + this._frameX3), this._frameY3 ); // bottom bar bitmap.blt(skin, 0, 0, this._frameX1, this._frameY1, 0, 0, this._frameX1, this._frameY1); // topleft bitmap.blt(skin, this._frameX1 + this._frameX2, 0, this._frameX3, this._frameY1, w - this._frameX3, 0, this._frameX3, this._frameY1); // topright bitmap.blt( skin, this._frameX1 + this._frameX2, this._frameY1 + this._frameY2, 
this._frameX3, this._frameY3, w - this._frameX3, h - this._frameY3, this._frameX3, this._frameY3 ); // bottomright bitmap.blt(skin, 0, this._frameY1 + this._frameY2, this._frameX1, this._frameY3, 0, h - this._frameY3, this._frameX1, this._frameY3); // bottomleft } this._windowFrameSprite.bitmap = bitmap; this._margin = m; }; Window.prototype._refreshContents = function() { this._windowContentsSprite.move(this._paddingLeft, this._paddingTop); }; Window.prototype._updateContents = function() { var w = this._width - (this._paddingLeft + this._paddingRight); var h = this._height - (this._paddingTop + this._paddingBottom); if (w > 0 && h > 0) { this._windowContentsSprite.setFrame(this.origin.x, this.origin.y, w, h); this._windowContentsSprite.visible = this.isOpen(); } else { this._windowContentsSprite.visible = false; } }; Window.prototype._refreshCursor = function() { var pad = this._padding; var x = this._cursorRect.x + pad - this.origin.x; var y = this._cursorRect.y + pad - this.origin.y; var w = this._cursorRect.width; var h = this._cursorRect.height; var m = 4; var x1 = Math.max(x, pad); var y1 = Math.max(y, pad); var w2 = Math.min(w, this._width - pad - x1); var h2 = Math.min(h, this._height - pad - y1); var bitmap = new Bitmap(w2, h2); this._windowCursorSprite.bitmap = ImageManager.loadPicture("selarrow"); this._windowCursorSprite.setFrame(0, 0, w2, h2); this._windowCursorSprite.move(x1, y1 + 2); }; Window.prototype._updateCursor = function() { var blinkCount = this._animationCount % 40; var cursorOpacity = this.contentsOpacity; // if (this.active) { // if (blinkCount < 20) { // cursorOpacity -= blinkCount * 8; // } else { // cursorOpacity -= (40 - blinkCount) * 8; // } // } this._windowCursorSprite.alpha = cursorOpacity / 255; this._windowCursorSprite.visible = this.isOpen(); }; Window.prototype._refreshPauseSign = function() { var sx = 144; var sy = 96; var p = 24; this._windowPauseSignSprite.bitmap = this._windowskin; this._windowPauseSignSprite.anchor.x = 
0.5; this._windowPauseSignSprite.anchor.y = 1; this._windowPauseSignSprite.move(this._width - this._paddingRight / 2, this._height - 16); this._windowPauseSignSprite.setFrame(sx, sy, p, p); this._windowPauseSignSprite.alpha = 0; }; Window.prototype.changeFrameSkin = function(filename) { this._frameskin = ImageManager.loadSystem(filename); this._frameskin.addLoadListener(this._onWindowskinLoad.bind(this)); }; // #endregion //=========================================================================================== //=========================================================================== // #region Window_Base, overwritefunction tu use custom windowskin interface Window_Base { loadFrameSkin(): void; } Window_Base.prototype.loadWindowskin = function() { this.loadFrameSkin(); this.windowskin = ImageManager.loadSystem("Window"); }; Window_Base.prototype.loadFrameSkin = function() { this.frameskin = ImageManager.loadSystem("choice 1"); }; Window_Base.prototype.standardFontSize = function() { return DEFAULT_FONT_SIZE; }; Window_Base.prototype.standardPadding = function() { return 16; }; Window_Base.prototype.textPadding = function() { return 10; }; Window_Message.prototype.numVisibleRows = function() { return 2; }; Window_Base.prototype.standardBackOpacity = function() { return 255; }; Window_Base.prototype.refreshDimmerBitmap = function() {}; Window_Base.prototype.setBackgroundType = function(type) {}; Window_Base.prototype.showBackgroundDimmer = function() {}; Window_Base.prototype.hideBackgroundDimmer = function() {}; Window_Base.prototype.updateBackgroundDimmer = function() {}; let _Window_Base_textColor = Window_Base.prototype.textColor; Window_Base.prototype.textColor = function(n) { if (n === 0) return DEFAULT_FONT_COLOR; return _Window_Base_textColor.call(this, n); }; // #endregion //=========================================================================== //=========================================================================== // #region 
Window_Message, Message Window use diferent windowskin Window_Message.prototype.loadFrameSkin = function() { this.frameskin = ImageManager.loadSystem("speech hgss 1"); }; // #endregion //=========================================================================== //=========================================================================== // #region Window_Selectable, use Pokémon's select cursor Window_Selectable.prototype._updateContents = function() { var w = this._width - (this._paddingLeft + this._paddingRight); var h = this._height - (this._paddingTop + this._paddingBottom); if (w > 0 && h > 0) { this._windowContentsSprite.setFrame(this.origin.x - 8, this.origin.y, w, h); this._windowContentsSprite.visible = this.isOpen(); } else { this._windowContentsSprite.visible = false; } }; // #endregion //===========================================================================
the_stack
import { Binding, Scope, ICollectionObserverSplice, ObserverLocator, InternalCollectionObserver, OverrideContext } from 'aurelia-binding'; import { TaskQueue } from 'aurelia-task-queue'; import { View, ViewSlot, Controller } from 'aurelia-templating'; import { RepeatStrategy, AbstractRepeater } from 'aurelia-templating-resources'; import { VirtualRepeat } from './virtual-repeat'; export interface IScrollNextScrollContext { topIndex: number; isAtBottom: boolean; isAtTop: boolean; } /**@internal */ declare module 'aurelia-binding' { interface ObserverLocator { taskQueue: TaskQueue; } interface OverrideContext { $index: number; $scrollContext: IScrollNextScrollContext; $first: boolean; $last: boolean; $middle: boolean; $odd: boolean; $even: boolean; } } /**@internal */ declare module 'aurelia-templating' { interface View { firstChild: Node & { au?: any }; lastChild: Node & { au?: any }; bindings: Binding[]; controllers: Controller[]; } interface Controller { boundProperties: { binding: Binding }[]; } } export interface IVirtualRepeater extends AbstractRepeater { items: any; local?: string; /** * First view index, for proper follow up calculations */ $first: number; /** * Defines how many items there should be for a given index to be considered at edge */ edgeDistance: number; /** * Template handling strategy for this repeat. 
*/ templateStrategy: ITemplateStrategy; /** * The element hosting the scrollbar for this repeater */ scrollerEl: HTMLElement; /** * Top buffer element, used to reflect the visualization of amount of items `before` the first visible item * @internal */ topBufferEl: HTMLElement; /** * Bot buffer element, used to reflect the visualization of amount of items `after` the first visible item */ bottomBufferEl: HTMLElement; /** * Height of top buffer to properly push the visible rendered list items into right position * Usually determined by `_first` visible index * `itemHeight` */ topBufferHeight: number; /** * Height of bottom buffer to properly push the visible rendered list items into right position */ bottomBufferHeight: number; /** * Height of each item. Calculated based on first item */ itemHeight: number; /** * Calculate current scrolltop position */ distanceToTop: number; /** * Number indicating minimum elements required to render to fill up the visible viewport */ minViewsRequired: number; // /** // * Indicates whether virtual repeat attribute is inside a fixed height container with overflow // * // * This helps identifies place to add scroll event listener // */ // fixedHeightContainer: boolean; /** * ViewSlot that encapsulates the repeater views operations in the template */ readonly viewSlot: ViewSlot; /** * Aurelia change handler by convention for property `items`. Used to properly determine action * needed when items value has been changed */ itemsChanged(): void; /** * Get first visible view */ firstView(): IView | null; /** * Get last visible view */ lastView(): IView | null; /** * Get index of first visible view */ firstViewIndex(): number; /** * Get index of last visible view */ lastViewIndex(): number; /** * Virtual repeater normally employs scroll handling buffer for performance reasons. * As syncing between scrolling state and visible views could be expensive. 
*/ enableScroll(): void; /** * Invoke infinite scroll next function expression with currently bound scope of the repeater */ getMore(topIndex: number, isNearTop: boolean, isNearBottom: boolean, force?: boolean): void; /** * Get the real scroller element of the DOM tree this repeat resides in */ getScroller(): HTMLElement; /** * Get scrolling information of the real scroller element of the DOM tree this repeat resides in */ getScrollerInfo(): IScrollerInfo; /** * Observe scroller element to react upon sizing changes */ observeScroller(scrollerEl: HTMLElement): void; /** * Dispose scroller content size observer, if has * Dispose all event listeners related to sizing of scroller, if any */ unobserveScroller(): void; /** * Signal the repeater to reset all its internal calculation states. * Typically used when items value is null, undefined, empty collection. * Or the repeater has been detached */ resetCalculation(): void; /** * Update buffer elements height/width with corresponding * @param skipUpdate `true` to signal this repeater that the update won't trigger scroll event */ updateBufferElements(skipUpdate?: boolean): void; } export type RepeatableValue = number | any[] | Map<any, any> | Set<any>; export interface IVirtualRepeatStrategy<T extends RepeatableValue = RepeatableValue> { /** * create first item to calculate the heights */ createFirstRow(repeat: IVirtualRepeater): IView; /** * Count the number of the items in the repeatable value `items` */ count(items: T): number; /** * Calculate required variables for a virtual repeat instance to operate properly * * @returns `false` to notify that calculation hasn't been finished */ initCalculation(repeat: IVirtualRepeater, items: T): VirtualizationCalculation; /** * Handle special initialization if any, depends on different strategy */ onAttached(repeat: IVirtualRepeater): void; /** * Calculate the start and end index of a repeat based on its container current scroll position */ getViewRange(repeat: IVirtualRepeater, 
scrollerInfo: IScrollerInfo): [number, number]; /** * Returns true if first index is approaching start of the collection * Virtual repeat can use this to invoke infinite scroll next */ isNearTop(repeat: IVirtualRepeater, firstIndex: number): boolean; /** * Returns true if last index is approaching end of the collection * Virtual repeat can use this to invoke infinite scroll next */ isNearBottom(repeat: IVirtualRepeater, lastIndex: number): boolean; /** * Update repeat buffers height based on repeat.items */ updateBuffers(repeat: IVirtualRepeater, firstIndex: number): void; /** * Get the observer based on collection type of `items` */ getCollectionObserver(observerLocator: ObserverLocator, items: T): InternalCollectionObserver; /** * @override * Handle the repeat's collection instance changing. * @param repeat The repeater instance. * @param items The new array instance. * @param firstIndex The index of first active view */ instanceChanged(repeat: IVirtualRepeater, items: T, firstIndex?: number): void; /** * @override * Handle the repeat's collection instance mutating. * @param repeat The virtual repeat instance. * @param items The modified array. * @param splices Records of array changes. */ instanceMutated(repeat: IVirtualRepeater, items: RepeatableValue, splices: ICollectionObserverSplice[]): void; /** * Unlike normal repeat, virtualization repeat employs "padding" elements. Those elements * often are just blank block with proper height/width to adjust the height/width/scroll feeling * of virtualized repeat. 
* * Because of this, either mutation or change of the collection of repeat will potentially require * readjustment (or measurement) of those blank block, based on scroll position * * This is 2 phases scroll handle */ remeasure(repeat: IVirtualRepeater): void; /** * Update all visible views of a repeater, starting from given `startIndex` */ updateAllViews(repeat: IVirtualRepeater, startIndex: number): void; } /** * Templating strategy to handle virtual repeat views * Typically related to moving views, creating buffer and locating view range range in the DOM */ export interface ITemplateStrategy { /** * Determine the scroll container of a [virtual-repeat] based on its anchor (`element` is a comment node) */ getScrollContainer(element: Element): HTMLElement; /** * Move root element of a view to first position in the list, after top buffer * Note: [virtual-repeat] only supports single root node repeat */ moveViewFirst(view: View, topBuffer: Element): void; /** * Move root element of a view to last position in the list, before bottomBuffer * Note: [virtual-repeat] only supports single root node repeat */ moveViewLast(view: View, bottomBuffer: Element): void; /** * Create top and bottom buffer elements for an anchor (`element` is a comment node) */ createBuffers(element: Element): [HTMLElement, HTMLElement]; /** * Clean up buffers of a [virtual-repeat] */ removeBuffers(element: Element, topBuffer: Element, bottomBuffer: Element): void; /** * Get the first element(or view) between top buffer and bottom buffer * Note: [virtual-repeat] only supports single root node repeat */ getFirstElement(topBufer: Element, botBuffer: Element): Element; /** * Get the last element(or view) between top buffer and bottom buffer * Note: [virtual-repeat] only supports single root node repeat */ getLastElement(topBuffer: Element, bottomBuffer: Element): Element; } /** * Override `bindingContext` and `overrideContext` on `View` interface */ export type IView = View & Scope; /** * Expose 
property `children` to help manipulation/calculation */ export type IViewSlot = ViewSlot & { children: IView[] }; /** * Ability to have strong typings on bindingContext for OverrideContext */ export interface IOverrideContext<T> extends OverrideContext { bindingContext: T; } /** * Object with information about current state of a scrollable element * Capturing: * - current scroll height * - current scroll top * - real height */ export interface IScrollerInfo { scroller: HTMLElement; // scrollHeight: number; scrollTop: number; height: number; } export const enum VirtualizationCalculation { none = 0b0_00000, reset = 0b0_00001, has_sizing = 0b0_00010, observe_scroller = 0b0_00100, } export interface IElement { au: { controller: Controller; [key: string]: any; }; } /** * List of events that can be used to notify virtual repeat that size has changed */ export const VirtualizationEvents = Object.assign(Object.create(null), { scrollerSizeChange: 'virtual-repeat-scroller-size-changed' as 'virtual-repeat-scroller-size-changed', itemSizeChange: 'virtual-repeat-item-size-changed' as 'virtual-repeat-item-size-changed', }) as { scrollerSizeChange: 'virtual-repeat-scroller-size-changed'; itemSizeChange: 'virtual-repeat-item-size-changed'; }; export const enum ScrollingState { none = 0, isScrollingDown = 0b0_00001, isScrollingUp = 0b0_00010, isNearTop = 0b0_00100, isNearBottom = 0b0_01000, /**@internal */ isScrollingDownAndNearBottom = isScrollingDown | isNearBottom, /**@internal */ isScrollingUpAndNearTop = isScrollingUp | isNearTop, } // export const enum IVirtualRepeatState { // isAtTop = 0b0_000000_000, // isLastIndex = 0b0_000000_000, // scrollingDown = 0b0_000000_000, // scrollingUp = 0b0_000000_000, // switchedDirection = 0b0_000000_000, // isAttached = 0b0_000000_000, // ticking = 0b0_000000_000, // fixedHeightContainer = 0b0_000000_000, // hasCalculatedSizes = 0b0_000000_000, // calledGetMore = 0b0_000000_000, // skipNextScrollHandle = 0b0_000000_000, // handlingMutations 
= 0b0_000000_000, // isScrolling = 0b0_000000_000 // }
the_stack
import DiscoveryEntry from "../../../../main/js/generated/joynr/types/DiscoveryEntry"; import CapabilitiesRegistrar from "../../../../main/js/joynr/capabilities/CapabilitiesRegistrar"; import ProviderQos from "../../../../main/js/generated/joynr/types/ProviderQos"; import * as ProviderAttribute from "../../../../main/js/joynr/provider/ProviderAttribute"; import ProviderScope from "../../../../main/js/generated/joynr/types/ProviderScope"; import nanoid from "nanoid"; describe("libjoynr-js.joynr.capabilities.CapabilitiesRegistrar", () => { let capabilitiesRegistrar: CapabilitiesRegistrar; let requestReplyManagerSpy: any; let publicationManagerSpy: any; let participantId: string; let domain: string; let participantIdStorageSpy: any; let discoveryStubSpy: any; let messageRouterSpy: any; let libjoynrMessagingAddress: any; let provider: any; let providerQos: ProviderQos; const gbids = ["joynrdefaultgbid"]; class TestProvider { public static MAJOR_VERSION = 47; public static MINOR_VERSION = 11; public id = nanoid(); public interfaceName = "myInterfaceName"; public checkImplementation = jest.fn().mockReturnValue([]); } beforeEach(done => { publicationManagerSpy = { addPublicationProvider: jest.fn(), removePublicationProvider: jest.fn(), registerOnChangedProvider: jest.fn() }; provider = new TestProvider(); providerQos = new ProviderQos({ customParameters: [], priority: Date.now(), scope: ProviderScope.GLOBAL, supportsOnChangeSubscriptions: true }); provider.myAttribute = new ProviderAttribute.ProviderReadWriteNotifyAttribute( provider, { dependencies: { publicationManager: publicationManagerSpy } }, "myAttribute", "Boolean" ); domain = "testdomain"; participantId = "myParticipantId"; participantIdStorageSpy = { getParticipantId: jest.fn(), setParticipantId: jest.fn() }; participantIdStorageSpy.getParticipantId.mockReturnValue(participantId); requestReplyManagerSpy = { addRequestCaller: jest.fn(), removeRequestCaller: jest.fn() }; discoveryStubSpy = { add: jest.fn(), 
addToAll: jest.fn(), remove: jest.fn() }; discoveryStubSpy.add.mockReturnValue(Promise.resolve()); discoveryStubSpy.remove.mockReturnValue(Promise.resolve()); discoveryStubSpy.addToAll.mockReturnValue(Promise.resolve()); messageRouterSpy = { addNextHop: jest.fn(), removeNextHop: jest.fn() }; messageRouterSpy.addNextHop.mockReturnValue(Promise.resolve()); libjoynrMessagingAddress = { someKey: "someValue", toBe: "a", object: {} }; messageRouterSpy.removeNextHop.mockReturnValue(Promise.resolve()); capabilitiesRegistrar = new CapabilitiesRegistrar({ discoveryStub: discoveryStubSpy, messageRouter: messageRouterSpy, participantIdStorage: participantIdStorageSpy, libjoynrMessagingAddress, requestReplyManager: requestReplyManagerSpy, publicationManager: publicationManagerSpy }); done(); }); it("is instantiable", () => { expect(capabilitiesRegistrar).toBeDefined(); }); it("has all members", () => { expect(capabilitiesRegistrar.registerProvider).toBeDefined(); expect(typeof capabilitiesRegistrar.registerProvider === "function").toBeTruthy(); expect(typeof capabilitiesRegistrar.register === "function").toBeTruthy(); }); it("checks the provider's implementation", async () => { await capabilitiesRegistrar.registerProvider(domain, provider, providerQos); expect(provider.checkImplementation).toHaveBeenCalled(); }); it("supports configuring defaultDelayMs", async () => { const overwrittenDelay = 100000; jest.useFakeTimers(); const baseTime = Date.now(); jest.spyOn(Date, "now").mockImplementationOnce(() => { return baseTime; }); CapabilitiesRegistrar.setDefaultExpiryIntervalMs(overwrittenDelay); await capabilitiesRegistrar.registerProvider(domain, provider, providerQos).catch((error: any) => { jest.useRealTimers(); throw error; }); expect(discoveryStubSpy.add).toHaveBeenCalled(); const actualDiscoveryEntry = discoveryStubSpy.add.mock.calls[0][0]; expect(actualDiscoveryEntry.expiryDateMs).toEqual(baseTime + overwrittenDelay); jest.useRealTimers(); }); it("checks the provider's 
implementation, and rejects if incomplete", async () => { provider.checkImplementation = function() { return ["Operation:addFavoriteStation"]; }; const e = await reversePromise(capabilitiesRegistrar.registerProvider(domain, provider, providerQos)); expect(e).toEqual( new Error( `provider: ${domain}/${provider.interfaceName}.v${ provider.constructor.MAJOR_VERSION } is missing: Operation:addFavoriteStation` ) ); }); it("fetches participantId from the participantIdStorage", async () => { await capabilitiesRegistrar.registerProvider(domain, provider, providerQos); expect(participantIdStorageSpy.getParticipantId).toHaveBeenCalled(); expect(participantIdStorageSpy.getParticipantId).toHaveBeenCalledWith(domain, provider); }); it("registers next hop with routing table", async () => { await capabilitiesRegistrar.registerProvider(domain, provider, providerQos); const isGloballyVisible = providerQos.scope === ProviderScope.GLOBAL; expect(messageRouterSpy.addNextHop).toHaveBeenCalled(); expect(messageRouterSpy.addNextHop).toHaveBeenCalledWith( participantId, libjoynrMessagingAddress, isGloballyVisible ); }); it("registers provider at RequestReplyManager", async () => { await capabilitiesRegistrar.registerProvider(domain, provider, providerQos); expect(requestReplyManagerSpy.addRequestCaller).toHaveBeenCalled(); expect(requestReplyManagerSpy.addRequestCaller).toHaveBeenCalledWith(participantId, provider); }); it("handles calls to function register", () => { capabilitiesRegistrar .register({ domain, provider, providerQos }) .then(() => { return null; }) .catch(() => { return null; }); expect(requestReplyManagerSpy.addRequestCaller).toHaveBeenCalled(); expect(requestReplyManagerSpy.addRequestCaller).toHaveBeenCalledWith(participantId, provider); }); it("uses passed-in participantId", async () => { const myParticipantId = "myFixedParticipantId"; await capabilitiesRegistrar.register({ domain, provider, providerQos, participantId: myParticipantId }); 
expect(participantIdStorageSpy.setParticipantId).toHaveBeenCalledWith(domain, provider, myParticipantId); expect(requestReplyManagerSpy.addRequestCaller).toHaveBeenCalled(); expect(requestReplyManagerSpy.addRequestCaller).toHaveBeenCalledWith(myParticipantId, provider); }); it("registers a provider with PublicationManager if it has an attribute", async () => { await capabilitiesRegistrar.registerProvider(domain, provider, providerQos); expect(publicationManagerSpy.addPublicationProvider).toHaveBeenCalled(); expect(publicationManagerSpy.addPublicationProvider).toHaveBeenCalledWith(participantId, provider); }); it("register calls discoveryStub with gbids", async () => { await capabilitiesRegistrar.register({ domain, provider, providerQos, gbids }); expect(discoveryStubSpy.add).toHaveBeenCalledWith(expect.any(Object), expect.any(Boolean), gbids); }); it("register calls discoveryStub with empty array if gbids aren't provided", async () => { const myParticipantId = "myFixedParticipantId"; const myDomain = "myDomain"; await capabilitiesRegistrar.register({ domain: myDomain, provider, providerQos, participantId: myParticipantId }); expect(discoveryStubSpy.add).toHaveBeenCalledWith(expect.any(Object), expect.any(Boolean), []); }); it("registers capability at capabilities stub", async () => { const lowerBound = Date.now(); await capabilitiesRegistrar.registerProvider(domain, provider, providerQos); const upperBound = Date.now(); expect(discoveryStubSpy.add).toHaveBeenCalled(); const actualDiscoveryEntry = discoveryStubSpy.add.mock.calls[0][0]; expect(actualDiscoveryEntry.domain).toEqual(domain); expect(actualDiscoveryEntry.interfaceName).toEqual(provider.interfaceName); expect(actualDiscoveryEntry.participantId).toEqual(participantId); expect(actualDiscoveryEntry.qos).toEqual(providerQos); expect(actualDiscoveryEntry.lastSeenDateMs).not.toBeLessThan(lowerBound); expect(actualDiscoveryEntry.lastSeenDateMs).not.toBeGreaterThan(upperBound); 
expect(actualDiscoveryEntry.providerVersion.majorVersion).toEqual(provider.constructor.MAJOR_VERSION); expect(actualDiscoveryEntry.providerVersion.minorVersion).toEqual(provider.constructor.MINOR_VERSION); }); async function testAwaitGlobalRegistrationScenario(awaitGlobalRegistration: boolean) { await capabilitiesRegistrar.register({ domain, provider, providerQos, awaitGlobalRegistration }); const actualAwaitGlobalRegistration = discoveryStubSpy.add.mock.calls[0][1]; expect(actualAwaitGlobalRegistration).toEqual(awaitGlobalRegistration); } it("calls discoveryProxy.add() with same awaitGlobalRegistration parameter true used in call to registerProvider", async () => { const awaitGlobalRegistration = true; await testAwaitGlobalRegistrationScenario(awaitGlobalRegistration); }); it("calls discoveryProxy.add() with same awaitGlobalRegistration parameter false used in call to registerProvider", async () => { const awaitGlobalRegistration = false; await testAwaitGlobalRegistrationScenario(awaitGlobalRegistration); }); it("calls discoveryProxy.add() with awaitGlobalRegistration parameter false on default call of registerProvider", async () => { await capabilitiesRegistrar.registerProvider(domain, provider, providerQos); const expectedAwaitGlobalRegistration = false; const actualAwaitGlobalRegistration = discoveryStubSpy.add.mock.calls[0][1]; expect(actualAwaitGlobalRegistration).toEqual(expectedAwaitGlobalRegistration); }); it("returns the provider participant ID", async () => { const result = await capabilitiesRegistrar.registerProvider(domain, provider, providerQos); expect(result).toEqual(participantId); }); it("returns the promise onRejected from capabilites stub", async () => { discoveryStubSpy.add.mockReturnValue(Promise.reject(new Error("Some error."))); const error = await reversePromise(capabilitiesRegistrar.registerProvider(domain, provider, providerQos)); expect(Object.prototype.toString.call(error) === "[object Error]").toBeTruthy(); }); function 
reversePromise(promise: Promise<any>) { return promise.then(suc => Promise.reject(suc)).catch((e: any) => e); } it("rejects with an exception when called while shutting down", async () => { capabilitiesRegistrar.shutdown(); await reversePromise(capabilitiesRegistrar.registerProvider(domain, provider, providerQos)); await reversePromise(capabilitiesRegistrar.unregisterProvider(domain, provider)); }); it("deletes the next hop when discoveryStub.add fails", async () => { const error = new Error("some Error"); discoveryStubSpy.add.mockReturnValue(Promise.reject(error)); const e = await reversePromise(capabilitiesRegistrar.registerProvider(domain, provider, providerQos)); expect(e).toEqual(error); expect(messageRouterSpy.removeNextHop).toHaveBeenCalled(); }); it("removes capability at discoveryStub and removes next hop in routing table when unregistering provider", async () => { await capabilitiesRegistrar.registerProvider(domain, provider, providerQos); await capabilitiesRegistrar.unregisterProvider(domain, provider); expect(messageRouterSpy.removeNextHop).toHaveBeenCalled(); expect(discoveryStubSpy.remove).toHaveBeenCalled(); }); describe(`registerInAllKnownBackends`, () => { it(`calls discoveryStub.addToAll`, async () => { await capabilitiesRegistrar.registerInAllKnownBackends({ domain, provider, providerQos }); expect(discoveryStubSpy.addToAll).toHaveBeenCalled(); const discoveryEntry = discoveryStubSpy.addToAll.mock.calls.slice(-1)[0][0]; expect(discoveryEntry).toBeInstanceOf(DiscoveryEntry); expect(discoveryEntry.domain).toEqual(domain); expect(discoveryEntry.interface).toEqual(provider.interfacename); expect(discoveryEntry.participandId).toEqual(provider.participantId); expect(discoveryEntry.qos).toEqual(providerQos); }); }); });
the_stack
import { SortExecutor, FilterExecutor } from '../../js/core/transformExecutors'; import { DataGenerator } from '../js/testUtils'; import { ViewBasedJSONModel } from '../../js/core/viewbasedjsonmodel'; import { ReadonlyJSONValue } from '@lumino/coreutils'; import { Transform } from '../../js/core/transform'; const INVALID_DATE = new Date('INVALID'); const sortTestCases: Private.SortTestCase[] = [ // Number { desc: true, dType: 'number', data: [2, 1, 3], expected: [3, 2, 1] }, { desc: false, dType: 'number', data: [2, 1, 3], expected: [1, 2, 3] }, { desc: false, dType: 'number', data: [2, null, 3], expected: [2, 3, null] }, { desc: false, dType: 'number', data: [2, Number.NaN, 3], expected: [2, 3, Number.NaN], }, // String { desc: true, dType: 'string', data: ['A', 'C', 'B'], expected: ['C', 'B', 'A'], }, { desc: false, dType: 'string', data: ['A', 'C', 'B'], expected: ['A', 'B', 'C'], }, { desc: false, dType: 'string', data: ['A', null, 'B'], expected: ['A', 'B', null], }, // Boolean { desc: true, dType: 'boolean', data: [true, false, true], expected: [true, true, false], }, { desc: false, dType: 'boolean', data: [true, false, true], expected: [false, true, true], }, { desc: false, dType: 'boolean', data: [true, null, false], expected: [false, true, null], }, // Datetime { desc: true, dType: 'datetime', data: [ '2019-09-12T18:38:47.431Z', '2019-09-07T18:38:47.431Z', '2019-09-10T18:38:47.431Z', ], expected: [ '2019-09-12T18:38:47.431Z', '2019-09-10T18:38:47.431Z', '2019-09-07T18:38:47.431Z', ], }, { desc: false, dType: 'datetime', data: [ '2019-09-12T18:38:47.431Z', '2019-09-07T18:38:47.431Z', '2019-09-10T18:38:47.431Z', ], expected: [ '2019-09-07T18:38:47.431Z', '2019-09-10T18:38:47.431Z', '2019-09-12T18:38:47.431Z', ], }, { desc: false, dType: 'datetime', data: ['2019-09-12T18:38:47.431Z', null, '2019-09-10T18:38:47.431Z'], expected: ['2019-09-10T18:38:47.431Z', '2019-09-12T18:38:47.431Z', null], }, { desc: false, dType: 'datetime', data: [ 
'2019-09-12T18:38:47.431Z', INVALID_DATE, '2019-09-10T18:38:47.431Z', ], expected: [ '2019-09-10T18:38:47.431Z', '2019-09-12T18:38:47.431Z', INVALID_DATE, ], }, // Mixed types (treated as strings) { desc: false, dType: 'string', data: [ 1, Number.NaN, 'B', '2019-09-10T18:38:47.431Z', 101.22, 1.1, Number.NaN, 1.21, 1.31, Number.NaN, Number.NaN, 1.11, 1.21, 1.91, 'A', 9.76, ], expected: [ 1, 1.1, 1.11, 1.21, 1.21, 1.31, 1.91, 101.22, '2019-09-10T18:38:47.431Z', 9.76, 'A', 'B', Number.NaN, Number.NaN, Number.NaN, Number.NaN, ], }, ]; // Run tests describe('Sort Executors', () => { for (let testCase of sortTestCases) { test(`sort-${testCase.dType}-${testCase.desc ? 'desc' : 'asc'}`, () => { const testData = DataGenerator.singleCol({ name: 'test', type: testCase.dType, data: testCase.data, }); const result = Private.executeSort({ field: 'test', desc: testCase.desc, dType: testCase.dType, data: testData, }); expect(result).toEqual(testCase.expected); }); } }); const filterTestCases: Private.FilterTestCase[] = [ // Number { op: '<', value: 10, dType: 'number', data: [5, 10, 15], expected: [5] }, { op: '>', value: 10, dType: 'number', data: [5, 10, 15], expected: [15] }, { op: '<=', value: 10, dType: 'number', data: [5, 10, 15], expected: [5, 10], }, { op: '>=', value: 10, dType: 'number', data: [5, 10, 15], expected: [10, 15], }, { op: 'empty', value: null, dType: 'number', data: [5, null, 15], expected: [null], }, { op: 'notempty', value: null, dType: 'number', data: [5, null, 15], expected: [5, 15], }, { op: 'in', value: [5, 10], dType: 'number', data: [5, 10, 15], expected: [5, 10], }, { op: 'between', value: [7, 12], dType: 'number', data: [5, 10, 15], expected: [10], }, // String { op: '<', value: 'b', dType: 'string', data: ['a', 'b', 'c'], expected: ['a'], }, { op: '>', value: 'b', dType: 'string', data: ['a', 'b', 'c'], expected: ['c'], }, { op: 'empty', value: null, dType: 'string', data: ['a', null, 'c'], expected: [null], }, { op: 'notempty', value: null, 
dType: 'string', data: ['a', null, 'c'], expected: ['a', 'c'], }, { op: 'in', value: ['a', 'b'], dType: 'string', data: ['a', 'b', 'c'], expected: ['a', 'b'], }, { op: 'between', value: ['a', 'e'], dType: 'string', data: ['a', 'b', 'c', 'd', 'e'], expected: ['b', 'c', 'd'], }, { op: 'startswith', value: 'a', dType: 'string', data: ['ab', 'ba', 'ca'], expected: ['ab'], }, { op: 'endswith', value: 'a', dType: 'string', data: ['ab', 'ba', 'ca'], expected: ['ba', 'ca'], }, { op: 'contains', value: 'rr', dType: 'string', data: ['apple', 'berry', 'cherry'], expected: ['berry', 'cherry'], }, { op: '!contains', value: 'rr', dType: 'string', data: ['apple', 'berry', 'cherry'], expected: ['apple'], }, // Boolean { op: 'empty', value: null, dType: 'boolean', data: [true, null, false], expected: [null], }, { op: 'notempty', value: null, dType: 'boolean', data: [true, null, false], expected: [true, false], }, // Datetime { op: '<', value: '2019-09-11', dType: 'datetime', data: [ '2019-09-10T18:38:47.431Z', '2019-09-11T18:38:47.431Z', '2019-09-12T18:38:47.431Z', ], expected: ['2019-09-10T18:38:47.431Z'], }, { op: '>', value: '2019-09-11', dType: 'datetime', data: [ '2019-09-10T18:38:47.431Z', '2019-09-11T18:38:47.431Z', '2019-09-12T18:38:47.431Z', ], expected: ['2019-09-12T18:38:47.431Z'], }, { op: '<=', value: '2019-09-11', dType: 'datetime', data: [ '2019-09-10T18:38:47.431Z', '2019-09-11T18:38:47.431Z', '2019-09-12T18:38:47.431Z', ], expected: ['2019-09-10T18:38:47.431Z', '2019-09-11T18:38:47.431Z'], }, { op: '>=', value: '2019-09-11', dType: 'datetime', data: [ '2019-09-10T18:38:47.431Z', '2019-09-11T18:38:47.431Z', '2019-09-12T18:38:47.431Z', ], expected: ['2019-09-11T18:38:47.431Z', '2019-09-12T18:38:47.431Z'], }, { op: 'empty', value: null, dType: 'datetime', data: ['2019-09-10T18:38:47.431Z', null, '2019-09-12T18:38:47.431Z'], expected: [null], }, { op: 'notempty', value: null, dType: 'datetime', data: ['2019-09-10T18:38:47.431Z', null, '2019-09-12T18:38:47.431Z'], 
expected: ['2019-09-10T18:38:47.431Z', '2019-09-12T18:38:47.431Z'], }, { op: 'in', value: ['2019-09-10T18:38:47.431Z', '2019-09-12T18:38:47.431Z'], dType: 'datetime', data: [ '2019-09-10T18:38:47.431Z', '2019-09-11T18:38:47.431Z', '2019-09-12T18:38:47.431Z', ], expected: ['2019-09-10T18:38:47.431Z', '2019-09-12T18:38:47.431Z'], }, { op: 'between', value: ['2019-09-02T18:38:47.431Z', '2019-09-04T18:38:47.431Z'], dType: 'datetime', data: [ '2019-09-01T18:38:47.431Z', '2019-09-03T18:38:47.431Z', '2019-09-06T18:38:47.431Z', ], expected: ['2019-09-03T18:38:47.431Z'], }, { op: '=', value: '2019-09-10T18:38:47.431Z', dType: 'datetime', data: [ '2019-09-10T11:38:47.431Z', '2019-09-10T18:38:47.431Z', '2019-09-10T12:38:47.431Z', ], expected: ['2019-09-10T18:38:47.431Z'], }, { op: 'isOnSameDay', value: '2019-09-10', dType: 'datetime', data: [ '2019-09-10T11:38:47.431Z', '2019-09-10T18:38:47.431Z', '2019-09-12T12:38:47.431Z', ], expected: ['2019-09-10T11:38:47.431Z', '2019-09-10T18:38:47.431Z'], }, ]; // Run tests describe('Filter Executors', () => { for (let testCase of filterTestCases) { test(`filter-${testCase.dType}-${testCase.op}`, () => { const testData = DataGenerator.singleCol({ name: 'test', type: testCase.dType, data: testCase.data, }); const result = Private.executeFilter({ field: 'test', dType: testCase.dType, data: testData, operator: testCase.op, value: testCase.value, }); expect(result).toEqual(testCase.expected); }); } }); /** * The namespace for the module implementation details. */ namespace Private { /** * Returns an array containing the data from a single column of the provided * table. * * @param options - Options for calling this function. */ export function getDataByField( options: IGetDataByFieldOptions, ): ReadonlyJSONValue[] { return options.data.data.map((row) => row[options.field]); } /** * Creates a `SortExecutor` and executes it with the provided options. * * @param options - Options for calling this function. 
*/ export function executeSort( options: IExecuteSortOptions, ): ReadonlyJSONValue[] { const testObject = new SortExecutor({ field: options.field, dType: options.dType, desc: options.desc, }); const result = testObject.apply(options.data); return Private.getDataByField({ data: result, field: options.field }); } /** * Creates a `FilterExecutor` and executes it with the provided options. * * @param options - Options for calling this function. */ export function executeFilter( options: IExecuteFilterOptions, ): ReadonlyJSONValue[] { const testObject = new FilterExecutor({ field: options.field, dType: options.dType, operator: options.operator, value: options.value, }); const result = testObject.apply(options.data); return Private.getDataByField({ data: result, field: options.field }); } /** * An options object for returning an array of data from a table. */ export interface IGetDataByFieldOptions { /** * The table to operate on. */ data: ViewBasedJSONModel.IData; /** * The name of the field to operate on. */ field: string; } /** * An options object for executing a sort operation. */ export interface IExecuteSortOptions { /** * The table to operate on. */ data: ViewBasedJSONModel.IData; /** * The name of the field to operate on. */ field: string; /** * Indicates if the sort should be in descending or ascending order */ desc: boolean; /** * The dtype of the column being operated on. */ dType: string; } /** * An options object for executing a filter operation. */ export interface IExecuteFilterOptions { /** * The table to operate on. */ data: ViewBasedJSONModel.IData; /** * The name of the field to operate on. */ field: string; /** * The operator to use for this transform. */ operator: Transform.FilterOperator; /** * The dtype of the column being operated on. */ dType: string; /** * The value to filter by. */ value: any; } /** * An interface that defines the data needed for a filter test case. 
*/ export interface FilterTestCase { /** * The operator to use for this transform. */ op: Transform.FilterOperator; /** * The value to filter by. */ value: any; /** * The dtype of the column being operated on. */ dType: string; /** * The data used to create the table for testing. */ data: any[]; /** * The expected data to be returned after the test. */ expected: any[]; } /** * An interface that defines the data needed for a sort test case. */ export interface SortTestCase { /** * Indicates if the sort should be in descending or ascending order */ desc: boolean; /** * The dtype of the column being operated on. */ dType: string; /** * The data used to create the table for testing. */ data: any[]; /** * The expected data to be returned after the test. */ expected: any[]; } }
the_stack
import {inject,transient,Container} from 'aurelia-dependency-injection'; import {Config} from 'aurelia-api'; import {metadata} from 'aurelia-metadata'; import {Validator,ValidationRules} from 'aurelia-validation'; import {getLogger} from 'aurelia-logging'; import {Config as ViewManagerConfig,resolvedView} from 'aurelia-view-manager'; import {bindingMode,BindingEngine} from 'aurelia-binding'; import {bindable,customElement} from 'aurelia-templating'; /** * The Repository basis class */ export declare class Repository { transport: any; /** * Construct. * * @param {Config} clientConfig * * @constructor */ constructor(clientConfig?: any); /** * Get the transport for the resource this repository represents. * * @return {Rest} */ getTransport(): any; /** * Set the associated entity's meta data * * @param {{}} meta */ setMeta(meta?: any): any; /** * Get the associated entity's meta data. * @return {{}} */ getMeta(): any; /** * Set the identifier * * @param {string} identifier * @return {Repository} this * @chainable */ setIdentifier(identifier?: any): any; /** * Get the identifier * * @return {string|null} */ getIdentifier(): any; /** * Set the resource * * @param {string} resource * @return {Repository} this * @chainable */ setResource(resource?: any): any; /** * Get the resource * * @return {string|null} */ getResource(): any; /** * Perform a find query and populate entities with the retrieved data. * * @param {{}|number|string} criteria Criteria to add to the query. A plain string or number will be used as relative path. * @param {boolean} [raw] Set to true to get a POJO in stead of populated entities. * * @return {Promise<Entity|[Entity]>} */ find(criteria?: any, raw?: any): any; /** * Perform a find query and populate entities with the retrieved data, limited to one result. * * @param {{}|number|string} criteria Criteria to add to the query. A plain string or number will be used as relative path. 
* @param {boolean} [raw] Set to true to get a POJO in stead of populated entities. * * @return {Promise<Entity|[Entity]>} */ findOne(criteria?: any, raw?: any): any; /** * Perform a find query for `path` and populate entities with the retrieved data. * * @param {string} path * @param {{}|number|string} criteria Criteria to add to the query. A plain string or number will be used as relative path. * @param {boolean} [raw] Set to true to get a POJO in stead of populated entities. * @param {boolean} [single] Whether or not this is a findOne. * * @return {Promise<Entity|[Entity]>} */ findPath(path?: any, criteria?: any, raw?: any, single?: any): any; /** * Perform a count on the resource. * * @param {null|{}} criteria * * @return {Promise<number>} */ count(criteria?: any): any; /** * Get new populated entity or entities based on supplied data including associations * * @param {{}|[{}]} data|[data] The data to populate with * @param {boolean} [clean] Mark the entities as clean or not * * @return {Entity|[Entity]} */ populateEntities(data?: any, clean?: any): any; /** * Populate a (new) entity including associations * * @param {{}} data The data to populate with * @param {Entity} [entity] optional. if not set, a new entity is returned * @param {boolean} [clean] Mark the entities as clean or not * * @return {Entity} */ getPopulatedEntity(data?: any, entity?: any, clean?: any): any; /** * Get a new instance for entityReference. * * @return {Entity} */ getNewEntity(): any; /** * Populate a new entity with the empty associations set. 
* * @return {Entity} */ getNewPopulatedEntity(): any; } /** * The DefaultRepository class * @transient */ export declare class DefaultRepository extends Repository { } export declare class OrmMetadata { static forTarget(target?: any): any; } /** * The MetaData class for Entity and Repository * */ export declare class Metadata { // The key used to identify this specific metadata static key: any; /** * Construct metadata with sensible defaults (so we can make assumptions in the code). */ constructor(); /** * Add a value to an array. * * @param {string} key * @param {*} value * * @return {Metadata} itself * @chainable */ addTo(key?: any, value?: any): any; /** * Set a value for key, or one level deeper (key.key). * * @param {string} key * @param {string|*} valueOrNestedKey * @param {null|*} [valueOrNull] * * @return {Metadata} itself * @chainable */ put(key?: any, valueOrNestedKey?: any, valueOrNull?: any): any; /** * Check if key, or key.nested exists. * * @param {string} key * @param {string} [nested] * * @return {boolean} */ has(key?: any, nested?: any): any; /** * Fetch key or key.nested from metadata. * * @param {string} key * @param {string} [nested] * * @return {*} */ fetch(key?: any, nested?: any): any; } /* eslint-disable max-lines */ /** * The Entity basis class * @transient */ export declare class Entity { /** * Construct a new entity. * * @param {Validator} validator */ constructor(); /** * Get the transport for the resource this repository represents. * * @return {Rest} */ getTransport(): any; /** * Get reference to the repository. * * @return {Repository} */ getRepository(): any; /** * Set reference to the repository. * * @param {Repository} repository * * @return {Entity} this * @chainable */ setRepository(repository?: any): any; /** * Define a non-enumerable property on the entity. 
* * @param {string} property * @param {*} value * @param {boolean} [writable] * @chainable * * @return {Entity} this * @chainable */ define(property?: any, value?: any, writable?: any): any; /** * Get the metadata for this entity. * * @return {Metadata} */ getMeta(): any; /** * Get the id property name for this entity. * * @return {string} */ getIdProperty(): any; /** * Get the id property name of the entity (static). * * @return {string} */ static getIdProperty(): any; /** * Get the Id value for this entity. * * @return {number|string} */ getId(): any; /** * Set the Id value for this entity. * * @param {number|string} id * * @return {Entity} this * @chainable */ setId(id?: any): any; /** * Persist the entity's state to the server. * Either creates a new record (POST) or updates an existing one (PUT) based on the entity's state, * * @return {Promise} */ save(): any; /** * Persist the changes made to this entity to the server. * * @see .save() * * @return {Promise} * * @throws {Error} */ update(): any; /** * Add an entity to a collection (persist). * * When given entity has data, create the entity and set up the relation. * * @param {Entity|number} entity Entity or id * @param {string} [property] The name of the property * * @return {Promise} */ addCollectionAssociation(entity?: any, property?: any): any; /** * Remove an entity from a collection. * * @param {Entity|number} entity Entity or id * @param {string} [property] The name of the property * * @return {Promise} */ removeCollectionAssociation(entity?: any, property?: any): any; /** * Persist the collections on the entity. * * @return {Promise} itself */ saveCollections(): any; /** * Mark this entity as clean, in its current state. * * @return {Entity} itself * @chainable */ markClean(): any; /** * Return if the entity is clean. * * @return {boolean} */ isClean(): any; /** * Return if the entity is dirty. 
* * @return {boolean} */ isDirty(): any; /** * Return if the entity is new (ergo, hasn't been persisted to the server). * * @return {boolean} */ isNew(): any; /** * Resets the entity to the clean state * * @param {boolean} [shallow] * * @return {Entity} itself */ reset(shallow?: any): any; /** * Sets the entity's properties to their clean values * * @return {Entity} itself * @chainable */ clear(): any; /** * Get the identifier name of this entity's reference (static). * * @return {string|null} */ static getIdentifier(): any; /** * Get the identifier name of this entity instance * * @return {string|null} */ getIdentifier(): any; /** * Set this instance's identifier. * * @param {string} identifier * * @return {Entity} itself * @chainable */ setIdentifier(identifier?: any): any; /** * Get the resource name of this entity's reference (static). * * @return {string|null} */ static getResource(): any; /** * Get the resource name of this entity instance * * @return {string|null} */ getResource(): any; /** * Set this instance's resource. * * @param {string} resource * * @return {Entity} itself * @chainable */ setResource(resource?: any): any; /** * Destroy this entity (DELETE request to the server). * * @return {Promise} */ destroy(): any; /** * Get the name of the entity. This is useful for labels in texts. * * @return {string} */ getName(): any; /** * Get the name of the entity (static). This is useful for labels in texts. * * @return {string} */ static getName(): any; /** * Set data on this entity. * * @param {{}} data * @param {boolean} markClean * @return {Entity} itself * @chainable */ setData(data?: any, markClean?: any): any; /** * Set the validator instance. * * @param {Validator} validator * @return {Entity} itself * @chainable */ setValidator(validator?: any): any; /** * Get the validator instance. * * @return {Validator} */ getValidator(): any; /** * Check if entity has validation enabled. 
* * @return {boolean} */ hasValidation(): any; /** * Validates the entity * * @param {string|null} propertyName Optional. The name of the property to validate. If unspecified, * all properties will be validated. * @param {Rule<*, *>[]|null} rules Optional. If unspecified, the rules will be looked up using * the metadata for the object created by ValidationRules....on(class/object) * @return {Promise<ValidateResult[]>} */ validate(propertyName?: any, rules?: any): any; /** * Get the data in this entity as a POJO. * * @param {boolean} [shallow] * * @return {{}} */ asObject(shallow?: any): any; /** * Get the data in this entity as a json string. * * @param {boolean} [shallow] * * @return {string} */ asJson(shallow?: any): any; } /** * Set the id property for en entity * * @export * @param {string} propertyName * @returns {function} * * @decorator */ export declare function idProperty(propertyName?: any): any; /** * Set the 'identifierName' metadata on the entity * * @param {string} identifierName The name of the identifier * * @return {function} * * @decorator */ export declare function identifier(identifierName?: any): any; /** * Set the 'name' metadata on the entity * * @param {string} entityName=target.name.toLowerCase The (custom) name to use * * @return {function} * * @decorator */ export declare function name(entityName?: any): any; /** * Set the repositoryReference metadata on the entity * * @param {string} repositoryReference The repository reference to use * * @return {function} * * @decorator */ export declare function repository(repositoryReference?: any): any; /** * Set the 'resourceName' metadata on the entity * * @param {string} resourceName The name of the resource * * @return {function} * * @decorator */ export declare function resource(resourceName?: any): any; /** * Set the 'validation' metadata to 'true' * * @param {[function]} ValidatorClass = Validator * * @return {function} * * @decorator */ export declare function validation(ValidatorClass?: 
any): any; /** * The EntityManager class */ export declare class EntityManager { repositories: any; entities: any; /** * Construct a new EntityManager. * * @param {Container} container */ constructor(container?: any); /** * Register an array of entity classes. * * @param {function[]|function} EntityClasses Array or object of Entity constructors. * * @return {EntityManager} itself * @chainable */ registerEntities(EntityClasses?: any): any; /** * Register an Entity class. * * @param {function} EntityClass * * @return {EntityManager} itself * @chainable */ registerEntity(EntityClass?: any): any; /** * Get a repository instance. * * @param {Entity|string} entity * * @return {Repository} * @throws {Error} */ getRepository(entity?: any): any; /** * Resolve given resource value to an entityReference * * @param {Entity|string} resource * * @return {Entity} * @throws {Error} */ resolveEntityReference(resource?: any): any; /** * Get an instance for `entity` * * @param {string|Entity} entity * * @return {Entity} */ getEntity(entity?: any): any; } /** * Set the 'resource' metadata and enables validation on the entity * * @param {string} resourceName The name of the resource * @param {[function]} ValidatorClass = Validator * * @return {function} * * @decorator */ export declare function validatedResource(resourceName?: any, ValidatorClass?: any): any; // eslint-disable-line no-unused-vars // eslint-disable-line no-unused-vars /** * Plugin configure * * @export * @param {*} frameworkConfig * @param {*} configCallback */ export declare function configure(frameworkConfig?: any, configCallback?: any): any; export declare const logger: any; /** * Set generic 'data' metadata. * * @param {{}} metaData The data to set * * @returns {function} * * @decorator */ export declare function data(metaData?: any): any; /** * Set the 'endpoint' metadta of an entity. 
Needs a set resource * * @param {string} entityEndpoint The endpoint name to use * * @return {function} * * @decorator */ export declare function endpoint(entityEndpoint?: any): any; // fix for babels property decorator export declare function ensurePropertyIsConfigurable(target?: any, propertyName?: any, descriptor?: any): any; /** * Associate a property with an entity (toOne) or a collection (toMany) * * @param {undefined|string|{}} associationData undefined={entity:propertyName}, string={entity:string}, Object={entity: string, collection: string} * * @return {function} * * @decorator */ export declare function association(associationData?: any): any; /** * Registers the 'enumerations' for an attribute's values * * @param {*[]} values - a list of valid values for the entity's attribute * * @return {Function} * * @decorator */ export declare function enumeration(values?: any): any; /** * Set the 'types' metadata on the entity * * @param {string} typeValue The type(text,string,date,datetime,integer,int,number,float,boolean,bool,smart,autodetect (based on value)) to use for this property using typer * * @return {function} * * @decorator */ export declare function type(typeValue?: any): any; export declare class AssociationSelect { criteria: any; name: any; repository: any; identifier: any; property: any; resource: any; options: any; association: any; manyAssociation: any; value: any; error: any; multiple: any; hidePlaceholder: any; selectablePlaceholder: any; placeholderValue: any; disabled: any; placeholderText: any; ownMeta: any; /** * Create a new select element. * * @param {BindingEngine} bindingEngine * @param {EntityManager} entityManager */ constructor(bindingEngine?: any, entityManager?: any); /** * (Re)Load the data for the select. * * @param {string|Array|{}} [reservedValue] * * @return {Promise} */ load(reservedValue?: any): any; /** * Set the value for the select. 
* * @param {string|Array|{}} value */ setValue(value?: any): any; /** * Get criteria, or default to empty object. * * @return {{}} */ getCriteria(): any; /** * Build the find that's going to fetch the option values for the select. * This method works well with associations, and reloads when they change. * * @return {Promise} */ buildFind(): any; /** * Check if all associations have values set. * * @return {boolean} */ verifyAssociationValues(): any; /** * Add a watcher to the list. Whenever given association changes, the select will reload its contents. * * @param {Entity|Array} association Entity or array of Entity instances. * * @return {AssociationSelect} */ observe(association?: any): any; /** * Check if the element property has changed * * @param {string} property * @param {string|{}} newVal * @param {string|{}} oldVal * * @return {boolean} */ isChanged(property?: any, newVal?: any, oldVal?: any): any; /** * Changed resource handler * * @param {string} resource */ resourceChanged(resource?: any): any; /** * Changed criteria handler * * @param {{}} newVal * @param {{}} oldVal */ criteriaChanged(newVal?: any, oldVal?: any): any; bind(): any; /** * Find the name of the property in meta, reversed by resource. * * @param {Metadata} meta * @param {string} resource * * @return {string} */ propertyForResource(meta?: any, resource?: any): any; /** * Dispose all subscriptions on unbind. 
*/ unbind(): any; } export declare class Paged { data: any; loading: any; page: any; error: any; criteria: any; repository: any; resource: any; limit: any; constructor(entityManager?: any); /** * Attach to view */ attached(): any; /** * Reload data */ reloadData(): any; /** * Check if the element property has changed * * @param {string} property New element property * @param {string|{}} newVal New value * @param {string|{}} oldVal Old value * * @return {boolean} */ isChanged(property?: any, newVal?: any, oldVal?: any): any; /** * Changed page handler * * @param {integer} newVal New page value * @param {integer} oldVal Old page value */ pageChanged(newVal?: any, oldVal?: any): any; /** * Changed resource handler * * @param {{}} newVal New resource value * @param {{}} oldVal Old resource value */ resourceChanged(newVal?: any, oldVal?: any): any; /** * Changed criteria handler * * @param {{}} newVal New criteria value * @param {{}} oldVal Old criteria value */ criteriaChanged(newVal?: any, oldVal?: any): any; /** * Changed resource handler * * @param {string} resource New resource value */ resourceChanged(resource?: any): any; /** * Get data from repository */ getData(): any; }
the_stack
/**
 * Tests for the deepkit BSON serializer/sizer: verifies that the generated
 * serializers produce byte-identical output (and matching byte sizes) to the
 * official `bson` package across supported types (strings, ints, longs,
 * bigints, binary, dates, uuid/objectId, nested objects, maps/sets/arrays,
 * unions, index signatures, references, and recursion).
 */
import { expect, test } from '@jest/globals';
import { getBSONSerializer, getBSONSizer, getValueSize, hexToByte, uuidStringToByte } from '../src/bson-serializer';
import { BinaryBigInt, createReference, Excluded, MongoId, nodeBufferToArrayBuffer, PrimaryKey, Reference, SignedBinaryBigInt, typeOf, uuid, UUID } from '@deepkit/type';
import bson from 'bson';
import { randomBytes } from 'crypto';
import { BSON_BINARY_SUBTYPE_DEFAULT, BSONType } from '../src/utils';
import { deserializeBSONWithoutOptimiser } from '../src/bson-parser';
import { deserializeBSON } from '../src/bson-deserializer';

const { Binary, calculateObjectSize, deserialize, Long, ObjectId: OfficialObjectId, serialize } = bson;

test('hexToByte', () => {
    expect(hexToByte('00')).toBe(0);
    expect(hexToByte('01')).toBe(1);
    expect(hexToByte('0f')).toBe(15);
    expect(hexToByte('10')).toBe(16);
    expect(hexToByte('ff')).toBe(255);
    expect(hexToByte('f0')).toBe(240);
    expect(hexToByte('50')).toBe(80);
    expect(hexToByte('7f')).toBe(127);
    expect(hexToByte('f00f', 1)).toBe(15);
    expect(hexToByte('f0ff', 1)).toBe(255);
    expect(hexToByte('f00001', 2)).toBe(1);
    expect(hexToByte('f8')).toBe(16 * 15 + 8);
    expect(hexToByte('41')).toBe(16 * 4 + 1);
    expect(uuidStringToByte('bef8de96-41fe-442f-b70c-c3a150f8c96c', 1)).toBe(16 * 15 + 8);
    expect(uuidStringToByte('bef8de96-41fe-442f-b70c-c3a150f8c96c', 4)).toBe(16 * 4 + 1);
    expect(uuidStringToByte('bef8de96-41fe-442f-b70c-c3a150f8c96c', 6)).toBe(16 * 4 + 4);
    expect(uuidStringToByte('bef8de96-41fe-442f-b70c-c3a150f8c96c', 7)).toBe(16 * 2 + 15);
    expect(uuidStringToByte('bef8de96-41fe-442f-b70c-c3a150f8c96c', 8)).toBe(16 * 11 + 7);
    expect(uuidStringToByte('bef8de96-41fe-442f-b70c-c3a150f8c96c', 10)).toBe(16 * 12 + 3);
    expect(uuidStringToByte('bef8de96-41fe-442f-b70c-c3a150f8c96c', 11)).toBe(16 * 10 + 1);
    expect(uuidStringToByte('bef8de96-41fe-442f-b70c-c3a150f8c96c', 15)).toBe(16 * 6 + 12);
});

test('basic string', () => {
    const object = { name: 'Peter' };

    const expectedSize =
        4 //size uint32
        + 1 // type (string)
        + 'name\0'.length
        + (
            4 //string size uint32
            + 'Peter'.length + 1 //string content + null
        )
        + 1 //object null
    ;

    expect(calculateObjectSize(object)).toBe(expectedSize);

    const schema = typeOf<{
        name: string,
    }>();

    expect(getBSONSizer(undefined, schema)(object)).toBe(expectedSize);
    expect(getBSONSerializer(undefined, schema)(object).byteLength).toBe(expectedSize);
    expect(getBSONSerializer(undefined, schema)(object)).toEqual(serialize(object));
});

test('basic number int', () => {
    const object = { position: 24 };

    const expectedSize =
        4 //size uint32
        + 1 // type (number)
        + 'position\0'.length
        + (
            4 //int uint32
        )
        + 1 //object null
    ;

    expect(calculateObjectSize(object)).toBe(expectedSize);

    const schema = typeOf<{
        position: number,
    }>();

    expect(getBSONSizer(undefined, schema)(object)).toBe(expectedSize);
    expect(getBSONSerializer(undefined, schema)(object).byteLength).toBe(expectedSize);
    expect(getBSONSerializer(undefined, schema)(object)).toEqual(serialize(object));
});

test('basic long', () => {
    const object = { position: 3364367088039355000n }; //23

    const expectedSize =
        4 //size uint32
        + 1 // type (number)
        + 'position\0'.length
        + (
            4 //uint32 low bits
            + 4 //uint32 high bits
        )
        + 1 //object null
    ;

    const schema = typeOf<{
        position: number,
    }>();

    expect(getBSONSizer(undefined, schema)(object)).toBe(expectedSize);
    expect(getBSONSerializer(undefined, schema)(object).byteLength).toBe(expectedSize);
    // expect(getBSONSerializer(undefined, schema)(object)).toEqual(serialize(object)); //mongo doesnt support bigint

    const serializer = getBSONSerializer(undefined, schema);
    // const deserializer = getBSONDecoder(schema);
    expect(getBSONSizer(undefined, schema)(object)).toBe(expectedSize);
    expect(serializer(object).byteLength).toBe(expectedSize);

    // const reParsed = getBSONDecoder<any>(schema)(serializer(object));
    // expect(reParsed.position).toBe(3364367088039355000n);

    expect(serializer({ position: 123456n })).toEqual(serialize({ position: Long.fromNumber(123456) }));
    expect(serializer({ position: -123456n })).toEqual(serialize({ position: Long.fromNumber(-123456) }));

    expect(serializer({ position: 3364367088039355000n })).toEqual(serialize({ position: Long.fromBigInt(3364367088039355000n) }));
    expect(serializer({ position: -3364367088039355000n })).toEqual(serialize({ position: Long.fromBigInt(-3364367088039355000n) }));

    // expect(deserializer(serializer({ position: 3364367088039355000n }))).toEqual({ position: 3364367088039355000n });
    // expect(deserializer(serializer({ position: -3364367088039355000n }))).toEqual({ position: -3364367088039355000n });
});

test('basic bigint', () => {
    const object = { position: 3364367088039355000n };

    const expectedSize =
        4 //size uint32
        + 1 // type (binary)
        + 'position\0'.length
        + (
            4 //uint32 low bits
            + 4 //uint32 high bits
        )
        + 1 //object null
    ;

    const schema = typeOf<{
        position: bigint,
    }>();

    const serializer = getBSONSerializer(undefined, schema);
    // const deserializer = getBSONDecoder(schema);
    expect(getBSONSizer(undefined, schema)(object)).toBe(expectedSize);
    expect(serializer(object).byteLength).toBe(expectedSize);

    // const reParsed = deserializer(serializer(object));
    // expect(reParsed.position).toBe(3364367088039355000n);

    //this cases are valid when dynamic bigint serialization is activated
    // expect(serializer({ position: 123456n })).toEqual(serialize({ position: 123456 }));
    // expect(serializer({ position: -123456n })).toEqual(serialize({ position: -123456 }));
    // expect(serializer({ position: 3364367088039355000n })).toEqual(serialize({ position: Long.fromBigInt(3364367088039355000n) }));
    // expect(serializer({ position: -3364367088039355000n })).toEqual(serialize({ position: Long.fromBigInt(-3364367088039355000n) }));
    //
    // expect(serializer({ position: 9223372036854775807n })).toEqual(serialize({ position: Long.fromBigInt(9223372036854775807n) }));
    // expect(serializer({ position: -9223372036854775807n })).toEqual(serialize({ position: Long.fromBigInt(-9223372036854775807n) }));

    // expect(deserializer(serializer({ position: 123456n }))).toEqual({ position: 123456n });
    // expect(deserializer(serializer({ position: -123456n }))).toEqual({ position: -123456n });
    // expect(deserializer(serializer({ position: 3364367088039355000n }))).toEqual({ position: 3364367088039355000n });
    // expect(deserializer(serializer({ position: -3364367088039355000n }))).toEqual({ position: -3364367088039355000n });
    //
    // expect(deserializer(serializer({ position: 9223372036854775807n }))).toEqual({ position: 9223372036854775807n });
    // expect(deserializer(serializer({ position: -9223372036854775807n }))).toEqual({ position: -9223372036854775807n });
});

test('basic BinaryBigInt', () => {
    const object = { position: 3364367088039355000n };

    const expectedSize =
        4 //size uint32
        + 1 // type (binary)
        + 'position\0'.length
        + (
            4 //binary size
            + 1 //binary type
            + 8 //binary content
        )
        + 1 //object null
    ;

    const schema = typeOf<{
        position: BinaryBigInt,
    }>();

    const serializer = getBSONSerializer(undefined, schema);
    expect(getBSONSizer(undefined, schema)(object)).toBe(expectedSize);
    expect(serializer(object).byteLength).toBe(expectedSize);

    {
        const bson = serializer({ position: 9223372036854775810n }); //force binary format
        expect(bson).toEqual(Buffer.from([
            28, 0, 0, 0, //size
            BSONType.BINARY, //type long
            112, 111, 115, 105, 116, 105, 111, 110, 0, //position\n string
            8, 0, 0, 0, //binary size, int32
            BSON_BINARY_SUBTYPE_DEFAULT, //binary type
            128, 0, 0, 0, 0, 0, 0, 2, //binary data
            0, //object null
        ]));
    }

    {
        const bson = serializer({ position: -9223372036854775810n }); //force binary format
        expect(bson).toEqual(Buffer.from([
            28, 0, 0, 0, //size
            BSONType.BINARY, //type long
            112, 111, 115, 105, 116, 105, 111, 110, 0, //position\n string
            8, 0, 0, 0, //binary size, int32
            BSON_BINARY_SUBTYPE_DEFAULT, //binary type
            128, 0, 0, 0, 0, 0, 0, 2, //binary data
            0, //object null
        ]));
    }
});

test('basic SignedBinaryBigInt', () => {
    const object = { position: 3364367088039355000n };

    const expectedSize =
        4 //size uint32
        + 1 // type (binary)
        + 'position\0'.length
        + (
            4 //binary size
            + 1 //binary type
            + 9 //binary content
        )
        + 1 //object null
    ;

    const schema = typeOf<{
        position: SignedBinaryBigInt,
    }>();

    const serializer = getBSONSerializer(undefined, schema);
    expect(getBSONSizer(undefined, schema)(object)).toBe(expectedSize);
    expect(serializer(object).byteLength).toBe(expectedSize);

    {
        const bson = serializer({ position: 9223372036854775810n }); //force binary format
        expect(bson).toEqual(Buffer.from([
            29, 0, 0, 0, //size
            BSONType.BINARY, //type long
            112, 111, 115, 105, 116, 105, 111, 110, 0, //position\n string
            9, 0, 0, 0, //binary size, int32
            BSON_BINARY_SUBTYPE_DEFAULT, //binary type
            0, //signum
            128, 0, 0, 0, 0, 0, 0, 2, //binary data
            0, //object null
        ]));
    }

    {
        const bson = serializer({ position: -9223372036854775810n }); //force binary format
        expect(bson).toEqual(Buffer.from([
            29, 0, 0, 0, //size
            BSONType.BINARY, //type long
            112, 111, 115, 105, 116, 105, 111, 110, 0, //position\n string
            9, 0, 0, 0, //binary size, int32
            BSON_BINARY_SUBTYPE_DEFAULT, //binary type
            255, //signum, 255 = -1
            128, 0, 0, 0, 0, 0, 0, 2, //binary data
            0, //object null
        ]));
    }
});

// test('basic any bigint', () => {
//     const object = { position: 3364367088039355000n };
//
//     const expectedSize =
//         4 //size uint32
//         + 1 // type (binary)
//         + 'position\0'.length
//         + (
//             4 //binary size
//             + 1 //binary type
//             + 9 //binary content
//         )
//         + 1 //object null
//     ;
//
//     const schema = t.schema({
//         position: t.any,
//     });
//
//     const serializer = getBSONSerializer(undefined, schema);
//     const deserializer = getBSONDecoder(schema);
//     expect(getBSONSizer(undefined, schema)(object)).toBe(expectedSize);
//     expect(serializer(object).byteLength).toBe(expectedSize);
//
//     const reParsed = getBSONDecoder(schema)(serializer(object));
//     expect(reParsed.position).toBe(3364367088039355000n);
//
//     expect(deserializer(serializer({ position: 123456n }))).toEqual({ position: 123456n });
//     expect(deserializer(serializer({ position: -123456n }))).toEqual({ position: -123456n });
//     expect(deserializer(serializer({ position: 3364367088039355000n }))).toEqual({ position: 3364367088039355000n });
//     expect(deserializer(serializer({ position: -3364367088039355000n }))).toEqual({ position: -3364367088039355000n });
//
//     expect(deserializer(serializer({ position: 9223372036854775807n }))).toEqual({ position: 9223372036854775807n });
//     expect(deserializer(serializer({ position: -9223372036854775807n }))).toEqual({ position: -9223372036854775807n });
//
//     {
//         const bson = serializer({ position: 9223372036854775810n }); //force binary format
//         expect(bson).toEqual(Buffer.from([
//             29, 0, 0, 0, //size
//             BSONType.BINARY, //type long
//             112, 111, 115, 105, 116, 105, 111, 110, 0, //position\n string
//
//             9, 0, 0, 0, //binary size, int32
//             BSON_BINARY_SUBTYPE_BIGINT, //binary type
//
//             1, //signum
//             128, 0, 0, 0, 0, 0, 0, 2, //binary data
//
//             0, //object null
//         ]));
//     }
//
//     {
//         const bson = serializer({ position: -9223372036854775810n }); //force binary format
//         expect(bson).toEqual(Buffer.from([
//             29, 0, 0, 0, //size
//             BSONType.BINARY, //type long
//             112, 111, 115, 105, 116, 105, 111, 110, 0, //position\n string
//
//             9, 0, 0, 0, //binary size, int32
//             BSON_BINARY_SUBTYPE_BIGINT, //binary type
//
//             255, //signum, 255 = -1
//             128, 0, 0, 0, 0, 0, 0, 2, //binary data
//
//             0, //object null
//         ]));
//     }
// });

// test('basic long bigint', () => {
//     const bla: { n: number, m: string }[] = [
//         { n: 1, m: '1' },
//         { n: 1 << 16, m: 'max uint 16' },
//         { n: (1 << 16) + 100, m: 'max uint 16 + 100' },
//         { n: 4294967296, m: 'max uint 32' },
//         { n: 4294967296 - 100, m: 'max uint 32 - 100' },
//         { n: 4294967296 - 1, m: 'max uint 32 - 1' },
//         { n: 4294967296 + 100, m: 'max uint 32 + 100' },
//         { n: 4294967296 + 1, m: 'max uint 32 + 1' },
//         { n: 4294967296 * 10 + 1, m: 'max uint 32 * 10 + 1' },
//         // {n: 9223372036854775807, m: 'max uint64'},
//         // {n: 9223372036854775807 + 1, m: 'max uint64 - 1'},
//         // {n: 9223372036854775807 - 1, m: 'max uint64 + 2'},
//     ];
//     for (const b of bla) {
//         const long = Long.fromNumber(b.n);
//         console.log(b.n, long.toNumber(), long, b.m);
//     }
// });

test('basic number double', () => {
    const object = { position: 149943944399 };

    const expectedSize =
        4 //size uint32
        + 1 // type (number)
        + 'position\0'.length
        + (
            8 //double, 64bit
        )
        + 1 //object null
    ;

    const expectedSizeNull =
        4 //size uint32
        + 1 // type (number)
        + 'position\0'.length
        + (
            0 //undefined
        )
        + 1 //object null
    ;

    expect(calculateObjectSize(object)).toBe(expectedSize);
    expect(calculateObjectSize({ position: null })).toBe(expectedSizeNull);
    expect(calculateObjectSize({ position: undefined })).toBe(5);

    const schema = typeOf<{
        position?: number,
    }>();

    expect(getBSONSerializer(undefined, schema)(object).byteLength).toBe(expectedSize);
    expect(getBSONSizer(undefined, schema)(object)).toBe(expectedSize);
    expect(getBSONSerializer(undefined, schema)(object)).toEqual(serialize(object));

    expect(getBSONSerializer(undefined, schema)({ position: undefined }).byteLength).toBe(expectedSizeNull);
    expect(getBSONSerializer(undefined, schema)({}).byteLength).toBe(5);
});

test('basic boolean', () => {
    const object = { valid: true };

    const expectedSize =
        4 //size uint32
        + 1 // type (boolean)
        + 'valid\0'.length
        + (
            1 //boolean
        )
        + 1 //object null
    ;

    expect(calculateObjectSize(object)).toBe(expectedSize);

    const schema = typeOf<{
        valid: boolean,
    }>();

    expect(getBSONSerializer(undefined, schema)(object).byteLength).toBe(expectedSize);
    expect(getBSONSizer(undefined, schema)(object)).toBe(expectedSize);
    expect(getBSONSerializer(undefined, schema)(object)).toEqual(serialize(object));
});

test('basic date', () => {
    const object = { created: new Date };

    const expectedSize =
        4 //size uint32
        + 1 // type (date)
        + 'created\0'.length
        + (
            8 //date
        )
        + 1 //object null
    ;

    expect(calculateObjectSize(object)).toBe(expectedSize);

    const schema = typeOf<{
        created: Date,
    }>();

    const serializer = getBSONSerializer(undefined, schema);
    // expect(serializer(object).byteLength).toBe(expectedSize);
    // expect(getBSONSizer(undefined, schema)(object)).toBe(expectedSize);
    // expect(serializer(object)).toEqual(serialize(object));

    expect(serializer({ created: new Date('2900-10-12T00:00:00.000Z') })).toEqual(serialize({ created: new Date('2900-10-12T00:00:00.000Z') }));
    expect(serializer({ created: new Date('1900-10-12T00:00:00.000Z') })).toEqual(serialize({ created: new Date('1900-10-12T00:00:00.000Z') }));
    expect(serializer({ created: new Date('1000-10-12T00:00:00.000Z') })).toEqual(serialize({ created: new Date('1000-10-12T00:00:00.000Z') }));

    // const deserializer = getBSONDecoder(schema);
    // expect(deserializer(serializer({ created: new Date('2900-10-12T00:00:00.000Z') }))).toEqual({ created: new Date('2900-10-12T00:00:00.000Z') });
    // expect(deserializer(serializer({ created: new Date('1900-10-12T00:00:00.000Z') }))).toEqual({ created: new Date('1900-10-12T00:00:00.000Z') });
    // expect(deserializer(serializer({ created: new Date('1000-10-12T00:00:00.000Z') }))).toEqual({ created: new Date('1000-10-12T00:00:00.000Z') });
});

test('basic binary', () => {
    const object = { binary: new Uint16Array(32) };

    const expectedSize =
        4 //size uint32
        + 1 // type (date)
        + 'binary\0'.length
        + (
            4 //size of binary, uin32
            + 1 //sub type
            + 32 * 2 //size of data
        )
        + 1 //object null
    ;

    expect(new Uint16Array(32).byteLength).toBe(32 * 2);

    //this doesn't support typed arrays
    // expect(calculateObjectSize(object)).toBe(expectedSize);

    const schema = typeOf<{
        binary: Uint16Array,
    }>();

    expect(getBSONSerializer(undefined, schema)(object).byteLength).toBe(expectedSize);
    expect(getBSONSizer(undefined, schema)(object)).toBe(expectedSize);

    //doesnt support typed arrays
    // expect(getBSONSerializer(undefined, schema)(object)).toEqual(serialize(object));
    // expect(getBSONDecoder(schema)(getBSONSerializer(undefined, schema)(object))).toEqual(object);
});

test('basic arrayBuffer', () => {
    const arrayBuffer = new ArrayBuffer(5);
    const view = new Uint8Array(arrayBuffer);
    view[0] = 22;
    view[1] = 44;
    view[2] = 55;
    view[3] = 66;
    view[4] = 77;
    const object = { binary: arrayBuffer };

    const expectedSize =
        4 //size uint32
        + 1 // type (date)
        + 'binary\0'.length
        + (
            4 //size of binary, uin32
            + 1 //sub type
            + 5 //size of data
        )
        + 1 //object null
    ;

    // expect(calculateObjectSize(object)).toBe(expectedSize);

    const schema = typeOf<{
        binary: ArrayBuffer,
    }>();

    expect(getBSONSerializer(undefined, schema)(object).byteLength).toBe(expectedSize);
    expect(getBSONSizer(undefined, schema)(object)).toBe(expectedSize);
    // expect(getBSONDecoder(schema)(getBSONSerializer(undefined, schema)(object))).toEqual(object);
    // expect(getBSONSerializer(undefined, schema)(object)).toEqual(serialize(object));
});

test('basic Buffer', () => {
    const object = { binary: new Uint8Array(32) };

    const expectedSize =
        4 //size uint32
        + 1 // type (date)
        + 'binary\0'.length
        + (
            4 //size of binary, uin32
            + 1 //sub type
            + 32 //size of data
        )
        + 1 //object null
    ;

    // expect(calculateObjectSize(object)).toBe(expectedSize);

    const schema = typeOf<{
        binary: Uint8Array,
    }>();

    expect(getBSONSerializer(undefined, schema)(object).byteLength).toBe(expectedSize);
    expect(getBSONSizer(undefined, schema)(object)).toBe(expectedSize);
    // expect(getBSONDecoder(schema)(getBSONSerializer(undefined, schema)(object))).toEqual(object);

    Buffer.alloc(2);
    Buffer.alloc(200);
    Buffer.alloc(20000);

    // expect(getBSONDecoder(schema)(getBSONSerializer(undefined, schema)({
    //     binary: Buffer.alloc(44)
    // }))).toEqual({
    //     binary: new Uint8Array(44)
    // });
});

test('basic uuid', () => {
    const uuidRandomBinary = new Binary(
        Buffer.allocUnsafe(16),
        Binary.SUBTYPE_UUID
    );

    const object = { uuid: '75ed2328-89f2-4b89-9c49-1498891d616d' };

    const expectedSize =
        4 //size uint32
        + 1 // type (date)
        + 'uuid\0'.length
        + (
            4 //size of binary
            + 1 //sub type
            + 16 //content of uuid
        )
        + 1 //object null
    ;

    expect(calculateObjectSize({ uuid: uuidRandomBinary })).toBe(expectedSize);

    const schema = typeOf<{
        uuid: UUID,
    }>();

    expect(getBSONSizer(undefined, schema)(object)).toBe(expectedSize);
    expect(getBSONSerializer(undefined, schema)(object).byteLength).toBe(expectedSize);

    const uuidPlain = Buffer.from([0x75, 0xed, 0x23, 0x28, 0x89, 0xf2, 0x4b, 0x89, 0x9c, 0x49, 0x14, 0x98, 0x89, 0x1d, 0x61, 0x6d]);
    const uuidBinary = new Binary(uuidPlain, 4);
    const objectBinary = {
        uuid: uuidBinary
    };

    expect(getBSONSerializer(undefined, schema)(object).byteLength).toBe(expectedSize);
    expect(getBSONSerializer(undefined, schema)(object)).toEqual(serialize(objectBinary));

    // const bson = serialize(objectBinary);
    // const parsed = parseObject(new ParserV2(bson));
    // expect(parsed.uuid).toBe('75ed2328-89f2-4b89-9c49-1498891d616d');
});

test('basic objectId', () => {
    const object = { _id: '507f191e810c19729de860ea' };

    const expectedSize =
        4 //size uint32
        + 1 // type
        + '_id\0'.length
        + (
            12 //size of objectId
        )
        + 1 //object null
    ;

    const nativeBson = { _id: new OfficialObjectId('507f191e810c19729de860ea') };
    expect(calculateObjectSize(nativeBson)).toBe(expectedSize);

    const schema = typeOf<{
        _id: MongoId,
    }>();

    expect(getBSONSerializer(undefined, schema)(object).byteLength).toBe(expectedSize);
    expect(getBSONSizer(undefined, schema)(object)).toBe(expectedSize);
    expect(getBSONSerializer(undefined, schema)(object)).toEqual(serialize(nativeBson));
});

test('basic nested', () => {
    const object = { name: { anotherOne: 'Peter2' } };

    const expectedSize =
        4 //size uint32
        + 1 //type (object)
        + 'name\0'.length
        + (
            4 //size uint32
            + 1 //type (object)
            + 'anotherOne\0'.length
            + (
                4 //string size uint32
                + 'Peter2'.length + 1 //string content + null
            )
            + 1 //object null
        )
        + 1 //object null
    ;

    expect(calculateObjectSize(object)).toBe(expectedSize);

    const schema = typeOf<{
        name: { anotherOne: string },
    }>();

    expect(getBSONSizer(undefined, schema)(object)).toBe(expectedSize);
    expect(getBSONSerializer(undefined, schema)(object)).toEqual(serialize(object));
});

test('basic map', () => {
    const object = { name: new Map([['abc', 'Peter']]) };

    const expectedSize =
        4 //size uint32
        + 1 //type (array)
        + 'name\0'.length
        + (
            4 //size uint32 of array
            + 1 //type (array)
            + '0\0'.length //key
            + (
                4 //size uint32 of array
                + 1 //type (string)
                + '0\0'.length //key
                + (
                    4 //string size uint32
                    + 'abc'.length + 1 //string content + null
                )
                + 1 //type (string)
                + '1\0'.length //key
                + (
                    4 //string size uint32
                    + 'Peter'.length + 1 //string content + null
                )
                + 1 //object null
            )
            + 1 //object null
        )
        + 1 //object null
    ;

    expect(calculateObjectSize({ name: [['abc', 'Peter']] })).toBe(expectedSize);

    const schema = typeOf<{
        name: Map<string, string>
    }>();

    expect(getBSONSizer(undefined, schema)(object)).toBe(expectedSize);
    expect(getBSONSerializer(undefined, schema)(object).byteLength).toBe(expectedSize);
    expect(getBSONSerializer(undefined, schema)(object)).toEqual(serialize({ name: [['abc', 'Peter']] }));
});

test('basic set', () => {
    const object = { name: new Set(['abc', 'Peter']) };

    const expectedSize =
        4 //size uint32
        + 1 //type (array)
        + 'name\0'.length
        + (
            4 //size uint32 of array
            + 1 //type (string)
            + '0\0'.length //key
            + (
                4 //string size uint32
                + 'abc'.length + 1 //string content + null
            )
            + 1 //type (string)
            + '1\0'.length //key
            + (
                4 //string size uint32
                + 'Peter'.length + 1 //string content + null
            )
            + 1 //object null
        )
        + 1 //object null
    ;

    expect(calculateObjectSize({ name: ['abc', 'Peter'] })).toBe(expectedSize);

    const schema = typeOf<{
        name: Set<string>
    }>();

    expect(getBSONSizer(undefined, schema)(object)).toBe(expectedSize);
    expect(getBSONSerializer(undefined, schema)(object).byteLength).toBe(expectedSize);
    expect(getBSONSerializer(undefined, schema)(object)).toEqual(serialize({ name: ['abc', 'Peter'] }));
});

test('basic array', () => {
    const object = { name: ['Peter3'] };

    const expectedSize =
        4 //size uint32
        + 1 //type (array)
        + 'name\0'.length
        + (
            4 //size uint32 of array
            + 1 //type (string)
            + '0\0'.length //key
            + (
                4 //string size uint32
                + 'Peter3'.length + 1 //string content + null
            )
            + 1 //object null
        )
        + 1 //object null
    ;

    expect(calculateObjectSize(object)).toBe(expectedSize);

    const schema = typeOf<{
        name: string[]
    }>();

    expect(getBSONSerializer(undefined, schema)(object).byteLength).toBe(expectedSize);
    expect(getBSONSizer(undefined, schema)(object)).toBe(expectedSize);
    expect(getBSONSerializer(undefined, schema)(object)).toEqual(serialize(object));
});

// test('number', () => {
//     const object = { name: 'Peter4', tags: ['a', 'b', 'c'], priority: 15, position: 149943944399, valid: true, created: new Date() };
//
//     const schema = t.schema({
//         name: t.string,
//         tags: t.array(t.string),
//         priority: t.number,
//         position: t.number,
//         valid: t.boolean,
//         created: t.date,
//     });
//
//     expect(getBSONSizer(undefined, schema)(object)).toBe(calculateObjectSize(object));
//     expect(getBSONSerializer(undefined, schema)(object)).toEqual(serialize(object));
// });
//
test('all supported base types', () => {
    const object = { name: 'Peter4', tags: ['a', 'b', 'c'], priority: 15, position: 149943944399, valid: true, created: new Date() };

    const schema = typeOf<{
        name: string,
        tags: string[],
        priority: number,
        position: number,
        valid: boolean,
        created: Date
    }>();

    expect(getBSONSizer(undefined, schema)(object)).toBe(calculateObjectSize(object));
    expect(getBSONSerializer(undefined, schema)(object)).toEqual(serialize(object));
});

// test('string utf8', () => {
//     const schema = typeOf<{
//         name: string,
//         any: any
//     }>();
//
//     const serialize = getBSONSerializer(undefined, schema);
//     const parse = getBSONDecoder(schema);
//
//     expect(parse(serialize({ name: 'Peter' }))).toEqual({ name: 'Peter' });
//     expect(parse(serialize({ name: 'Peter✌️' }))).toEqual({ name: 'Peter✌️' });
//     expect(parse(serialize({ name: '✌️' }))).toEqual({ name: '✌️' });
//     expect(parse(serialize({ name: '🌉' }))).toEqual({ name: '🌉' });
//     expect(parse(serialize({ name: 'πøˆ️' }))).toEqual({ name: 'πøˆ️' });
//     expect(parse(serialize({ name: 'Ѓ' }))).toEqual({ name: 'Ѓ' });
//     expect(parse(serialize({ name: '㒨' }))).toEqual({ name: '㒨' });
//     expect(parse(serialize({ name: '﨣' }))).toEqual({ name: '﨣' });
//
//     expect(parse(serialize({ any: { base: true } }))).toEqual({ any: { base: true } });
//     expect(parse(serialize({ any: { '✌️': true } }))).toEqual({ any: { '✌️': true } });
//     expect(parse(serialize({ any: { 'Ѓ': true } }))).toEqual({ any: { 'Ѓ': true } });
//     expect(parse(serialize({ any: { 㒨: true } }))).toEqual({ any: { 㒨: true } });
//     expect(parse(serialize({ any: { 﨣: true } }))).toEqual({ any: { 﨣: true } });
// });

test('optional field', () => {
    const schema = typeOf<{
        find: string,
        batchSize: number,
        limit?: number,
        skip?: number,
    }>();

    const findSerializer = getBSONSerializer(undefined, schema);

    const bson = findSerializer({
        find: 'user',
        batchSize: 1,
        limit: 1,
    });
    const bsonOfficial = serialize({
        find: 'user',
        batchSize: 1,
        limit: 1,
    });

    expect(bson).toEqual(bsonOfficial);
});

test('complex', () => {
    const schema = typeOf<{
        find: string,
        batchSize: number,
        limit?: number,
        filter: any,
        projection: any,
        sort: any,
        skip?: number,
    }>();

    const findSerializer = getBSONSerializer(undefined, schema);

    const bson = findSerializer({
        find: 'user',
        batchSize: 1,
        limit: 1,
    });
    const bsonOfficial = serialize({
        find: 'user',
        batchSize: 1,
        limit: 1,
    });

    expect(bson).toEqual(bsonOfficial);
});

//for the moment, bson does not support embedded
// test('embedded', () => {
//     class DecoratedValue {
//         constructor(public items: string[] = []) {
//         }
//     }
//
//     const object = { v: new DecoratedValue(['Peter3']) };
//
//     const expectedSize =
//         4 //size uint32
//         + 1 //type (array)
//         + 'v\0'.length
//         + (
//             4 //size uint32 of array
//             + 1 //type (string)
//             + '0\0'.length //key
//             + (
//                 4 //string size uint32
//                 + 'Peter3'.length + 1 //string content + null
//             )
//             + 1 //object null
//         )
//         + 1 //object null
//     ;
//
//     expect(calculateObjectSize({ v: ['Peter3'] })).toBe(expectedSize);
//
//     const schema = typeOf<{
//         v: Embedded<DecoratedValue>
//     }>();
//
//     const bson = getBSONSerializer(undefined, schema)(object);
//
//     const officialDeserialize = deserialize(Buffer.from(bson));
//     expect(officialDeserialize.v).toEqual(['Peter3']);
//
//     expect(bson.byteLength).toBe(expectedSize);
//     expect(getBSONSizer(undefined, schema)(object)).toBe(expectedSize);
//
//     expect(bson).toEqual(serialize({ v: ['Peter3'] }));
//
//     // const back = getBSONDecoder(schema)(bson);
//     // expect(back.v).toBeInstanceOf(DecoratedValue);
//     // expect(back.v.items).toEqual(['Peter3']);
//     // expect(back).toEqual(object);
// });

test('reference', () => {
    class Entity {
        public id: number & PrimaryKey = 0;

        constructor(public title: string) {
        }
    }

    const object = { v: createReference(Entity, { id: 5 }) };

    const expectedSize =
        4 //size uint32
        + 1 //type (number)
        + 'v\0'.length
        + (
            4 //int uint32
        )
        + 1 //object null
    ;

    expect(calculateObjectSize({ v: 5 })).toBe(expectedSize);

    const schema = typeOf<{
        v: Entity & Reference
    }>();

    expect(getBSONSizer(undefined, schema)(object)).toBe(expectedSize);

    const bson = getBSONSerializer(undefined, schema)(object);

    const officialDeserialize = deserialize(Buffer.from(bson));
    expect(officialDeserialize.v).toEqual(5);

    expect(bson.byteLength).toBe(expectedSize);
    expect(bson).toEqual(serialize({ v: 5 }));

    // const back = getBSONDecoder(schema)(bson);
    // expect(back.v).toBeInstanceOf(DecoratedValue);
    // expect(back.v.items).toEqual(['Peter3']);
    // expect(back).toEqual(object);
});

test('bson length', () => {
    const nonce = randomBytes(24);

    const SaslStartCommand = typeOf<{
        saslStart: 1,
        $db: string,
        mechanism: string,
        payload: Uint8Array,
        autoAuthorize: 1,
        options: {
            skipEmptyExchange: true
        }
    }>();

    const message = {
        saslStart: 1,
        $db: 'admin',
        mechanism: 'SCRAM-SHA-1',
        payload: Buffer.concat([Buffer.from('n,,', 'utf8'), Buffer.from(`n=Peter,r=${nonce.toString('base64')}`, 'utf8')]),
        autoAuthorize: 1,
        options: { skipEmptyExchange: true }
    };

    expect(message.payload.byteLength).toBe(13 + nonce.toString('base64').length);

    const size = getBSONSizer(undefined, SaslStartCommand)(message);
    expect(size).toBe(calculateObjectSize(message));

    const bson = getBSONSerializer(undefined, SaslStartCommand)(message);

    expect(bson).toEqual(serialize(message));
});

test('arrayBuffer', () => {
    const schema = typeOf<{
        name: string,
        secondId: MongoId,
        preview: ArrayBuffer,
    }>();

    const message = {
        name: 'myName',
        secondId: '5bf4a1ccce060e0b38864c9e',
        preview: nodeBufferToArrayBuffer(Buffer.from('Baar', 'utf8'))
    };

    expect(Buffer.from(message.preview).toString('utf8')).toBe('Baar');

    const mongoMessage = {
        name: message.name,
        secondId: new OfficialObjectId(message.secondId),
        preview: new Binary(Buffer.from(message.preview)),
    };

    const size = getBSONSizer(undefined, schema)(message);
    expect(size).toBe(calculateObjectSize(mongoMessage));

    const bson = getBSONSerializer(undefined, schema)(message);

    expect(bson).toEqual(serialize(mongoMessage));

    // const back = getBSONDecoder(schema)(bson);
    // expect(Buffer.from(back.preview).toString('utf8')).toBe('Baar');
    // expect(back.preview).toEqual(message.preview);
});

test('typed array', () => {
    const schema = typeOf<{
        name: string,
        secondId: MongoId,
        preview: Uint16Array,
    }>();

    const message = {
        name: 'myName',
        secondId: '5bf4a1ccce060e0b38864c9e',
        preview: new Uint16Array(nodeBufferToArrayBuffer(Buffer.from('LAA3AEIATQBYAA==', 'base64'))), //44, 55, 66, 77, 88
    };

    expect(message.preview).toBeInstanceOf(Uint16Array);
    expect(message.preview.byteLength).toBe(10);

    const mongoMessage = {
        name: message.name,
        secondId: new OfficialObjectId(message.secondId),
        preview: new Binary(Buffer.from(new Uint8Array(message.preview.buffer, message.preview.byteOffset, message.preview.byteLength))),
    };

    const size = getBSONSizer(undefined, schema)(message);
    expect(size).toBe(calculateObjectSize(mongoMessage));

    const bson = getBSONSerializer(undefined, schema)(message);

    expect(bson).toEqual(serialize(mongoMessage));

    // const back = getBSONDecoder(schema)(bson);
    // expect(back.preview).toEqual(message.preview);
});

test('union string | number', () => {
    const schema = typeOf<{
        v: string | number,
    }>();

    expect(getBSONSizer(undefined, schema)({ v: 'abc' })).toBe(calculateObjectSize({ v: 'abc' }));
    expect(getBSONSizer(undefined, schema)({ v: 2 })).toBe(calculateObjectSize({ v: 3 }));

    expect(getBSONSerializer(undefined, schema)({ v: 'abc' })).toEqual(serialize({ v: 'abc' }));
    expect(getBSONSerializer(undefined, schema)({ v: 2 })).toEqual(serialize({ v: 2 }));
});

test('union number | class', () => {
    class MyClass {
        id: number = 0;
    }

    const schema = typeOf<{
        v: number | MyClass,
    }>();

    expect(getBSONSizer(undefined, schema)({ v: { id: 5 } })).toBe(calculateObjectSize({ v: { id: 5 } }));
    expect(getBSONSizer(undefined, schema)({ v: 2 })).toBe(calculateObjectSize({ v: 3 }));

    expect(getBSONSerializer(undefined, schema)({ v: { id: 5 } })).toEqual(serialize({ v: { id: 5 } }));
    expect(getBSONSerializer(undefined, schema)({ v: 2 })).toEqual(serialize({ v: 2 }));
});

test('index signature', () => {
    const schema = typeOf<{
        [name: string]: number
    }>();

    expect(getBSONSizer(undefined, schema)({ a: 5 })).toBe(calculateObjectSize({ a: 5 }));
    expect(getBSONSizer(undefined, schema)({ a: 5, b: 6 })).toBe(calculateObjectSize({ a: 5, b: 6 }));

    expect(getBSONSerializer(undefined, schema)({ a: 5 })).toEqual(serialize({ a: 5 }));
    expect(getBSONSerializer(undefined, schema)({ a: 5, b: 6 })).toEqual(serialize({ a: 5, b: 6 }));
});

test('index signature + properties', () => {
    const schema = typeOf<{
        id: number;
        [name: string]: number | string
    }>();

    expect(getBSONSizer(undefined, schema)({ id: 1, a: 5 })).toBe(calculateObjectSize({ id: 1, a: 5 }));
    expect(getBSONSizer(undefined, schema)({ id: 1, a: 5, b: 6 })).toBe(calculateObjectSize({ id: 1, a: 5, b: 6 }));

    expect(getBSONSerializer(undefined, schema)({ id: 1, a: 5 })).toEqual(serialize({ id: 1, a: 5 }));
    expect(getBSONSerializer(undefined, schema)({ id: 1, a: 5, b: 6 })).toEqual(serialize({ id: 1, a: 5, b: 6 }));
});

test('exclude', () => {
    const schema = typeOf<{
        id: number;
        password: string & Excluded
    }>();

    expect(getBSONSizer(undefined, schema)({ id: 1, password: 'asdasd' })).toBe(calculateObjectSize({ id: 1 }));
    expect(getBSONSerializer(undefined, schema)({ id: 1, password: 'asdasd' })).toEqual(serialize({ id: 1 }));
});

test('promise', () => {
    const schema = typeOf<{
        id: Promise<number>;
    }>();

    expect(getBSONSizer(undefined, schema)({ id: 1 })).toBe(calculateObjectSize({ id: 1 }));
    expect(getBSONSerializer(undefined, schema)({ id: 1 })).toEqual(serialize({ id: 1 }));
});

test('regepx', () => {
    const schema = typeOf<{
        id: RegExp
    }>();

    expect(getBSONSizer(undefined, schema)({ id: /asd/g })).toBe(calculateObjectSize({ id: /asd/g }));
    expect(getBSONSerializer(undefined, schema)({ id: /asd/g })).toEqual(serialize({ id: /asd/g }));
});

test('typed any and undefined', () => {
    const schema = typeOf<{
        data: any,
    }>();

    const message = {
        data: {
            $set: {},
            $inc: undefined,
        },
    };

    // expect(getValueSize({ $inc: undefined })).toBe(calculateObjectSize({ $inc: undefined })); //official BSON does not include undefined values, but we do
    expect(getValueSize({ $inc: [undefined] })).toBe(calculateObjectSize({ $inc: [undefined] }));

    // const size = getBSONSizer(undefined, schema)(message);
    // expect(size).toBe(calculateObjectSize(message)); //official bson doesnt include undefined

    //todo: not sure what the expectation here was
    const bson = getBSONSerializer(undefined, schema)(message);
    // expect(bson).toEqual(serialize(message)); //official bson doesnt include undefined

    // const back = getBSONDecoder(schema)(bson);
    // expect(back.data.$set).toEqual({});
    // expect(back.data.$inc).toEqual(undefined);
    // expect('$inc' in back.data).toEqual(true);
});

test('Excluded', () => {
    class Model {
        id: UUID & PrimaryKey = uuid();

        excludedForMongo: string & Excluded<'bson'> = 'excludedForMongo';

        constructor(public name: string) {
        }
    }

    const model = new Model('asd');

    interface Message {
        insert: string;
        $db: string;
        documents: Model[];
    }

    const fn = getBSONSerializer<Message>();
    const bson = fn({ insert: 'a', $db: 'b', documents: [model] });
    const back = deserializeBSONWithoutOptimiser(bson);
    expect(back.documents[0].name).toBe('asd');
    expect(back.documents[0].excludedForMongo).toBeUndefined();
});

test('complex recursive', () => {
    class ModuleApi {
        api?: ModuleApi;

        imports: ModuleApi[] = [];

        constructor(
            public name: string,
        ) {
        }
    }

    const data = {
        name: 'a',
        api: {
            imports: [],
            name: 'a2',
        },
        imports: [
            {
                name: 'b',
                api: {
                    imports: [],
                    name: 'b2',
                },
                imports: [
                    {
                        imports: [],
                        name: 'c',
                    }
                ],
            }
        ],
    };

    const fn = getBSONSerializer<ModuleApi>();

    {
        const bson = fn(data);
        console.log('first', Buffer.from(bson).toString('hex'));
        const back1 = deserializeBSONWithoutOptimiser(bson);
        console.log('back 1', back1);
        expect(back1).toEqual(data);
    }

    {
        const bson = fn(data);
        console.log('second', Buffer.from(bson).toString('hex'));
        const back1 = deserializeBSONWithoutOptimiser(bson);
        console.log('back 1', back1);
        expect(back1).toEqual(data);
    }

    {
        const bson = fn(data);
        const back1 = deserializeBSON<ModuleApi>(bson);
        console.log('back 1', back1);
        expect(back1).toEqual(data);
    }
});
the_stack
// Tests for CdkToolkit (deploy / bootstrap / watch / synth).
// The chokidar mocks are declared BEFORE the imports on purpose: jest.mock() is
// hoisted, and the mock factory must only reference already-initialized consts.
const mockChokidarWatcherOn = jest.fn();
const fakeChokidarWatcher = {
  on: mockChokidarWatcherOn,
};
// Accessors that dig the callbacks the production code registered on the
// watcher back out of the mock's recorded calls.
const fakeChokidarWatcherOn = {
  get readyCallback(): () => void {
    expect(mockChokidarWatcherOn.mock.calls.length).toBeGreaterThanOrEqual(1);
    // The call to the first 'watcher.on()' in the production code is the one we actually want here.
    // This is pretty fragile, but at least with this helper class,
    // we would have to change it only in one place if it ever breaks
    const firstCall = mockChokidarWatcherOn.mock.calls[0];
    // let's make sure the first argument is the 'ready' event,
    // just to be double safe
    expect(firstCall[0]).toBe('ready');
    // the second argument is the callback
    return firstCall[1];
  },

  get fileEventCallback(): (event: 'add' | 'addDir' | 'change' | 'unlink' | 'unlinkDir', path: string) => Promise<void> {
    expect(mockChokidarWatcherOn.mock.calls.length).toBeGreaterThanOrEqual(2);
    const secondCall = mockChokidarWatcherOn.mock.calls[1];
    // let's make sure the first argument is not the 'ready' event,
    // just to be double safe
    expect(secondCall[0]).not.toBe('ready');
    // the second argument is the callback
    return secondCall[1];
  },
};

const mockChokidarWatch = jest.fn();
jest.mock('chokidar', () => ({
  watch: mockChokidarWatch,
}));
// Accessors for the arguments the production code passed to chokidar.watch().
const fakeChokidarWatch = {
  get includeArgs(): string[] {
    expect(mockChokidarWatch.mock.calls.length).toBe(1);
    // the include args are the first parameter to the 'watch()' call
    return mockChokidarWatch.mock.calls[0][0];
  },

  get excludeArgs(): string[] {
    expect(mockChokidarWatch.mock.calls.length).toBe(1);
    // the ignore args are a property of the second parameter to the 'watch()' call
    const chokidarWatchOpts = mockChokidarWatch.mock.calls[0][1];
    return chokidarWatchOpts.ignored;
  },
};

import * as cxschema from '@aws-cdk/cloud-assembly-schema';
import * as cxapi from '@aws-cdk/cx-api';
import { Bootstrapper } from '../lib/api/bootstrap';
import { CloudFormationDeployments, DeployStackOptions } from '../lib/api/cloudformation-deployments';
import { DeployStackResult } from '../lib/api/deploy-stack';
import { Template } from '../lib/api/util/cloudformation';
import { CdkToolkit, Tag } from '../lib/cdk-toolkit';
import { RequireApproval } from '../lib/diff';
import { instanceMockFrom, MockCloudExecutable, TestStackArtifact } from './util';

// Shared, re-created in beforeEach; individual tests may replace cloudExecutable.
let cloudExecutable: MockCloudExecutable;
let bootstrapper: jest.Mocked<Bootstrapper>;
beforeEach(() => {
  jest.resetAllMocks();

  mockChokidarWatch.mockReturnValue(fakeChokidarWatcher);
  // on() in chokidar's Watcher returns 'this'
  mockChokidarWatcherOn.mockReturnValue(fakeChokidarWatcher);

  bootstrapper = instanceMockFrom(Bootstrapper);
  bootstrapper.bootstrapEnvironment.mockResolvedValue({ noOp: false, outputs: {} } as any);

  cloudExecutable = new MockCloudExecutable({
    stacks: [
      MockStack.MOCK_STACK_A,
      MockStack.MOCK_STACK_B,
    ],
    nestedAssemblies: [{
      stacks: [MockStack.MOCK_STACK_C],
    }],
  });
});

// Builds a CdkToolkit wired to the current cloudExecutable and a
// FakeCloudFormation that asserts the expected per-stack tags.
function defaultToolkitSetup() {
  return new CdkToolkit({
    cloudExecutable,
    configuration: cloudExecutable.configuration,
    sdkProvider: cloudExecutable.sdkProvider,
    cloudFormation: new FakeCloudFormation({
      'Test-Stack-A': { Foo: 'Bar' },
      'Test-Stack-B': { Baz: 'Zinga!' },
      'Test-Stack-C': { Baz: 'Zinga!' },
    }),
  });
}

describe('deploy', () => {
  test('fails when no valid stack names are given', async () => {
    // GIVEN
    const toolkit = defaultToolkitSetup();

    // WHEN
    await expect(() => toolkit.deploy({ selector: { patterns: ['Test-Stack-D'] } })).rejects.toThrow('No stacks match the name(s) Test-Stack-D');
  });

  describe('with hotswap deployment', () => {
    test("passes through the 'hotswap' option to CloudFormationDeployments.deployStack()", async () => {
      // GIVEN
      const mockCfnDeployments = instanceMockFrom(CloudFormationDeployments);
      mockCfnDeployments.deployStack.mockReturnValue(Promise.resolve({
        noOp: false,
        outputs: {},
        stackArn: 'stackArn',
        stackArtifact: instanceMockFrom(cxapi.CloudFormationStackArtifact),
      }));
      const cdkToolkit = new CdkToolkit({
        cloudExecutable,
        configuration: cloudExecutable.configuration,
        sdkProvider: cloudExecutable.sdkProvider,
        cloudFormation: mockCfnDeployments,
      });

      // WHEN
      await cdkToolkit.deploy({
        selector: { patterns: ['Test-Stack-A'] },
        requireApproval: RequireApproval.Never,
        hotswap: true,
      });

      // THEN
      expect(mockCfnDeployments.deployStack).toHaveBeenCalledWith(expect.objectContaining({
        hotswap: true,
      }));
    });
  });

  // These tests rely on FakeCloudFormation.deployStack's internal assertions;
  // having no explicit THEN is intentional.
  describe('makes correct CloudFormation calls', () => {
    test('without options', async () => {
      // GIVEN
      const toolkit = defaultToolkitSetup();

      // WHEN
      await toolkit.deploy({ selector: { patterns: ['Test-Stack-A', 'Test-Stack-B'] } });
    });

    test('with stacks all stacks specified as double wildcard', async () => {
      // GIVEN
      const toolkit = defaultToolkitSetup();

      // WHEN
      await toolkit.deploy({ selector: { patterns: ['**'] } });
    });

    test('with one stack specified', async () => {
      // GIVEN
      const toolkit = defaultToolkitSetup();

      // WHEN
      await toolkit.deploy({ selector: { patterns: ['Test-Stack-A'] } });
    });

    test('with stacks all stacks specified as wildcard', async () => {
      // GIVEN
      const toolkit = defaultToolkitSetup();

      // WHEN
      await toolkit.deploy({ selector: { patterns: ['*'] } });
    });

    test('with sns notification arns', async () => {
      // GIVEN
      const notificationArns = ['arn:aws:sns:::cfn-notifications', 'arn:aws:sns:::my-cool-topic'];
      const toolkit = new CdkToolkit({
        cloudExecutable,
        configuration: cloudExecutable.configuration,
        sdkProvider: cloudExecutable.sdkProvider,
        cloudFormation: new FakeCloudFormation({
          'Test-Stack-A': { Foo: 'Bar' },
          'Test-Stack-B': { Baz: 'Zinga!' },
        }, notificationArns),
      });

      // WHEN
      await toolkit.deploy({
        selector: { patterns: ['Test-Stack-A', 'Test-Stack-B'] },
        notificationArns,
      });
    });

    test('globless bootstrap uses environment without question', async () => {
      // GIVEN
      const toolkit = defaultToolkitSetup();

      // WHEN
      await toolkit.bootstrap(['aws://56789/south-pole'], bootstrapper, {});

      // THEN
      expect(bootstrapper.bootstrapEnvironment).toHaveBeenCalledWith({
        account: '56789',
        region: 'south-pole',
        name: 'aws://56789/south-pole',
      }, expect.anything(), expect.anything());
      expect(bootstrapper.bootstrapEnvironment).toHaveBeenCalledTimes(1);
    });

    test('globby bootstrap uses whats in the stacks', async () => {
      // GIVEN
      const toolkit = defaultToolkitSetup();
      cloudExecutable.configuration.settings.set(['app'], 'something');

      // WHEN
      await toolkit.bootstrap(['aws://*/bermuda-triangle-1'], bootstrapper, {});

      // THEN
      expect(bootstrapper.bootstrapEnvironment).toHaveBeenCalledWith({
        account: '123456789012',
        region: 'bermuda-triangle-1',
        name: 'aws://123456789012/bermuda-triangle-1',
      }, expect.anything(), expect.anything());
      expect(bootstrapper.bootstrapEnvironment).toHaveBeenCalledTimes(1);
    });

    test('bootstrap can be invoked without the --app argument', async () => {
      // GIVEN
      cloudExecutable.configuration.settings.clear();
      const mockSynthesize = jest.fn();
      cloudExecutable.synthesize = mockSynthesize;

      const toolkit = defaultToolkitSetup();

      // WHEN
      await toolkit.bootstrap(['aws://123456789012/west-pole'], bootstrapper, {});

      // THEN
      expect(bootstrapper.bootstrapEnvironment).toHaveBeenCalledWith({
        account: '123456789012',
        region: 'west-pole',
        name: 'aws://123456789012/west-pole',
      }, expect.anything(), expect.anything());
      expect(bootstrapper.bootstrapEnvironment).toHaveBeenCalledTimes(1);

      expect(cloudExecutable.hasApp).toEqual(false);
      expect(mockSynthesize).not.toHaveBeenCalled();
    });
  });
});

describe('watch', () => {
  test("fails when no 'watch' settings are found", async () => {
    const toolkit = defaultToolkitSetup();

    await expect(() => {
      return toolkit.watch({ selector: { patterns: [] } });
    }).rejects.toThrow("Cannot use the 'watch' command without specifying at least one directory to monitor. " +
      'Make sure to add a "watch" key to your cdk.json');
  });

  test('observes only the root directory by default', async () => {
    cloudExecutable.configuration.settings.set(['watch'], {});
    const toolkit = defaultToolkitSetup();

    await toolkit.watch({ selector: { patterns: [] } });

    const includeArgs = fakeChokidarWatch.includeArgs;
    expect(includeArgs.length).toBe(1);
  });

  test("allows providing a single string in 'watch.include'", async () => {
    cloudExecutable.configuration.settings.set(['watch'], {
      include: 'my-dir',
    });
    const toolkit = defaultToolkitSetup();

    await toolkit.watch({ selector: { patterns: [] } });

    expect(fakeChokidarWatch.includeArgs).toStrictEqual(['my-dir']);
  });

  test("allows providing an array of strings in 'watch.include'", async () => {
    cloudExecutable.configuration.settings.set(['watch'], {
      include: ['my-dir1', '**/my-dir2/*'],
    });
    const toolkit = defaultToolkitSetup();

    await toolkit.watch({ selector: { patterns: [] } });

    expect(fakeChokidarWatch.includeArgs).toStrictEqual(['my-dir1', '**/my-dir2/*']);
  });

  test('ignores the output dir, dot files, dot directories, and node_modules by default', async () => {
    cloudExecutable.configuration.settings.set(['watch'], {});
    cloudExecutable.configuration.settings.set(['output'], 'cdk.out');
    const toolkit = defaultToolkitSetup();

    await toolkit.watch({ selector: { patterns: [] } });

    expect(fakeChokidarWatch.excludeArgs).toStrictEqual([
      'cdk.out/**',
      '**/.*',
      '**/.*/**',
      '**/node_modules/**',
    ]);
  });

  // User-provided excludes are prepended to the 4 built-in ignore patterns.
  test("allows providing a single string in 'watch.exclude'", async () => {
    cloudExecutable.configuration.settings.set(['watch'], {
      exclude: 'my-dir',
    });
    const toolkit = defaultToolkitSetup();

    await toolkit.watch({ selector: { patterns: [] } });

    const excludeArgs = fakeChokidarWatch.excludeArgs;
    expect(excludeArgs.length).toBe(5);
    expect(excludeArgs[0]).toBe('my-dir');
  });

  test("allows providing an array of strings in 'watch.exclude'", async () => {
    cloudExecutable.configuration.settings.set(['watch'], {
      exclude: ['my-dir1', '**/my-dir2'],
    });
    const toolkit = defaultToolkitSetup();

    await toolkit.watch({ selector: { patterns: [] } });

    const excludeArgs = fakeChokidarWatch.excludeArgs;
    expect(excludeArgs.length).toBe(6);
    expect(excludeArgs[0]).toBe('my-dir1');
    expect(excludeArgs[1]).toBe('**/my-dir2');
  });

  describe('with file change events', () => {
    let toolkit: CdkToolkit;
    let cdkDeployMock: jest.Mock;

    beforeEach(async () => {
      cloudExecutable.configuration.settings.set(['watch'], {});
      toolkit = defaultToolkitSetup();
      cdkDeployMock = jest.fn();
      // replace deploy so watch-triggered deployments only bump the mock
      toolkit.deploy = cdkDeployMock;
      await toolkit.watch({ selector: { patterns: [] } });
    });

    test("does not trigger a 'deploy' before the 'ready' event has fired", async () => {
      await fakeChokidarWatcherOn.fileEventCallback('add', 'my-file');

      expect(cdkDeployMock).not.toHaveBeenCalled();
    });

    describe("when the 'ready' event has already fired", () => {
      beforeEach(() => {
        // The ready callback triggers a deployment so each test
        // that uses this function will see 'cdkDeployMock' called
        // an additional time.
        fakeChokidarWatcherOn.readyCallback();
      });

      test("an initial 'deploy' is triggered, without any file changes", async () => {
        expect(cdkDeployMock).toHaveBeenCalledTimes(1);
      });

      test("does trigger a 'deploy' for a file change", async () => {
        await fakeChokidarWatcherOn.fileEventCallback('add', 'my-file');

        expect(cdkDeployMock).toHaveBeenCalledTimes(2);
      });

      test("triggers a 'deploy' twice for two file changes", async () => {
        await Promise.all([
          fakeChokidarWatcherOn.fileEventCallback('add', 'my-file1'),
          fakeChokidarWatcherOn.fileEventCallback('change', 'my-file2'),
        ]);

        expect(cdkDeployMock).toHaveBeenCalledTimes(3);
      });

      // 4 concurrent events still yield only 3 calls total: the initial
      // ready-deploy, one in-flight deploy, and one batched follow-up.
      test("batches file changes that happen during 'deploy'", async () => {
        await Promise.all([
          fakeChokidarWatcherOn.fileEventCallback('add', 'my-file1'),
          fakeChokidarWatcherOn.fileEventCallback('change', 'my-file2'),
          fakeChokidarWatcherOn.fileEventCallback('unlink', 'my-file3'),
          fakeChokidarWatcherOn.fileEventCallback('add', 'my-file4'),
        ]);

        expect(cdkDeployMock).toHaveBeenCalledTimes(3);
      });
    });
  });
});

describe('synth', () => {
  test('with no stdout option', async () => {
    // GIVEN
    const toolkit = defaultToolkitSetup();

    // THEN
    await expect(toolkit.synth(['Test-Stack-A'], false, true)).resolves.toBeUndefined();
  });

  afterEach(() => {
    process.env.STACKS_TO_VALIDATE = undefined;
  });

  describe('stack with error and flagged for validation', () => {
    beforeEach(() => {
      cloudExecutable = new MockCloudExecutable({
        stacks: [
          MockStack.MOCK_STACK_A,
          MockStack.MOCK_STACK_B,
        ],
        nestedAssemblies: [{
          stacks: [
            { properties: { validateOnSynth: true }, ...MockStack.MOCK_STACK_WITH_ERROR },
          ],
        }],
      });
    });

    test('causes synth to fail if autoValidate=true', async() => {
      const toolkit = defaultToolkitSetup();
      const autoValidate = true;
      await expect(toolkit.synth([], false, true, autoValidate)).rejects.toBeDefined();
    });

    test('causes synth to succeed if autoValidate=false', async() => {
      const toolkit = defaultToolkitSetup();
      const autoValidate = false;
      await expect(toolkit.synth([], false, true, autoValidate)).resolves.toBeUndefined();
    });
  });

  test('stack has error and was explicitly selected', async() => {
    cloudExecutable = new MockCloudExecutable({
      stacks: [
        MockStack.MOCK_STACK_A,
        MockStack.MOCK_STACK_B,
      ],
      nestedAssemblies: [{
        stacks: [
          { properties: { validateOnSynth: false }, ...MockStack.MOCK_STACK_WITH_ERROR },
        ],
      }],
    });

    const toolkit = defaultToolkitSetup();

    await expect(toolkit.synth(['Test-Stack-A/witherrors'], false, true)).rejects.toBeDefined();
  });

  test('stack has error, is not flagged for validation and was not explicitly selected', async () => {
    cloudExecutable = new MockCloudExecutable({
      stacks: [
        MockStack.MOCK_STACK_A,
        MockStack.MOCK_STACK_B,
      ],
      nestedAssemblies: [{
        stacks: [
          { properties: { validateOnSynth: false }, ...MockStack.MOCK_STACK_WITH_ERROR },
        ],
      }],
    });

    const toolkit = defaultToolkitSetup();

    await toolkit.synth([], false, true);
  });

  test('stack has dependency and was explicitly selected', async () => {
    cloudExecutable = new MockCloudExecutable({
      stacks: [
        MockStack.MOCK_STACK_C,
        MockStack.MOCK_STACK_D,
      ],
    });

    const toolkit = defaultToolkitSetup();

    await expect(toolkit.synth([MockStack.MOCK_STACK_D.stackName], true, false)).resolves.toBeDefined();
  });
});

// Static fixture stacks used throughout the tests above.
class MockStack {
  public static readonly MOCK_STACK_A: TestStackArtifact = {
    stackName: 'Test-Stack-A',
    template: { Resources: { TemplateName: 'Test-Stack-A' } },
    env: 'aws://123456789012/bermuda-triangle-1',
    metadata: {
      '/Test-Stack-A': [
        {
          type: cxschema.ArtifactMetadataEntryType.STACK_TAGS,
          data: [
            { key: 'Foo', value: 'Bar' },
          ],
        },
      ],
    },
  };
  public static readonly MOCK_STACK_B: TestStackArtifact = {
    stackName: 'Test-Stack-B',
    template: { Resources: { TemplateName: 'Test-Stack-B' } },
    env: 'aws://123456789012/bermuda-triangle-1',
    metadata: {
      '/Test-Stack-B': [
        {
          type: cxschema.ArtifactMetadataEntryType.STACK_TAGS,
          data: [
            { key: 'Baz', value: 'Zinga!' },
          ],
        },
      ],
    },
  };
  public static readonly MOCK_STACK_C: TestStackArtifact = {
    stackName: 'Test-Stack-C',
    template: { Resources: { TemplateName: 'Test-Stack-C' } },
    env: 'aws://123456789012/bermuda-triangle-1',
    metadata: {
      '/Test-Stack-C': [
        {
          type: cxschema.ArtifactMetadataEntryType.STACK_TAGS,
          data: [
            { key: 'Baz', value: 'Zinga!' },
          ],
        },
      ],
    },
    displayName: 'Test-Stack-A/Test-Stack-C',
  };
  public static readonly MOCK_STACK_D: TestStackArtifact = {
    stackName: 'Test-Stack-D',
    template: { Resources: { TemplateName: 'Test-Stack-D' } },
    env: 'aws://123456789012/bermuda-triangle-1',
    metadata: {
      '/Test-Stack-D': [
        {
          type: cxschema.ArtifactMetadataEntryType.STACK_TAGS,
          data: [
            { key: 'Baz', value: 'Zinga!' },
          ],
        },
      ],
    },
    depends: [MockStack.MOCK_STACK_C.stackName],
  }
  public static readonly MOCK_STACK_WITH_ERROR: TestStackArtifact = {
    stackName: 'witherrors',
    env: 'aws://123456789012/bermuda-triangle-1',
    template: { resource: 'errorresource' },
    metadata: {
      '/resource': [
        {
          type: cxschema.ArtifactMetadataEntryType.ERROR,
          data: 'this is an error',
        },
      ],
    },
    displayName: 'Test-Stack-A/witherrors',
  }
}

// Test double that asserts the tags/notification ARNs passed to deployStack()
// match what the constructor was told to expect, then resolves a canned result.
class FakeCloudFormation extends CloudFormationDeployments {
  private readonly expectedTags: { [stackName: string]: Tag[] } = {};
  private readonly expectedNotificationArns?: string[];

  constructor(
    expectedTags: { [stackName: string]: { [key: string]: string } } = {},
    expectedNotificationArns?: string[],
  ) {
    super({ sdkProvider: undefined as any });

    // normalize to the sorted Tag[] shape deployStack receives
    for (const [stackName, tags] of Object.entries(expectedTags)) {
      this.expectedTags[stackName] = Object.entries(tags).map(([Key, Value]) => ({ Key, Value }))
        .sort((l, r) => l.Key.localeCompare(r.Key));
    }
    if (expectedNotificationArns) {
      this.expectedNotificationArns = expectedNotificationArns;
    }
  }

  public deployStack(options: DeployStackOptions): Promise<DeployStackResult> {
    expect([MockStack.MOCK_STACK_A.stackName, MockStack.MOCK_STACK_B.stackName, MockStack.MOCK_STACK_C.stackName])
      .toContain(options.stack.stackName);
    expect(options.tags).toEqual(this.expectedTags[options.stack.stackName]);
    expect(options.notificationArns).toEqual(this.expectedNotificationArns);
    return Promise.resolve({
      stackArn: `arn:aws:cloudformation:::stack/${options.stack.stackName}/MockedOut`,
      noOp: false,
      outputs: { StackName: options.stack.stackName },
      stackArtifact: options.stack,
    });
  }

  public readCurrentTemplate(stack: cxapi.CloudFormationStackArtifact): Promise<Template> {
    switch (stack.stackName) {
      case MockStack.MOCK_STACK_A.stackName:
        return Promise.resolve({});
      case MockStack.MOCK_STACK_B.stackName:
        return Promise.resolve({});
      case MockStack.MOCK_STACK_C.stackName:
        return Promise.resolve({});
      default:
        return Promise.reject(`Not an expected mock stack: ${stack.stackName}`);
    }
  }
}
the_stack
// Storybook stories exercising the Griddle data-grid component.
// NOTE(review): this chunk is truncated mid-definition at the end
// (`const CustomTableBody = ({`); the trailing fragment is preserved as-is.
import * as React from 'react';
import * as PropTypes from 'prop-types';
import { storiesOf } from '@storybook/react';
import compose from 'recompose/compose';
import mapProps from 'recompose/mapProps';
import getContext from 'recompose/getContext';
import withContext from 'recompose/withContext';
import withHandlers from 'recompose/withHandlers';
import withState from 'recompose/withState';
import { Provider, connect as reduxConnect } from 'react-redux';
import { createStore } from 'redux';
import { createSelector } from 'reselect';

import GenericGriddle, { connect, actions, components, selectors, plugins, utils, ColumnDefinition, RowDefinition, GriddleProps } from '../src/module';
const { Cell, Row, Table, TableBody, TableHeading, TableHeadingCell } = components;
const { SettingsWrapper, SettingsToggle, Settings } = components;
const { LegacyStylePlugin, LocalPlugin, PositionPlugin } = plugins;

import fakeData, { FakeData } from './fakeData';
import { person, fakeData2, personClass, fakeData3 } from './fakeData2';

// Pin the generic Griddle to the FakeData row type for all stories below.
type Griddle = new () => GenericGriddle<FakeData>;
const Griddle = (GenericGriddle as unknown) as Griddle;

// Custom comparator used by the sortMethod stories: orders rows by the
// SECOND character of the given column's value (empty string when absent).
function sortBySecondCharacter(data, column, sortAscending = true) {
  return data.sort((original, newRecord) => {
    original = (!!original.get(column) && original.get(column)) || '';
    newRecord = (!!newRecord.get(column) && newRecord.get(column)) || '';
    if (original[1] === newRecord[1]) {
      return 0;
    } else if (original[1] > newRecord[1]) {
      return sortAscending ? 1 : -1;
    } else {
      return sortAscending ? -1 : 1;
    }
  });
}

// from mdn
function getRandomIntInclusive(min, max) {
  min = Math.ceil(min);
  max = Math.floor(max);
  return Math.floor(Math.random() * (max - min + 1)) + min;
}

// Returns a random 10-row window of the fake data set.
function getRandomFakeData() {
  const start = getRandomIntInclusive(0, fakeData.length - 10);
  return fakeData.slice(start, start + 10);
}

const GreenLeftSortIconComponent = props => (
  <span style={{ color: '#00ff00' }}>
    {props.icon && <span className={props.iconClassName}>{props.icon}</span>}
    {props.title}
  </span>
);

const MakeBlueComponent = props => (
  <div style={{ backgroundColor: '#0000FF' }}>
    {props.value}
    {props.rowData && (
      <small style={{ marginLeft: 5, opacity: 0.5 }}>
        {props.rowData.company}
      </small>
    )}
  </div>
);

// Griddle's own connect(): injects the full rowData for the rendered row.
const EnhanceWithRowData = connect((state, props) => ({
  rowData: selectors.rowDataSelector(state, props)
}));

const EnhancedCustomComponent = EnhanceWithRowData(MakeBlueComponent);

interface TestState {
  count: number;
  data?: any;
  searchString?: string;
}

function testReducer(state: TestState = { count: 1 }, action: any) {
  switch (action.type) {
    case 'INCREMENT':
      return { ...state, count: state.count + 1 };
    case 'DECREMENT':
      return { ...state, count: state.count - 1 };
    case 'SET_DATA':
      return { ...state, data: action.data };
    case 'SET_SEARCH_STRING':
      return { ...state, searchString: action.searchString };
    default:
      return state;
  }
}

let testStore = createStore(testReducer);

storiesOf('Griddle main', module)
  .add('with local', () => {
    return (
      <Griddle data={fakeData} plugins={[LocalPlugin]}>
        <RowDefinition />
      </Griddle>
    );
  })
  .add('with external prop changes', () => {
    const NoResultsWithN = connect(
      (state: any) => ({ n: state.get('n'), addTen: state.get('addTen') }),
      () => {}
    )(({ n, addTen }) => (
      <div>
        <p>
          <code>n = {n}</code>
        </p>
        <button onClick={addTen}>+10</button>
      </div>
    ));
    class Stateful extends React.Component<{}, { n: number }> {
      constructor(props) {
        super(props);
        this.state = { n: 0 };
      }
      render() {
        const { n } = this.state;
        return (
          <div>
            <p>
              Click to change Griddle props:{' '}
              <button onClick={() => this.setState(({ n }) => ({ n: n + 1 }))}>
                {n}
              </button>
              <button onClick={() => this.setState({ n: 0 })}>Reset</button>
            </p>
            <Griddle
              n={n}
              addTen={() => this.setState(({ n }) => ({ n: n + 10 }))}
              plugins={[LocalPlugin]}
              data={fakeData.filter((d, i) => i % n === 0)}
              components={{ NoResults: NoResultsWithN }}
              styleConfig={{
                styles: { Layout: { color: n % 3 ? 'blue' : 'inherit' } }
              }}
              textProperties={{ settingsToggle: `Settings (${n})` }}
            />
          </div>
        );
      }
    }
    return <Stateful />;
  })
  .add('with local, delayed data', () => {
    // Simulates async data arrival: data is cleared, then restored 2s later.
    class DeferredGriddle extends React.Component<
      GriddleProps<FakeData>,
      { data?: FakeData[] }
    > {
      private timeout;
      constructor(props) {
        super(props);
        this.state = {};
      }
      componentDidMount() {
        this.resetData();
      }
      componentWillUnmount() {
        this.timeout && clearTimeout(this.timeout);
      }
      resetData = () => {
        this.setState({ data: null });
        this.timeout && clearTimeout(this.timeout);
        this.timeout = setTimeout(() => {
          this.setState({ data: this.props.data });
        }, 2000);
      };
      render() {
        return (
          <div>
            <p>
              <button onClick={this.resetData}>Reload Data</button>
            </p>
            <Griddle {...this.props} data={this.state.data} />
          </div>
        );
      }
    }
    return <DeferredGriddle data={fakeData} plugins={[LocalPlugin]} />;
  })
  .add('with local and legacy (v0) styles', () => {
    return (
      <Griddle data={fakeData} plugins={[LocalPlugin, LegacyStylePlugin]} />
    );
  })
  .add('with local and events', () => {
    // don't do things this way - fine for example storybook
    const events = {
      onFilter: filter => console.log('onFilter', filter),
      onSort: sortProperties => console.log('onSort', sortProperties),
      onNext: () => console.log('onNext'),
      onPrevious: () => console.log('onPrevious'),
      onGetPage: pageNumber => console.log('onGetPage', pageNumber)
    };
    return (
      <Griddle data={fakeData} plugins={[LocalPlugin]} events={events}>
        <RowDefinition />
      </Griddle>
    );
  })
  .add('with Row & Cell events', () => {
    return (
      <Griddle
        data={fakeData}
        plugins={[LocalPlugin]}
        components={{
          RowEnhancer: OriginalComponent => props => (
            <OriginalComponent
              {...props}
              onClick={() => console.log(`Click Row ${props.griddleKey}`)}
              onMouseEnter={() =>
                console.log(`MouseEnter Row ${props.griddleKey}`)
              }
              onMouseLeave={() =>
                console.log(`MouseLeave Row ${props.griddleKey}`)
              }
            />
          ),
          CellEnhancer: OriginalComponent => props => (
            <OriginalComponent
              {...props}
              onClick={() => console.log(`Click ${props.value}`)}
              onMouseEnter={() => console.log(`MouseEnter ${props.value}`)}
              onMouseLeave={() => console.log(`MouseLeave ${props.value}`)}
            />
          )
        }}
        styleConfig={{
          styles: {
            Table: { borderCollapse: 'collapse' } // To prevent Row enter/leave between cells
          }
        }}
      />
    );
  })
  .add('with local and filterable set', () => {
    return (
      <div>
        <small>Name is not filterable</small>
        <Griddle data={fakeData} plugins={[LocalPlugin]}>
          <RowDefinition>
            <ColumnDefinition id="name" filterable={false} />
            <ColumnDefinition id="city" filterable />
            <ColumnDefinition id="state" />
          </RowDefinition>
        </Griddle>
      </div>
    );
  })
  .add('with local and sort set', () => {
    const sortProperties = [{ id: 'name', sortAscending: true }];
    return (
      <Griddle
        data={fakeData}
        plugins={[LocalPlugin]}
        sortProperties={sortProperties}
      >
        <RowDefinition />
      </Griddle>
    );
  })
  .add('with custom default sort', () => {
    return (
      <div>
        <small>Sorts all columns by second character</small>
        <Griddle
          data={fakeData}
          plugins={[LocalPlugin]}
          sortMethod={sortBySecondCharacter}
        >
          <RowDefinition>
            <ColumnDefinition id="name" order={2} />
            <ColumnDefinition id="state" order={1} />
          </RowDefinition>
        </Griddle>
      </div>
    );
  })
  .add('with custom sort on name', () => {
    return (
      <div>
        <small>Sorts name by second character</small>
        <Griddle data={fakeData} plugins={[LocalPlugin]}>
          <RowDefinition>
            <ColumnDefinition
              id="name"
              order={2}
              title="NAME"
              sortMethod={sortBySecondCharacter}
            />
            <ColumnDefinition id="state" order={1} />
          </RowDefinition>
        </Griddle>
      </div>
    );
  })
  .add('with sortable set to true/false', () => {
    return (
      <div>
        <small>
          Using ColumnDefinition sortable (false on name, true on state).
        </small>
        <Griddle data={fakeData} plugins={[LocalPlugin]}>
          <RowDefinition>
            <ColumnDefinition id="name" order={2} sortable={false} />
            <ColumnDefinition id="state" order={1} sortable={true} />
          </RowDefinition>
        </Griddle>
      </div>
    );
  })
  .add('with sort disabled on name via plugin', () => {
    const { setSortProperties } = utils.sortUtils;
    // Plugin that swallows setSortProperties for the listed column ids.
    const disableSortPlugin = (...columnsWithSortDisabled) => ({
      events: {
        setSortProperties: sortProperties => {
          const { columnId } = sortProperties;
          if (columnsWithSortDisabled.findIndex(c => c === columnId) >= 0) {
            return () => {};
          }
          return setSortProperties(sortProperties);
        }
      }
    });
    return (
      <div>
        <small>Using custom plugin to disable sort</small>
        <Griddle
          data={fakeData}
          plugins={[LocalPlugin, disableSortPlugin('name')]}
        >
          <RowDefinition>
            <ColumnDefinition id="name" order={2} />
            <ColumnDefinition id="state" order={1} />
          </RowDefinition>
        </Griddle>
      </div>
    );
  })
  .add('with custom css-class names on state', () => {
    const css = `
      tr:nth-child(2n+1) .customClassName {
        background-color: #eee;
      }
      .customHeaderClassName {
        color: red;
      }
      .blue {
        color: blue;
      }
      .asc {
        background-color: #666;
        color: white;
      }
      .desc {
        background-color: #999;
        color: black;
      }
    `;
    return (
      <div>
        <style type="text/css">{css}</style>
        <small>
          Sets dynamic (name - click to sort) and static (state) class names on
          header and body cells
        </small>
        <Griddle data={fakeData} plugins={[LocalPlugin]}>
          <RowDefinition>
            <ColumnDefinition
              id="name"
              headerCssClassName={({ sortProperty }) =>
                sortProperty && (sortProperty.sortAscending ? 'asc' : 'desc')
              }
              cssClassName={({ value }) => value.startsWith('L') && 'blue'}
            />
            <ColumnDefinition
              id="state"
              cssClassName="customClassName"
              headerCssClassName="customHeaderClassName"
            />
          </RowDefinition>
        </Griddle>
      </div>
    );
  })
  .add('with cssClassName string on RowDefinition', () => {
    const css = `
      .lucky {
        background-color: #cfc;
        color: #060;
      }
    `;
    return (
      <div>
        <style type="text/css">{css}</style>
        <small>Uses cssClassName to apply static class name</small>
        <Griddle data={fakeData} plugins={[LocalPlugin]}>
          <RowDefinition cssClassName="lucky">
            <ColumnDefinition id="name" />
            <ColumnDefinition id="state" />
          </RowDefinition>
        </Griddle>
      </div>
    );
  })
  .add('with cssClassName function on RowDefinition', () => {
    const css = `
      .row-1 {
        background-color: #ccc;
      }
      .row-2 {
        background-color: #999;
      }
      .lucky {
        background-color: #cfc;
        color: #060;
      }
    `;
    return (
      <div>
        <style type="text/css">{css}</style>
        <small>Uses cssClassName to apply calculated class names</small>
        <Griddle data={fakeData} plugins={[LocalPlugin]}>
          <RowDefinition
            cssClassName={({ rowData: d, index: i }) =>
              d && d.favoriteNumber === 7 ? 'lucky' : `row-${i % 3}`
            }
          >
            <ColumnDefinition id="name" />
            <ColumnDefinition id="state" />
          </RowDefinition>
        </Griddle>
      </div>
    );
  })
  .add('with custom component on name', () => {
    return (
      <div>
        <small>Everything in the name column should be blue</small>
        <Griddle data={fakeData} plugins={[LocalPlugin]}>
          <RowDefinition>
            <ColumnDefinition
              id="name"
              order={2}
              customComponent={MakeBlueComponent}
              width={800}
            />
            <ColumnDefinition id="state" order={1} width={100} />
          </RowDefinition>
        </Griddle>
      </div>
    );
  })
  .add("with 'connected' custom component", () => {
    return (
      <div>
        <small>
          Everything in the name column should be blue and we should now see the
          company name also
        </small>
        <Griddle data={fakeData} plugins={[LocalPlugin]}>
          <RowDefinition>
            <ColumnDefinition
              id="name"
              order={2}
              customComponent={EnhancedCustomComponent}
            />
            <ColumnDefinition id="state" order={1} />
          </RowDefinition>
        </Griddle>
      </div>
    );
  })
  .add('with controlled griddle component', () => {
    // Fully controlled mode: every event swaps in fresh random data/props.
    class Something extends React.Component<{}, any> {
      constructor(props) {
        super(props);
        this.state = {
          data: getRandomFakeData(),
          sortProperties: {}
        };
      }
      onFilter = filter => {
        console.log('onFilter', filter);
        this.setState({ data: getRandomFakeData() });
      };
      onSort = sortProperties => {
        console.log('onSort', sortProperties);
        this.setState({
          data: getRandomFakeData(),
          sortProperties: {
            something: {
              ...sortProperties,
              sortAscending: getRandomIntInclusive(0, 1) > 0 ? true : false
            }
          }
        });
      };
      onNext = () => {
        console.log('onNext');
        this.setState({ data: getRandomFakeData() });
      };
      onPrevious = () => {
        console.log('onPrevious');
        this.setState({ data: getRandomFakeData() });
      };
      onGetPage = pageNumber => {
        console.log('onGetPage', pageNumber);
        this.setState({ data: getRandomFakeData() });
      };
      render() {
        const pageProperties = {
          currentPage: getRandomIntInclusive(1, 10),
          recordCount: getRandomIntInclusive(1, 1000)
        };
        // don't do things this way - fine for example storybook
        const events = {
          onFilter: this.onFilter,
          onSort: this.onSort,
          onNext: this.onNext,
          onPrevious: this.onPrevious,
          onGetPage: this.onGetPage
        };
        return (
          <Griddle
            data={this.state.data}
            events={events}
            styleConfig={{ classNames: { Cell: 'hahaha' } }}
            sortProperties={this.state.sortProperties}
            pageProperties={pageProperties}
          >
            <RowDefinition>
              <ColumnDefinition
                id="name"
                width={500}
                style={{ color: '#FAB' }}
              />
              <ColumnDefinition id="state" />
            </RowDefinition>
          </Griddle>
        );
      }
    }
    return <Something />;
  })
  .add('with controlled griddle component with no results', () => {
    return <Griddle data={[]} />;
  })
  .add('with extraData', () => {
    const customHeadingComponent = ({ title, extra }) => (
      <span>
        {title} {extra && <em> {extra}</em>}
      </span>
    );
    const customComponent = ({ value, extra }) => (
      <span>
        {value} {extra && <em> {extra}</em>}
      </span>
    );
    const components = {
      Cell: ({ value, extra }) => (
        <td>
          {value} {extra && <strong> {extra}</strong>}
        </td>
      ),
      TableHeadingCell: ({ title, extra }) => (
        <th>
          {title} {extra && <strong> {extra}</strong>}
        </th>
      )
    };
    return (
      <div>
        <small>
          <em>extra</em> from <code>custom(Heading)Component</code>;{' '}
          <strong>extra</strong> from <code>(TableHeading)Cell</code>
        </small>
        <Griddle
          data={fakeData}
          plugins={[LocalPlugin]}
          components={components}
        >
          <RowDefinition rowKey="name">
            <ColumnDefinition
              id="name"
              order={2}
              extraData={{ extra: 'extra' }}
              customHeadingComponent={customHeadingComponent}
              customComponent={customComponent}
            />
            <ColumnDefinition id="state" order={1} />
          </RowDefinition>
        </Griddle>
      </div>
    );
  })
  .add('with extra re-render', () => {
    let data = fakeData;
    // Counts how often the cell component receives new props.
    class customComponent extends React.Component<any, any> {
      state = { timesRendered: 1 };
      componentWillReceiveProps() {
        this.setState(state => ({ timesRendered: state.timesRendered + 1 }));
      }
      render() {
        const { value, extra } = this.props;
        const { timesRendered } = this.state;
        return (
          <span>
            {value} {extra && <em> {extra}</em>} {timesRendered}
          </span>
        );
      }
    }
    let interval = null;
    // Bumps a 'progress' field on every row every 5 seconds.
    class UpdatingDataTable extends React.Component<any, any> {
      constructor(props, context) {
        super(props, context);
        this.state = {
          data: this.updateDataWithProgress(props.data, 0),
          progressValue: 0,
          extraData: { extra: 'times re-rendered: ' }
        };
      }
      updateDataWithProgress(data, progressValue) {
        return data.map(item => ({ ...item, progress: progressValue }));
      }
      componentDidMount() {
        interval = setInterval(() => {
          this.setState(state => {
            const newProgressValue = state.progressValue + 1;
            return {
              data: this.updateDataWithProgress(state.data, newProgressValue),
              progressValue: newProgressValue
            };
          });
        }, 5000);
      }
      componentWillUnmount() {
        clearInterval(interval);
      }
      render() {
        const { data, extraData } = this.state;
        return (
          <div>
            <small>
              <em>extra</em> from <code>custom(Heading)Component</code>;{' '}
              <strong>extra</strong> from <code>(TableHeading)Cell</code>
            </small>
            <Griddle data={data} plugins={[LocalPlugin]}>
              <RowDefinition rowKey="name">
                <ColumnDefinition
                  id="name"
                  order={2}
                  extraData={extraData}
                  customComponent={customComponent}
                />
                <ColumnDefinition id="state" order={1} />
                <ColumnDefinition id="progress" />
              </RowDefinition>
            </Griddle>
          </div>
        );
      }
    }
    return <UpdatingDataTable data={fakeData} />;
  })
  .add('with custom griddle key', () => {
    return (
      <div>
        <small>The key should be the name property </small>
        <Griddle data={fakeData} plugins={[LocalPlugin]}>
          <RowDefinition rowKey="name">
            <ColumnDefinition id="name" order={2} />
            <ColumnDefinition id="state" order={1} />
          </RowDefinition>
        </Griddle>
      </div>
    );
  })
  .add("with custom griddle key that doesn't exist", () => {
    return (
      <div>
        <small>The key should be the name property </small>
        <Griddle data={fakeData} plugins={[LocalPlugin]}>
          <RowDefinition rowKey="garbage">
            <ColumnDefinition id="name" order={2} />
            <ColumnDefinition id="state" order={1} />
          </RowDefinition>
        </Griddle>
      </div>
    );
  })
  .add('with custom heading component', () => {
    return (
      <div>
        <style type="text/css">{`
          .griddle-heading-ascending:before {
            content: '↑';
          }
          .griddle-heading-descending:before {
            content: '↓';
          }
        `}</style>
        <small>
          Name should have a green heading component -- sort icon should show up
          on the left of the title
        </small>
        <Griddle data={fakeData} plugins={[LocalPlugin]}>
          <RowDefinition>
            <ColumnDefinition
              id="name"
              order={2}
              customHeadingComponent={GreenLeftSortIconComponent}
            />
            <ColumnDefinition id="state" order={1} />
          </RowDefinition>
        </Griddle>
      </div>
    );
  })
  .add('with many columns', () => {
    return (
      <div>
        <small>
          State should be first, name should be last, and the rest should be in
          order. Default order increments from 1000.
        </small>
        <Griddle data={fakeData} plugins={[LocalPlugin]}>
          <RowDefinition>
            <ColumnDefinition id="name" order={2000} />
            <ColumnDefinition id="col1" />
            <ColumnDefinition id="col2" />
            <ColumnDefinition id="col3" />
            <ColumnDefinition id="col4" />
            <ColumnDefinition id="col5" />
            <ColumnDefinition id="col6" />
            <ColumnDefinition id="col7" />
            <ColumnDefinition id="state" order={1} />
          </RowDefinition>
        </Griddle>
      </div>
    );
  })
  .add('with conditional columns', () => {
    return (
      <div>
        <small>The first column should be visible, the second ignored.</small>
        <Griddle data={fakeData} plugins={[LocalPlugin]}>
          <RowDefinition>
            {true && <ColumnDefinition id="col1" />}
            {false && <ColumnDefinition id="col2" />}
          </RowDefinition>
        </Griddle>
      </div>
    );
  })
  .add('with override row component', () => {
    const NewRow = props => (
      <tr>
        <td>hi</td>
      </tr>
    );
    return (
      <Griddle data={fakeData} components={{ Row: NewRow }} />
    );
  })
  .add('with list row component', () => {
    // Ported from https://github.com/GriddleGriddle/griddle-docs/blob/429f318778604c5e7500c1c949fe1c3137972419/components/GriddleList.js
    const CustomRowComponent = connect((state, props) => ({
      rowData: plugins.LocalPlugin.selectors.rowDataSelector(state, props)
    }))(({ rowData }) => (
      <div
        style={{
          backgroundColor: '#EEE',
          border: '1px solid #AAA',
          padding: 5,
          margin: '10px 0 10px 0'
        }}
      >
        <h1>{rowData.name}</h1>
        <ul>
          <li>
            <strong>State</strong>: {rowData.state}
          </li>
          <li>
            <strong>Company</strong>: {rowData.company}
          </li>
        </ul>
      </div>
    ));
    // HoC for overriding Table component to just render the default TableBody component
    // We could use this entirely if we wanted and connect and map over visible rows but
    // Using this + tableBody to take advantange of code that Griddle LocalPlugin already has
    const CustomTableComponent = OriginalComponent =>
      class CustomTableComponent extends React.Component<{}> {
        static contextTypes = {
          components: PropTypes.object
        };
        render() {
          return <this.context.components.TableBody />;
        }
      };
    const CustomTableBody = ({
rowIds, Row, style, className }) => ( <div style={style} className={className}> {rowIds && rowIds.map(r => <Row key={r} griddleKey={r} />)} </div> ); return ( <Griddle data={fakeData} pageProperties={{ pageSize: 5 }} plugins={[plugins.LocalPlugin]} components={{ Row: CustomRowComponent, TableContainer: CustomTableComponent, TableBody: CustomTableBody, SettingsToggle: props => null }} /> ); }) .add('with virtual scrolling', () => { return ( <Griddle data={fakeData} plugins={[LocalPlugin, PositionPlugin({ tableHeight: 300 })]} > <RowDefinition> <ColumnDefinition id="name" order={2} customHeadingComponent={GreenLeftSortIconComponent} width={300} /> <ColumnDefinition id="state" order={1} width={400} /> </RowDefinition> </Griddle> ); }) .add('set fakeData to constructed Objects', () => { type Griddle = new () => GenericGriddle<person>; const Griddle = (GenericGriddle as unknown) as Griddle; return ( <Griddle data={fakeData2} plugins={[LocalPlugin]}> <RowDefinition /> </Griddle> ); }) .add('set fakeData to class Objects', () => { type Griddle = new () => GenericGriddle<personClass>; const Griddle = (GenericGriddle as unknown) as Griddle; return ( <Griddle data={fakeData3} plugins={[LocalPlugin]}> <RowDefinition /> </Griddle> ); }) .add('with nested column data', () => { interface NestedData { id: number; name: string; location: { country: string; city: string; state: string; }; company: string; favoriteNumber: number; } type Griddle = new () => GenericGriddle<NestedData>; const Griddle = (GenericGriddle as unknown) as Griddle; const localData: NestedData[] = [ { id: 0, name: 'Mayer Leonard', location: { country: 'United Kingdom', city: 'Kapowsin', state: 'Hawaii' }, company: 'Ovolo', favoriteNumber: 7 }, { id: 1, name: 'Koch Becker', location: { city: 'Johnsonburg', state: 'New Jersey', country: 'Madagascar' }, company: 'Eventage', favoriteNumber: 2 }, { id: 2, name: 'Lowery Hopkins', location: { city: 'Blanco', state: 'Arizona', country: 'Ukraine' }, company: 'Comtext', 
favoriteNumber: 3 }, { id: 3, name: 'Walters Mays', location: { city: 'Glendale', state: 'Illinois', country: 'New Zealand' }, company: 'Corporana', favoriteNumber: 6 }, { id: 4, name: 'Shaw Lowe', location: { city: 'Coultervillle', state: 'Wyoming', country: 'Ecuador' }, company: 'Isologica', favoriteNumber: 2 }, { id: 5, name: 'Ola Fernandez', location: { city: 'Deltaville', state: 'Delaware', country: 'Virgin Islands (US)' }, company: 'Pawnagra', favoriteNumber: 7 } ]; return ( <Griddle data={localData} plugins={[LocalPlugin]}> <RowDefinition> <ColumnDefinition id="name" /> <ColumnDefinition id="location.state" nested={true} /> </RowDefinition> </Griddle> ); }) .add('with custom store listener (check the console!)', () => { const paginationListener = (prevState, nextState) => { const page = nextState.getIn(['pageProperties', 'currentPage']); page % 2 ? console.log('pageProperties->currentPage is odd!') : console.log('pageProperties->currentPage is even!'); }; return ( <Griddle data={fakeData} plugins={[LocalPlugin]} listeners={{ anExternalListener: paginationListener }} > <RowDefinition> <ColumnDefinition id="name" order={2} customHeadingComponent={GreenLeftSortIconComponent} width={300} /> <ColumnDefinition id="state" order={1} width={400} /> </RowDefinition> </Griddle> ); }); storiesOf('Plugins', module).add('styleConfig', () => { const stylePlugin = { components: { Style: () => ( <style type="text/css"> {` .plugin-layout { border: 5px solid green; padding: 5px; } .plugin-row:nth-child(2n+1) { background-color: #eee; } `} </style> ) }, styleConfig: { icons: { TableHeadingCell: { sortDescendingIcon: ' (desc)', sortAscendingIcon: ' (asc)' } }, classNames: { Layout: 'plugin-layout', Row: 'plugin-row' }, styles: { Filter: { backgroundColor: 'blue', color: 'white', fontSize: '200%' } } } }; return ( <div> <small> Uses styles from plugin unless overridden (filter should be black). 
</small> <Griddle data={fakeData} plugins={[LocalPlugin, stylePlugin]} styleConfig={{ styles: { Filter: { backgroundColor: 'black', fontStyle: 'italic' } } }} /> </div> ); }); storiesOf('Data Missing', module) .add('base (data=undefined)', () => { return <Griddle />; }) .add('base (data=null)', () => { return <Griddle data={null} />; }) .add('local (data=undefined)', () => { return <Griddle plugins={[LocalPlugin]} />; }) .add('local (data=null)', () => { return <Griddle data={null} plugins={[LocalPlugin]} />; }); storiesOf('Data Empty', module) .add('base', () => { return <Griddle data={[]} />; }) .add('local', () => { return <Griddle data={[]} plugins={[LocalPlugin]} />; }); storiesOf('Cell', module) .add('base cell', () => { const someValue = 'hi from storybook'; return ( <table> <tbody> <tr> <Cell value={someValue} className="someClass" style={{ fontSize: 20, color: '#FAB' }} onClick={() => console.log('clicked')} onMouseEnter={() => console.log('mouse over')} onMouseLeave={() => console.log('mouse out')} /> </tr> </tbody> </table> ); }) .add('CellContainer', () => { return ( <Griddle data={fakeData} plugins={[LocalPlugin]}> <RowDefinition> <ColumnDefinition id="name" order={2} /> <ColumnDefinition id="state" order={1} /> </RowDefinition> </Griddle> ); }); storiesOf('Bug fixes', module) .add('Shared column title', () => { return ( <Griddle data={fakeData} plugins={[LocalPlugin]}> <RowDefinition> <ColumnDefinition id="name" order={2} title="Same" /> <ColumnDefinition id="state" order={1} title="Same" /> </RowDefinition> </Griddle> ); }) .add('Date values converted to null', () => { interface DateData { _id: number; foo: string; date: Date; bar: string; } type Griddle = new () => GenericGriddle<DateData>; const Griddle = (GenericGriddle as unknown) as Griddle; const dateData = [ { _id: 1, foo: 'hello', date: new Date('2017-02-15'), bar: 'world' }, { _id: 2, foo: 'today', date: new Date(), bar: 'bar' } ]; return ( <Griddle data={dateData} plugins={[LocalPlugin]}> 
<RowDefinition> <ColumnDefinition id={'_id'} title="ID" /> <ColumnDefinition id={'foo'} title="Foo" /> <ColumnDefinition id={'date'} title="Date" type="date" /> <ColumnDefinition id={'bar'} title="Bar" /> </RowDefinition> </Griddle> ); }) .add('Delete row', () => { const enhanceWithOnClick = onClick => class ComputeThing extends React.Component<any, any> { static propTypes = { rowData: PropTypes.object.isRequired }; localClick = () => { const { id } = this.props.rowData; onClick(id); }; render() { const { rowData: { id } } = this.props; return ( <button type="button" onClick={this.localClick}> Remove {id} </button> ); } }; class SomeComponent extends React.Component<{}, { data: FakeData[] }> { private Component; constructor(props) { super(props); this.state = { data: [ { id: 0, name: 'Mayer Leonard', country: 'United Kingdom', city: 'Kapowsin', state: 'Hawaii', company: 'Ovolo', favoriteNumber: 7 }, { id: 1, name: 'Koch Becker', city: 'Johnsonburg', state: 'New Jersey', country: 'Madagascar', company: 'Eventage', favoriteNumber: 2 }, { id: 2, name: 'Lowery Hopkins', city: 'Blanco', state: 'Arizona', country: 'Ukraine', company: 'Comtext', favoriteNumber: 3 }, { id: 3, name: 'Walters Mays', city: 'Glendale', state: 'Illinois', country: 'New Zealand', company: 'Corporana', favoriteNumber: 6 }, { id: 4, name: 'Shaw Lowe', city: 'Coultervillle', state: 'Wyoming', country: 'Ecuador', company: 'Isologica', favoriteNumber: 2 }, { id: 5, name: 'Ola Fernandez', city: 'Deltaville', state: 'Delaware', country: 'Virgin Islands (US)', company: 'Pawnagra', favoriteNumber: 7 } ] }; this.Component = EnhanceWithRowData(enhanceWithOnClick(this.onRemove)); } onRemove = rowId => { const newData = this.state.data.filter(x => x.id !== rowId); this.setState({ data: newData }); }; render() { return ( <Griddle data={this.state.data} plugins={[LocalPlugin]}> <RowDefinition> <ColumnDefinition id="id" /> <ColumnDefinition id="name" /> <ColumnDefinition id="somethingTotallyMadeUp" 
title="Compute thing" customComponent={this.Component} /> </RowDefinition> </Griddle> ); } } return <SomeComponent />; }); storiesOf('Filter', module) .add('with Filter place-holder', () => { return ( <Griddle data={fakeData} plugins={[LocalPlugin]} textProperties={{ filterPlaceholder: 'My new Filter text!' }} > <RowDefinition /> </Griddle> ); }) .add('with Custom Filter for the column "name"', () => { class CustomFilter extends components.Filter { public setFilter = (e: any) => { this.props.setFilter({ name: e.target.value }); }; public render() { return ( <label> {'Name: '} <input type="text" name="filter" onChange={this.setFilter} style={this.props.style} className={this.props.className} /> </label> ); } } return ( <Griddle data={fakeData} plugins={[LocalPlugin]} components={{ Filter: CustomFilter }} > <RowDefinition /> </Griddle> ); }); storiesOf('Redux', module) .add('with custom filter connected to another Redux store', () => { // https://stackoverflow.com/questions/47229902/griddle-v1-9-inputbox-in-customfiltercomponent-lose-focus const CustomFilterComponent = props => ( <input value={props.searchString || ''} onChange={e => { props.setSearchString(e.target.value); }} /> ); const setSearchStringActionCreator = searchString => ({ type: 'SET_SEARCH_STRING', searchString }); const CustomFilterConnectedComponent = reduxConnect( (state: TestState) => ({ searchString: state.searchString }), dispatch => ({ setSearchString: e => dispatch(setSearchStringActionCreator(e)) }) )(CustomFilterComponent); const plugins = [ LocalPlugin, { components: { Filter: CustomFilterConnectedComponent } } ]; const SomePage = props => ( <div> <Griddle data={props.data} plugins={plugins} storeKey="griddleStore" /> Component outside of Griddle that's sharing state <CustomFilterConnectedComponent /> </div> ); const SomePageConnected = reduxConnect((state: TestState) => ({ data: !state.searchString ? 
state.data : state.data.filter(r => Object.keys(r).some( k => r[k] && r[k].toString().indexOf(state.searchString) > -1 ) ) }))(SomePage); testStore.dispatch({ type: 'SET_DATA', data: fakeData }); return ( <Provider store={testStore}> <SomePageConnected /> </Provider> ); }) .add('custom column chooser', () => { const columnChooser = compose( connect( state => ({ columns: createSelector( selectors.sortedColumnPropertiesSelector, colMap => { const columns = colMap.valueSeq().toJS(); return columns.filter(c => !c.isMetadata); } )(state) }), { toggleColumn: actions.toggleColumn } ), withHandlers({ onToggle: ({ toggleColumn }) => event => { toggleColumn(event.target.name); } }) )(({ columns, onToggle }) => { return ( <div> {Object.keys(columns).map(c => ( <label key={columns[c].id}> <input type="checkbox" name={columns[c].id} defaultChecked={columns[c].visible !== false} onChange={onToggle} /> {columns[c].title || columns[c].id} </label> ))} </div> ); }); const SimpleColumnChooserPlugin = { components: { SettingsComponents: { columnChooser } } }; return ( <Griddle data={fakeData} plugins={[LocalPlugin, SimpleColumnChooserPlugin]} settingsComponentObjects={{ pageSizeSettings: null }} > <RowDefinition> <ColumnDefinition id="name" /> <ColumnDefinition id="company" /> <ColumnDefinition id="state" /> <ColumnDefinition id="country" visible={false} /> </RowDefinition> </Griddle> ); }) .add('custom page size settings', () => { const pageSizeSettings = ({ pageSizes }) => compose( connect( state => ({ pageSize: selectors.pageSizeSelector(state) }), { setPageSize: actions.setPageSize } ), withHandlers({ onChange: props => e => { props.setPageSize(+e.target.value); } }) )(({ pageSize, onChange }) => { return ( <div> <select onChange={onChange} defaultValue={pageSize}> {pageSizes.map(s => ( <option key={s}>{s}</option> ))} </select> </div> ); }); const PageSizeDropDownPlugin = config => ({ components: { SettingsComponents: { pageSizeSettings: pageSizeSettings(config) } } }); const 
pluginConfig = { pageSizes: [5, 10, 20, 50] }; return ( <Griddle data={fakeData} plugins={[LocalPlugin, PageSizeDropDownPlugin(pluginConfig)]} settingsComponentObjects={{ columnChooser: null }} /> ); }) .add( 'with custom storeKey and child connected to another Redux store', () => { // basically the demo redux stuff const countSelector = state => state.count; const CountComponent = props => ( <div> <button type="button" onClick={props.increment}> + </button> <input value={props.count} readOnly style={{ width: '2em', textAlign: 'center' }} /> <button type="button" onClick={props.decrement}> − </button> </div> ); // should get count from other store const ConnectedComponent = reduxConnect( state => ({ count: countSelector(state) }), dispatch => ({ increment: () => { dispatch({ type: 'INCREMENT' }); }, decrement: () => { dispatch({ type: 'DECREMENT' }); } }) )(CountComponent); return ( <div> <Provider store={testStore}> <div> <Griddle data={fakeData} plugins={[LocalPlugin]} storeKey="griddleStore" > <RowDefinition> <ColumnDefinition id="name" /> <ColumnDefinition id="state" /> <ColumnDefinition id="customCount" customComponent={ConnectedComponent} /> </RowDefinition> </Griddle> Component outside of Griddle that's sharing state <ConnectedComponent /> </div> </Provider> </div> ); } ); storiesOf('Row', module) .add('base row', () => { const columnIds = [1, 2, 3]; return ( <table> <tbody> <Row Cell={({ columnId }) => <td>Cell {columnId}</td>} columnIds={columnIds} /> </tbody> </table> ); }) .add('with local plugin container', () => { const testPlugin = { components: { Cell: ({ griddleKey, columnId }) => ( <td>{`${griddleKey} ${columnId}`}</td> ) } }; return ( <Griddle data={fakeData} plugins={[LocalPlugin, testPlugin]}> <RowDefinition> <ColumnDefinition id="name" order={2} /> <ColumnDefinition id="state" order={1} /> </RowDefinition> </Griddle> ); }); storiesOf('TableBody', module) .add('base table body', () => { const rowIds = [1, 2, 3]; const FakeRow = ({ griddleKey }) 
=> ( <tr> <td>Row id: {griddleKey}</td> </tr> ); return ( <table> <TableBody rowIds={rowIds} Row={FakeRow} /> </table> ); }) .add('with local container', () => { const junkPlugin = { components: { Row: props => ( <tr> <td>{props.griddleKey}</td> </tr> ), // override local row container RowContainer: original => props => original(props) } }; return ( <Griddle data={fakeData} plugins={[LocalPlugin, junkPlugin]}> <RowDefinition> <ColumnDefinition id="name" order={2} /> <ColumnDefinition id="state" order={1} /> </RowDefinition> </Griddle> ); }); storiesOf('TableHeadingCell', module).add('base table heading cell', () => { return ( <table> <thead> <tr> <TableHeadingCell title="New Title" onClick={() => console.log('clicked')} onMouseEnter={() => console.log('mouse over')} onMouseLeave={() => console.log('mouse out')} /> </tr> </thead> </table> ); }); storiesOf('TableHeading', module).add('base table heading', () => { const columnTitles = ['one', 'two', 'three']; return ( <table> <TableHeading columnTitles={columnTitles} TableHeadingCell={TableHeadingCell} /> </table> ); }); storiesOf('Table', module) .add('base table', () => { const noResults = props => <p>Nothing!</p>; return <Table NoResults={noResults} />; }) .add('empty with columns', () => { const components = { Table: ({ TableHeading, TableBody, NoResults, style, visibleRows }) => ( <table style={style}> <TableHeading /> {visibleRows ? 
TableBody && <TableBody /> : NoResults && <NoResults />} </table> ), NoResultsContainer: compose( getContext({ components: PropTypes.object }), connect(state => ({ columnIds: selectors.columnIdsSelector(state), style: selectors.stylesForComponentSelector(state, 'NoResults') })), mapProps(props => ({ NoResults: props.components.NoResults, ...props })) ), NoResults: ({ columnIds, style }) => ( <tr style={style}> <td colSpan={columnIds.length}>Nothing!</td> </tr> ) }; const styleConfig = { styles: { NoResults: { backgroundColor: '#eee', textAlign: 'center' } as React.CSSProperties, Table: { width: '80%' } } }; return ( <Griddle styleConfig={styleConfig} components={components}> <RowDefinition> <ColumnDefinition id="name" order={2} /> <ColumnDefinition id="state" order={1} /> </RowDefinition> </Griddle> ); }) .add('base table with visibleRows', () => { const tableHeading = props => ( <thead> <tr> <th>One</th> <th>Two</th> <th>Three</th> </tr> </thead> ); const tableBody = props => ( <tbody> <tr> <td>uno</td> <td>dos</td> <td>tres</td> </tr> </tbody> ); return ( <Table visibleRows={1} TableHeading={tableHeading} TableBody={tableBody} /> ); }); storiesOf('SettingsWrapper', module) .add('base disabled', () => { return <SettingsWrapper />; }) .add('base enabled not visible', () => { const toggle = props => <div>Toggle!</div>; return <SettingsWrapper isEnabled={true} SettingsToggle={toggle} />; }) .add('base enabled and visible', () => { const settings = props => <div>Settings!</div>; return ( <SettingsWrapper isEnabled={true} isVisible={true} Settings={settings} /> ); }); storiesOf('SettingsToggle', module).add('base', () => { const onClick = () => console.log('toggle'); return <SettingsToggle onClick={onClick} text={'Toggle!'} />; }); storiesOf('Settings', module) .add('base', () => { const components = [1, 2, 3].map((n, i) => props => ( <div>Settings {n}</div> )); return <Settings settingsComponents={components} />; }) .add('disable settings', () => { return ( <Griddle 
data={fakeData} plugins={[LocalPlugin]} enableSettings={false} /> ); }) .add('change settings toggle button text', () => { return ( <Griddle data={fakeData} plugins={[LocalPlugin]} textProperties={{ settingsToggle: 'Toggle!' }} /> ); }) .add('remove built-in settings', () => { const plugin = { components: { SettingsComponents: null }, settingsComponentObjects: { fancy: { order: 1, component: () => <div>Fancy Settings Component</div> } } }; return <Griddle data={fakeData} plugins={[LocalPlugin, plugin]} />; }) .add('reorder built-in settings', () => { const settingsComponentObjects = { before: { order: 1, component: () => <div>Before</div> }, columnChooser: { order: 2 }, between: { order: 3, component: () => <div>Between</div> }, pageSizeSettings: { order: 4 }, after: { order: 5, component: () => <div>After</div> } }; return ( <Griddle data={fakeData} plugins={[LocalPlugin]} settingsComponentObjects={settingsComponentObjects} /> ); }) .add('relocate page size setting near pagination', () => { const PageSizeSettings = components.SettingsComponents.pageSizeSettings; const PageSizeDropDownInPaginationPlugin = { components: { Pagination: ({ Next, Previous, PageDropdown }) => ( <div> <PageSizeSettings /> {Previous && <Previous />} {PageDropdown && <PageDropdown />} {Next && <Next />} </div> ) }, initialState: { textProperties: { next: '▶', previous: '◀' } } }; return ( <Griddle data={fakeData} plugins={[LocalPlugin, PageSizeDropDownInPaginationPlugin]} settingsComponentObjects={{ pageSizeSettings: null }} /> ); }); storiesOf('core', module) .add('Can replace core', () => { const core = { components: { Layout: () => <h1>Core Replaced!</h1> } }; return <Griddle core={core} />; }) .add('Can handle null core', () => { return <Griddle core={null} />; }); storiesOf('TypeScript', module).add( 'GriddleComponent accepts expected types', () => { class Custom extends React.Component<{ value }> { render() { return <strong>{this.props.value}</strong>; } } return ( <Griddle 
data={fakeData} plugins={[LocalPlugin]}> <RowDefinition> <ColumnDefinition id="name" customComponent={({ value }) => <em>{value}</em>} /> <ColumnDefinition id="state" customComponent={Custom} /> </RowDefinition> </Griddle> ); } );
the_stack
import _ from 'lodash' import { expect } from 'chai' import { _rewriteJsUnsafe } from '../../lib/js' import fse from 'fs-extra' import Bluebird from 'bluebird' import rp from '@cypress/request-promise' import snapshot from 'snap-shot-it' import * as astTypes from 'ast-types' import sinon from 'sinon' import { testSourceWithExternalSourceMap, testSourceWithInlineSourceMap, } from '../fixtures' const URL = 'http://example.com/foo.js' function match (varName, prop) { return `globalThis.top.Cypress.resolveWindowReference(globalThis, ${varName}, '${prop}')` } function matchLocation () { return `globalThis.top.Cypress.resolveLocationReference(globalThis)` } function testExpectedJs (string: string, expected: string) { // use _rewriteJsUnsafe so exceptions can cause the test to fail const actual = _rewriteJsUnsafe(URL, string) expect(actual).to.eq(expected) } describe('js rewriter', function () { afterEach(() => { sinon.restore() }) context('.rewriteJs', function () { context('transformations', function () { context('injects Cypress window property resolver', () => { [ ['window.top', match('window', 'top')], ['window.parent', match('window', 'parent')], ['window[\'top\']', match('window', 'top')], ['window[\'parent\']', match('window', 'parent')], ['window["top"]', match('window', 'top')], ['window["parent"]', match('window', 'parent')], ['foowindow.top', match('foowindow', 'top')], ['foowindow[\'top\']', match('foowindow', 'top')], ['window.topfoo'], ['window[\'topfoo\']'], ['window[\'top\'].foo', `${match('window', 'top')}.foo`], ['window.top.foo', `${match('window', 'top')}.foo`], ['window.top["foo"]', `${match('window', 'top')}["foo"]`], ['window[\'top\']["foo"]', `${match('window', 'top')}["foo"]`], [ 'if (window["top"] != window["parent"]) run()', `if (${match('window', 'top')} != ${match('window', 'parent')}) run()`, ], [ 'if (top != self) run()', `if (${match('globalThis', 'top')} != self) run()`, ], [ 'if (window != top) run()', `if (window != 
${match('globalThis', 'top')}) run()`, ], [ 'if (top.location != self.location) run()', `if (${match('top', 'location')} != ${match('self', 'location')}) run()`, ], [ 'n = (c = n).parent', `n = ${match('c = n', 'parent')}`, ], [ 'e.top = "0"', `globalThis.top.Cypress.resolveWindowReference(globalThis, e, 'top', "0")`, ], ['e.top += 0'], [ 'e.bottom += e.top', `e.bottom += ${match('e', 'top')}`, ], [ 'if (a = (e.top = "0")) { }', `if (a = (globalThis.top.Cypress.resolveWindowReference(globalThis, e, 'top', "0"))) { }`, ], // test that double quotes remain double-quoted [ 'a = "b"; window.top', `a = "b"; ${match('window', 'top')}`, ], ['({ top: "foo", parent: "bar" })'], ['top: "foo"; parent: "bar";'], ['top: break top'], ['top: continue top;'], [ 'function top() { window.top }; function parent(...top) { window.top }', `function top() { ${match('window', 'top')} }; function parent(...top) { ${match('window', 'top')} }`, ], [ '(top, ...parent) => { window.top }', `(top, ...parent) => { ${match('window', 'top')} }`, ], [ '(function top() { window.top }); (function parent(...top) { window.top })', `(function top() { ${match('window', 'top')} }); (function parent(...top) { ${match('window', 'top')} })`, ], [ 'top += 4', ], [ // test that arguments are not replaced 'function foo(location) { location.href = \'bar\' }', ], [ // test that global variables are replaced 'function foo(notLocation) { location.href = \'bar\' }', `function foo(notLocation) { ${matchLocation()}.href = \'bar\' }`, ], [ // test that scoped declarations are not replaced 'let location = "foo"; location.href = \'bar\'', ], [ 'location.href = "bar"', `${matchLocation()}.href = "bar"`, ], [ 'location = "bar"', `${matchLocation()}.href = "bar"`, ], [ 'window.location.href = "bar"', `${match('window', 'location')}.href = "bar"`, ], [ 'window.location = "bar"', `globalThis.top.Cypress.resolveWindowReference(globalThis, window, 'location', "bar")`, ], [ 'document.location.href = "bar"', `${match('document', 
'location')}.href = "bar"`, ], [ 'document.location = "bar"', `globalThis.top.Cypress.resolveWindowReference(globalThis, document, 'location', "bar")`, ], ] .forEach(([string, expected]) => { if (!expected) { expected = string } it(`${string} => ${expected}`, () => { testExpectedJs(string, expected) }) }) }) it('throws an error via the driver if AST visiting throws an error', () => { // if astTypes.visit throws, that indicates a bug in our js-rules, and so we should stop rewriting const err = new Error('foo') err.stack = 'stack' sinon.stub(astTypes, 'visit').throws(err) const actual = _rewriteJsUnsafe(URL, 'console.log()') snapshot(actual) }) it('replaces jira window getter', () => { const jira = `\ for (; !function (n) { return n === n.parent }(n);) {}\ ` const jira2 = `\ (function(n){for(;!function(l){return l===l.parent}(l)&&function(l){try{if(void 0==l.location.href)return!1}catch(l){return!1}return!0}(l.parent);)l=l.parent;return l})\ ` const jira3 = `\ function satisfiesSameOrigin(w) { try { // Accessing location.href from a window on another origin will throw an exception. if ( w.location.href == undefined) { return false; } } catch (e) { return false; } return true; } function isTopMostWindow(w) { return w === w.parent; } while (!isTopMostWindow(parentOf) && satisfiesSameOrigin(parentOf.parent)) { parentOf = parentOf.parent; }\ ` testExpectedJs(jira, `\ for (; !function (n) { return n === ${match('n', 'parent')}; }(n);) {}\ `) testExpectedJs(jira2, `\ (function(n){for(;!function(l){return l===${match('l', 'parent')};}(l)&&function(l){try{if(void 0==${match('l', 'location')}.href)return!1}catch(l){return!1}return!0}(${match('l', 'parent')});)l=${match('l', 'parent')};return l})\ `) testExpectedJs(jira3, `\ function satisfiesSameOrigin(w) { try { // Accessing location.href from a window on another origin will throw an exception. 
if ( ${match('w', 'location')}.href == undefined) { return false; } } catch (e) { return false; } return true; } function isTopMostWindow(w) { return w === ${match('w', 'parent')}; } while (!isTopMostWindow(parentOf) && satisfiesSameOrigin(${match('parentOf', 'parent')})) { parentOf = ${match('parentOf', 'parent')}; }\ `) }) describe('libs', () => { const cdnUrl = 'https://cdnjs.cloudflare.com/ajax/libs' const needsDash = ['backbone', 'underscore'] let libs = { jquery: `${cdnUrl}/jquery/3.3.1/jquery.js`, jqueryui: `${cdnUrl}/jqueryui/1.12.1/jquery-ui.js`, angular: `${cdnUrl}/angular.js/1.6.5/angular.js`, bootstrap: `${cdnUrl}/twitter-bootstrap/4.0.0/js/bootstrap.js`, moment: `${cdnUrl}/moment.js/2.20.1/moment.js`, lodash: `${cdnUrl}/lodash.js/4.17.5/lodash.js`, vue: `${cdnUrl}/vue/2.5.13/vue.js`, backbone: `${cdnUrl}/backbone.js/1.3.3/backbone.js`, cycle: `${cdnUrl}/cyclejs-core/7.0.0/cycle.js`, d3: `${cdnUrl}/d3/4.13.0/d3.js`, underscore: `${cdnUrl}/underscore.js/1.8.3/underscore.js`, foundation: `${cdnUrl}/foundation/6.4.3/js/foundation.js`, require: `${cdnUrl}/require.js/2.3.5/require.js`, rxjs: `${cdnUrl}/rxjs/5.5.6/Rx.js`, bluebird: `${cdnUrl}/bluebird/3.5.1/bluebird.js`, } libs = _ .chain(libs) .clone() .reduce((memo, url, lib) => { memo[lib] = url memo[`${lib}Min`] = url .replace(/js$/, 'min.js') .replace(/css$/, 'min.css') if (needsDash.includes(lib)) { memo[`${lib}Min`] = url.replace('min', '-min') } return memo } , {}) .extend({ knockoutDebug: `${cdnUrl}/knockout/3.4.2/knockout-debug.js`, knockoutMin: `${cdnUrl}/knockout/3.4.2/knockout-min.js`, emberMin: `${cdnUrl}/ember.js/2.18.2/ember.min.js`, emberProd: `${cdnUrl}/ember.js/2.18.2/ember.prod.js`, reactDev: `${cdnUrl}/react/16.2.0/umd/react.development.js`, reactProd: `${cdnUrl}/react/16.2.0/umd/react.production.min.js`, vendorBundle: 'https://s3.amazonaws.com/internal-test-runner-assets.cypress.io/vendor.bundle.js', hugeApp: 'https://s3.amazonaws.com/internal-test-runner-assets.cypress.io/huge_app.js', 
}) .value() as unknown as typeof libs _.each(libs, (url, lib) => { it(`does not corrupt code from '${lib}'`, function () { // may have to download and rewrite large files this.timeout(30000) const pathToLib = `/tmp/${lib}` const downloadFile = () => { return rp(url) .then((resp) => { return Bluebird.fromCallback((cb) => { fse.writeFile(pathToLib, resp, cb) }) .return(resp) }) } return fse .readFile(pathToLib, 'utf8') .catch(downloadFile) .then((libCode) => { const stripped = _rewriteJsUnsafe(url, libCode) expect(() => eval(stripped), 'is valid JS').to.not.throw }) }) }) }) }) context('source maps', function () { it('emits sourceInfo as expected', function (done) { _rewriteJsUnsafe(URL, 'window.top', (sourceInfo) => { snapshot(sourceInfo) done() return '' }) }) it('emits info about existing inline sourcemap', function (done) { _rewriteJsUnsafe(URL, testSourceWithInlineSourceMap, (sourceInfo) => { snapshot(sourceInfo) done() return '' }) }) it('emits info about existing external sourcemap', function (done) { _rewriteJsUnsafe(URL, testSourceWithExternalSourceMap, (sourceInfo) => { snapshot(sourceInfo) done() return '' }) }) }) }) })
the_stack
interface Console { Console: NodeJS.ConsoleConstructor; /** * A simple assertion test that verifies whether `value` is truthy. * If it is not, an `AssertionError` is thrown. * If provided, the error `message` is formatted using `util.format()` and used as the error message. */ assert(value: any, message?: string, ...optionalParams: any[]): void; /** * When `stdout` is a TTY, calling `console.clear()` will attempt to clear the TTY. * When `stdout` is not a TTY, this method does nothing. */ clear(): void; /** * Maintains an internal counter specific to `label` and outputs to `stdout` the number of times `console.count()` has been called with the given `label`. */ count(label?: string): void; /** * Resets the internal counter specific to `label`. */ countReset(label?: string): void; /** * The `console.debug()` function is an alias for {@link console.log()}. */ debug(message?: any, ...optionalParams: any[]): void; /** * Uses {@link util.inspect()} on `obj` and prints the resulting string to `stdout`. * This function bypasses any custom `inspect()` function defined on `obj`. */ dir(obj: any, options?: NodeJS.InspectOptions): void; /** * This method calls {@link console.log()} passing it the arguments received. Please note that this method does not produce any XML formatting */ dirxml(...data: any[]): void; /** * Prints to `stderr` with newline. */ error(message?: any, ...optionalParams: any[]): void; /** * Increases indentation of subsequent lines by two spaces. * If one or more `label`s are provided, those are printed first without the additional indentation. */ group(...label: any[]): void; /** * The `console.groupCollapsed()` function is an alias for {@link console.group()}. */ groupCollapsed(...label: any[]): void; /** * Decreases indentation of subsequent lines by two spaces. */ groupEnd(): void; /** * The {@link console.info()} function is an alias for {@link console.log()}. 
*/ info(message?: any, ...optionalParams: any[]): void; /** * Prints to `stdout` with newline. */ log(message?: any, ...optionalParams: any[]): void; /** * This method does not display anything unless used in the inspector. * Prints to `stdout` the array `array` formatted as a table. */ table(tabularData: any, properties?: string[]): void; /** * Starts a timer that can be used to compute the duration of an operation. Timers are identified by a unique `label`. */ time(label?: string): void; /** * Stops a timer that was previously started by calling {@link console.time()} and prints the result to `stdout`. */ timeEnd(label?: string): void; /** * For a timer that was previously started by calling {@link console.time()}, prints the elapsed time and other `data` arguments to `stdout`. */ timeLog(label?: string, ...data: any[]): void; /** * Prints to `stderr` the string 'Trace :', followed by the {@link util.format()} formatted message and stack trace to the current position in the code. */ trace(message?: any, ...optionalParams: any[]): void; /** * The {@link console.warn()} function is an alias for {@link console.error()}. */ warn(message?: any, ...optionalParams: any[]): void; // --- Inspector mode only --- /** * This method does not display anything unless used in the inspector. * Starts a JavaScript CPU profile with an optional label. */ profile(label?: string): void; /** * This method does not display anything unless used in the inspector. * Stops the current JavaScript CPU profiling session if one has been started and prints the report to the Profiles panel of the inspector. */ profileEnd(label?: string): void; /** * This method does not display anything unless used in the inspector. * Adds an event with the label `label` to the Timeline panel of the inspector. 
*/ timeStamp(label?: string): void; } // Declare "static" methods in Error interface ErrorConstructor { /** Create .stack property on a target object */ captureStackTrace(targetObject: object, constructorOpt?: Function): void; /** * Optional override for formatting stack traces * * @see https://github.com/v8/v8/wiki/Stack%20Trace%20API#customizing-stack-traces */ prepareStackTrace?: (err: Error, stackTraces: NodeJS.CallSite[]) => any; stackTraceLimit: number; } // Node.js ESNEXT support interface String { /** Removes whitespace from the left end of a string. */ trimLeft(): string; /** Removes whitespace from the right end of a string. */ trimRight(): string; } interface ImportMeta { url: string; } /*-----------------------------------------------* * * * GLOBAL * * * ------------------------------------------------*/ // For backwards compability interface NodeRequire extends NodeJS.Require {} interface RequireResolve extends NodeJS.RequireResolve {} interface NodeModule extends NodeJS.Module {} declare var process: NodeJS.Process; declare var global: NodeJS.Global; declare var console: Console; declare var __filename: string; declare var __dirname: string; declare function setTimeout(callback: (...args: any[]) => void, ms: number, ...args: any[]): NodeJS.Timeout; declare namespace setTimeout { function __promisify__(ms: number): Promise<void>; function __promisify__<T>(ms: number, value: T): Promise<T>; } declare function clearTimeout(timeoutId: NodeJS.Timeout): void; declare function setInterval(callback: (...args: any[]) => void, ms: number, ...args: any[]): NodeJS.Timeout; declare function clearInterval(intervalId: NodeJS.Timeout): void; declare function setImmediate(callback: (...args: any[]) => void, ...args: any[]): NodeJS.Immediate; declare namespace setImmediate { function __promisify__(): Promise<void>; function __promisify__<T>(value: T): Promise<T>; } declare function clearImmediate(immediateId: NodeJS.Immediate): void; declare function 
queueMicrotask(callback: () => void): void; declare var require: NodeRequire; declare var module: NodeModule; // Same as module.exports declare var exports: any; // Buffer class type BufferEncoding = "ascii" | "utf8" | "utf-8" | "utf16le" | "ucs2" | "ucs-2" | "base64" | "latin1" | "binary" | "hex"; /** * Raw data is stored in instances of the Buffer class. * A Buffer is similar to an array of integers but corresponds to a raw memory allocation outside the V8 heap. A Buffer cannot be resized. * Valid string encodings: 'ascii'|'utf8'|'utf16le'|'ucs2'(alias of 'utf16le')|'base64'|'binary'(deprecated)|'hex' */ declare class Buffer extends Uint8Array { /** * Allocates a new buffer containing the given {str}. * * @param str String to store in buffer. * @param encoding encoding to use, optional. Default is 'utf8' * @deprecated since v10.0.0 - Use `Buffer.from(string[, encoding])` instead. */ constructor(str: string, encoding?: BufferEncoding); /** * Allocates a new buffer of {size} octets. * * @param size count of octets to allocate. * @deprecated since v10.0.0 - Use `Buffer.alloc()` instead (also see `Buffer.allocUnsafe()`). */ constructor(size: number); /** * Allocates a new buffer containing the given {array} of octets. * * @param array The octets to store. * @deprecated since v10.0.0 - Use `Buffer.from(array)` instead. */ constructor(array: Uint8Array); /** * Produces a Buffer backed by the same allocated memory as * the given {ArrayBuffer}/{SharedArrayBuffer}. * * * @param arrayBuffer The ArrayBuffer with which to share memory. * @deprecated since v10.0.0 - Use `Buffer.from(arrayBuffer[, byteOffset[, length]])` instead. */ constructor(arrayBuffer: ArrayBuffer | SharedArrayBuffer); /** * Allocates a new buffer containing the given {array} of octets. * * @param array The octets to store. * @deprecated since v10.0.0 - Use `Buffer.from(array)` instead. */ constructor(array: any[]); /** * Copies the passed {buffer} data onto a new {Buffer} instance. 
* * @param buffer The buffer to copy. * @deprecated since v10.0.0 - Use `Buffer.from(buffer)` instead. */ constructor(buffer: Buffer); /** * When passed a reference to the .buffer property of a TypedArray instance, * the newly created Buffer will share the same allocated memory as the TypedArray. * The optional {byteOffset} and {length} arguments specify a memory range * within the {arrayBuffer} that will be shared by the Buffer. * * @param arrayBuffer The .buffer property of any TypedArray or a new ArrayBuffer() */ static from(arrayBuffer: ArrayBuffer | SharedArrayBuffer, byteOffset?: number, length?: number): Buffer; /** * Creates a new Buffer using the passed {data} * @param data data to create a new Buffer */ static from(data: number[]): Buffer; static from(data: Uint8Array): Buffer; /** * Creates a new buffer containing the coerced value of an object * A `TypeError` will be thrown if {obj} has not mentioned methods or is not of other type appropriate for `Buffer.from()` variants. * @param obj An object supporting `Symbol.toPrimitive` or `valueOf()`. */ static from(obj: { valueOf(): string | object } | { [Symbol.toPrimitive](hint: 'string'): string }, byteOffset?: number, length?: number): Buffer; /** * Creates a new Buffer containing the given JavaScript string {str}. * If provided, the {encoding} parameter identifies the character encoding. * If not provided, {encoding} defaults to 'utf8'. */ static from(str: string, encoding?: BufferEncoding): Buffer; /** * Creates a new Buffer using the passed {data} * @param values to create a new Buffer */ static of(...items: number[]): Buffer; /** * Returns true if {obj} is a Buffer * * @param obj object to test. */ static isBuffer(obj: any): obj is Buffer; /** * Returns true if {encoding} is a valid encoding argument. * Valid string encodings in Node 0.12: 'ascii'|'utf8'|'utf16le'|'ucs2'(alias of 'utf16le')|'base64'|'binary'(deprecated)|'hex' * * @param encoding string to test. 
*/ static isEncoding(encoding: string): encoding is BufferEncoding; /** * Gives the actual byte length of a string. encoding defaults to 'utf8'. * This is not the same as String.prototype.length since that returns the number of characters in a string. * * @param string string to test. * @param encoding encoding used to evaluate (defaults to 'utf8') */ static byteLength( string: string | NodeJS.ArrayBufferView | ArrayBuffer | SharedArrayBuffer, encoding?: BufferEncoding ): number; /** * Returns a buffer which is the result of concatenating all the buffers in the list together. * * If the list has no items, or if the totalLength is 0, then it returns a zero-length buffer. * If the list has exactly one item, then the first item of the list is returned. * If the list has more than one item, then a new Buffer is created. * * @param list An array of Buffer objects to concatenate * @param totalLength Total length of the buffers when concatenated. * If totalLength is not provided, it is read from the buffers in the list. However, this adds an additional loop to the function, so it is faster to provide the length explicitly. */ static concat(list: Uint8Array[], totalLength?: number): Buffer; /** * The same as buf1.compare(buf2). */ static compare(buf1: Uint8Array, buf2: Uint8Array): number; /** * Allocates a new buffer of {size} octets. * * @param size count of octets to allocate. * @param fill if specified, buffer will be initialized by calling buf.fill(fill). * If parameter is omitted, buffer will be filled with zeros. * @param encoding encoding used for call to buf.fill while initalizing */ static alloc(size: number, fill?: string | Buffer | number, encoding?: BufferEncoding): Buffer; /** * Allocates a new buffer of {size} octets, leaving memory not initialized, so the contents * of the newly created Buffer are unknown and may contain sensitive data. 
* * @param size count of octets to allocate */ static allocUnsafe(size: number): Buffer; /** * Allocates a new non-pooled buffer of {size} octets, leaving memory not initialized, so the contents * of the newly created Buffer are unknown and may contain sensitive data. * * @param size count of octets to allocate */ static allocUnsafeSlow(size: number): Buffer; /** * This is the number of bytes used to determine the size of pre-allocated, internal Buffer instances used for pooling. This value may be modified. */ static poolSize: number; write(string: string, encoding?: BufferEncoding): number; write(string: string, offset: number, encoding?: BufferEncoding): number; write(string: string, offset: number, length: number, encoding?: BufferEncoding): number; toString(encoding?: string, start?: number, end?: number): string; toJSON(): { type: 'Buffer'; data: number[] }; equals(otherBuffer: Uint8Array): boolean; compare( otherBuffer: Uint8Array, targetStart?: number, targetEnd?: number, sourceStart?: number, sourceEnd?: number ): number; copy(targetBuffer: Uint8Array, targetStart?: number, sourceStart?: number, sourceEnd?: number): number; /** * Returns a new `Buffer` that references **the same memory as the original**, but offset and cropped by the start and end indices. * * This method is incompatible with `Uint8Array#slice()`, which returns a copy of the original memory. * * @param begin Where the new `Buffer` will start. Default: `0`. * @param end Where the new `Buffer` will end (not inclusive). Default: `buf.length`. */ slice(begin?: number, end?: number): Buffer; /** * Returns a new `Buffer` that references **the same memory as the original**, but offset and cropped by the start and end indices. * * This method is compatible with `Uint8Array#subarray()`. * * @param begin Where the new `Buffer` will start. Default: `0`. * @param end Where the new `Buffer` will end (not inclusive). Default: `buf.length`. 
*/ subarray(begin?: number, end?: number): Buffer; writeUIntLE(value: number, offset: number, byteLength: number): number; writeUIntBE(value: number, offset: number, byteLength: number): number; writeIntLE(value: number, offset: number, byteLength: number): number; writeIntBE(value: number, offset: number, byteLength: number): number; readUIntLE(offset: number, byteLength: number): number; readUIntBE(offset: number, byteLength: number): number; readIntLE(offset: number, byteLength: number): number; readIntBE(offset: number, byteLength: number): number; readUInt8(offset?: number): number; readUInt16LE(offset?: number): number; readUInt16BE(offset?: number): number; readUInt32LE(offset?: number): number; readUInt32BE(offset?: number): number; readInt8(offset?: number): number; readInt16LE(offset?: number): number; readInt16BE(offset?: number): number; readInt32LE(offset?: number): number; readInt32BE(offset?: number): number; readFloatLE(offset?: number): number; readFloatBE(offset?: number): number; readDoubleLE(offset?: number): number; readDoubleBE(offset?: number): number; reverse(): this; swap16(): Buffer; swap32(): Buffer; swap64(): Buffer; writeUInt8(value: number, offset?: number): number; writeUInt16LE(value: number, offset?: number): number; writeUInt16BE(value: number, offset?: number): number; writeUInt32LE(value: number, offset?: number): number; writeUInt32BE(value: number, offset?: number): number; writeInt8(value: number, offset?: number): number; writeInt16LE(value: number, offset?: number): number; writeInt16BE(value: number, offset?: number): number; writeInt32LE(value: number, offset?: number): number; writeInt32BE(value: number, offset?: number): number; writeFloatLE(value: number, offset?: number): number; writeFloatBE(value: number, offset?: number): number; writeDoubleLE(value: number, offset?: number): number; writeDoubleBE(value: number, offset?: number): number; fill(value: string | Uint8Array | number, offset?: number, end?: number, 
encoding?: BufferEncoding): this; indexOf(value: string | number | Uint8Array, byteOffset?: number, encoding?: BufferEncoding): number; lastIndexOf(value: string | number | Uint8Array, byteOffset?: number, encoding?: BufferEncoding): number; entries(): IterableIterator<[number, number]>; includes(value: string | number | Buffer, byteOffset?: number, encoding?: BufferEncoding): boolean; keys(): IterableIterator<number>; values(): IterableIterator<number>; } /*----------------------------------------------* * * * GLOBAL INTERFACES * * * *-----------------------------------------------*/ declare namespace NodeJS { interface InspectOptions { /** * If set to `true`, getters are going to be * inspected as well. If set to `'get'` only getters without setter are going * to be inspected. If set to `'set'` only getters having a corresponding * setter are going to be inspected. This might cause side effects depending on * the getter function. * @default `false` */ getters?: 'get' | 'set' | boolean; showHidden?: boolean; /** * @default 2 */ depth?: number | null; colors?: boolean; customInspect?: boolean; showProxy?: boolean; maxArrayLength?: number | null; /** * Specifies the maximum number of characters to * include when formatting. Set to `null` or `Infinity` to show all elements. * Set to `0` or negative to show no characters. * @default Infinity */ maxStringLength?: number | null; breakLength?: number; /** * Setting this to `false` causes each object key * to be displayed on a new line. It will also add new lines to text that is * longer than `breakLength`. If set to a number, the most `n` inner elements * are united on a single line as long as all properties fit into * `breakLength`. Short array elements are also grouped together. Note that no * text will be reduced below 16 characters, no matter the `breakLength` size. * For more information, see the example below. 
* @default `true` */ compact?: boolean | number; sorted?: boolean | ((a: string, b: string) => number); } interface ConsoleConstructorOptions { stdout: WritableStream; stderr?: WritableStream; ignoreErrors?: boolean; colorMode?: boolean | 'auto'; inspectOptions?: InspectOptions; } interface ConsoleConstructor { prototype: Console; new(stdout: WritableStream, stderr?: WritableStream, ignoreErrors?: boolean): Console; new(options: ConsoleConstructorOptions): Console; } interface CallSite { /** * Value of "this" */ getThis(): any; /** * Type of "this" as a string. * This is the name of the function stored in the constructor field of * "this", if available. Otherwise the object's [[Class]] internal * property. */ getTypeName(): string | null; /** * Current function */ getFunction(): Function | undefined; /** * Name of the current function, typically its name property. * If a name property is not available an attempt will be made to try * to infer a name from the function's context. */ getFunctionName(): string | null; /** * Name of the property [of "this" or one of its prototypes] that holds * the current function */ getMethodName(): string | null; /** * Name of the script [if this function was defined in a script] */ getFileName(): string | null; /** * Current line number [if this function was defined in a script] */ getLineNumber(): number | null; /** * Current column number [if this function was defined in a script] */ getColumnNumber(): number | null; /** * A call site object representing the location where eval was called * [if this function was created using a call to eval] */ getEvalOrigin(): string | undefined; /** * Is this a toplevel invocation, that is, is "this" the global object? */ isToplevel(): boolean; /** * Does this call take place in code defined by a call to eval? */ isEval(): boolean; /** * Is this call in native V8 code? */ isNative(): boolean; /** * Is this a constructor call? 
 */
    isConstructor(): boolean;
  }

  /**
   * Shape of errors produced by failed OS-level operations (fs, net, ...):
   * a standard Error augmented with optional syscall metadata.
   */
  interface ErrnoException extends Error {
    errno?: number;
    code?: string;
    path?: string;
    syscall?: string;
    stack?: string;
  }

  /**
   * Minimal structural contract of Node's events.EventEmitter, declared here
   * so other global interfaces (streams, Process, Domain) can extend it.
   */
  interface EventEmitter {
    addListener(event: string | symbol, listener: (...args: any[]) => void): this;
    on(event: string | symbol, listener: (...args: any[]) => void): this;
    once(event: string | symbol, listener: (...args: any[]) => void): this;
    removeListener(event: string | symbol, listener: (...args: any[]) => void): this;
    off(event: string | symbol, listener: (...args: any[]) => void): this;
    removeAllListeners(event?: string | symbol): this;
    setMaxListeners(n: number): this;
    getMaxListeners(): number;
    listeners(event: string | symbol): Function[];
    rawListeners(event: string | symbol): Function[];
    emit(event: string | symbol, ...args: any[]): boolean;
    listenerCount(type: string | symbol): number;
    // Added in Node 6...
    prependListener(event: string | symbol, listener: (...args: any[]) => void): this;
    prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this;
    eventNames(): Array<string | symbol>;
  }

  /** Structural contract of a readable stream (see stream.Readable). */
  interface ReadableStream extends EventEmitter {
    readable: boolean;
    read(size?: number): string | Buffer;
    setEncoding(encoding: string): this;
    pause(): this;
    resume(): this;
    isPaused(): boolean;
    pipe<T extends WritableStream>(destination: T, options?: { end?: boolean; }): T;
    unpipe(destination?: WritableStream): this;
    unshift(chunk: string | Uint8Array, encoding?: BufferEncoding): void;
    wrap(oldStream: ReadableStream): this;
    [Symbol.asyncIterator](): AsyncIterableIterator<string | Buffer>;
  }

  /** Structural contract of a writable stream (see stream.Writable). */
  interface WritableStream extends EventEmitter {
    writable: boolean;
    write(buffer: Uint8Array | string, cb?: (err?: Error | null) => void): boolean;
    write(str: string, encoding?: string, cb?: (err?: Error | null) => void): boolean;
    end(cb?: () => void): void;
    end(data: string | Uint8Array, cb?: () => void): void;
    end(str: string, encoding?: string, cb?: () => void): void;
  }

  /** A stream that is both readable and writable (e.g. a Duplex). */
  interface ReadWriteStream extends
ReadableStream, WritableStream { }

  /** The (deprecated) domain module's Domain object; routes errors from registered emitters/timers. */
  interface Domain extends EventEmitter {
    run<T>(fn: (...args: any[]) => T, ...args: any[]): T;
    add(emitter: EventEmitter | Timer): void;
    remove(emitter: EventEmitter | Timer): void;
    bind<T extends Function>(cb: T): T;
    intercept<T extends Function>(cb: T): T;
    addListener(event: string, listener: (...args: any[]) => void): this;
    on(event: string, listener: (...args: any[]) => void): this;
    once(event: string, listener: (...args: any[]) => void): this;
    removeListener(event: string, listener: (...args: any[]) => void): this;
    removeAllListeners(event?: string): this;
  }

  /** Return shape of process.memoryUsage(); all values are byte counts. */
  interface MemoryUsage {
    rss: number;
    heapTotal: number;
    heapUsed: number;
    external: number;
    arrayBuffers: number;
  }

  /** Return shape of process.cpuUsage(); values are microseconds. */
  interface CpuUsage {
    user: number;
    system: number;
  }

  /** Shape of process.release — metadata about the current Node.js release. */
  interface ProcessRelease {
    name: string;
    sourceUrl?: string;
    headersUrl?: string;
    libUrl?: string;
    lts?: string;
  }

  /** Shape of process.versions — version strings of Node and its bundled deps. */
  interface ProcessVersions {
    http_parser: string;
    node: string;
    v8: string;
    ares: string;
    uv: string;
    zlib: string;
    modules: string;
    openssl: string;
  }

  type Platform = 'aix'
    | 'android'
    | 'darwin'
    | 'freebsd'
    | 'linux'
    | 'openbsd'
    | 'sunos'
    | 'win32'
    | 'cygwin'
    | 'netbsd';

  type Signals =
    "SIGABRT" | "SIGALRM" | "SIGBUS" | "SIGCHLD" | "SIGCONT" | "SIGFPE" | "SIGHUP" | "SIGILL" | "SIGINT" | "SIGIO" |
    "SIGIOT" | "SIGKILL" | "SIGPIPE" | "SIGPOLL" | "SIGPROF" | "SIGPWR" | "SIGQUIT" | "SIGSEGV" | "SIGSTKFLT" |
    "SIGSTOP" | "SIGSYS" | "SIGTERM" | "SIGTRAP" | "SIGTSTP" | "SIGTTIN" | "SIGTTOU" | "SIGUNUSED" | "SIGURG" |
    "SIGUSR1" | "SIGUSR2" | "SIGVTALRM" | "SIGWINCH" | "SIGXCPU" | "SIGXFSZ" | "SIGBREAK" | "SIGLOST" | "SIGINFO";

  type MultipleResolveType = 'resolve' | 'reject';

  // Listener signatures for the corresponding process events.
  type BeforeExitListener = (code: number) => void;
  type DisconnectListener = () => void;
  type ExitListener = (code: number) => void;
  type RejectionHandledListener = (promise: Promise<any>) => void;
  type UncaughtExceptionListener = (error: Error) => void;
  type UnhandledRejectionListener = (reason: {} | null | undefined, promise:
Promise<any>) => void;
  type WarningListener = (warning: Error) => void;
  type MessageListener = (message: any, sendHandle: any) => void;
  type SignalsListener = (signal: Signals) => void;
  type NewListenerListener = (type: string | symbol, listener: (...args: any[]) => void) => void;
  type RemoveListenerListener = (type: string | symbol, listener: (...args: any[]) => void) => void;
  type MultipleResolveListener = (type: MultipleResolveType, promise: Promise<any>, value: any) => void;

  /** Minimal socket shape used for process.stdin/stdout typing; isTTY is set only on TTY streams. */
  interface Socket extends ReadWriteStream {
    isTTY?: true;
  }

  // Alias for compatibility
  interface ProcessEnv extends Dict<string> {}

  /** Callable shape of process.hrtime: returns [seconds, nanoseconds], optionally relative to a prior reading. */
  interface HRTime {
    (time?: [number, number]): [number, number];
  }

  /** Shape of process.report (diagnostic report API; experimental, see `--experimental-report`). */
  interface ProcessReport {
    /**
     * Directory where the report is written.
     * @default '' indicating that reports are written to the current
     * working directory of the Node.js process.
     */
    directory: string;

    /**
     * Filename where the report is written.
     * The default value is the empty string.
     * @default '' the output filename will be comprised of a timestamp,
     * PID, and sequence number.
     */
    filename: string;

    /**
     * Returns a JSON-formatted diagnostic report for the running process.
     * The report's JavaScript stack trace is taken from err, if present.
     */
    getReport(err?: Error): string;

    /**
     * If true, a diagnostic report is generated on fatal errors,
     * such as out of memory errors or failed C++ assertions.
     * @default false
     */
    reportOnFatalError: boolean;

    /**
     * If true, a diagnostic report is generated when the process
     * receives the signal specified by process.report.signal.
     * @default false
     */
    reportOnSignal: boolean;

    /**
     * If true, a diagnostic report is generated on uncaught exception.
     * @default false
     */
    reportOnUncaughtException: boolean;

    /**
     * The signal used to trigger the creation of a diagnostic report.
     * @default 'SIGUSR2'
     */
    signal: Signals;

    /**
     * Writes a diagnostic report to a file. If filename is not provided, the default filename
     * includes the date, time, PID, and a sequence number.
* The report's JavaScript stack trace is taken from err, if present. * * @param fileName Name of the file where the report is written. * This should be a relative path, that will be appended to the directory specified in * `process.report.directory`, or the current working directory of the Node.js process, * if unspecified. * @param error A custom error used for reporting the JavaScript stack. * @return Filename of the generated report. */ writeReport(fileName?: string): string; writeReport(error?: Error): string; writeReport(fileName?: string, err?: Error): string; } interface ResourceUsage { fsRead: number; fsWrite: number; involuntaryContextSwitches: number; ipcReceived: number; ipcSent: number; majorPageFault: number; maxRSS: number; minorPageFault: number; sharedMemorySize: number; signalsCount: number; swappedOut: number; systemCPUTime: number; unsharedDataSize: number; unsharedStackSize: number; userCPUTime: number; voluntaryContextSwitches: number; } interface Process extends EventEmitter { /** * Can also be a tty.WriteStream, not typed due to limitation.s */ stdout: WriteStream; /** * Can also be a tty.WriteStream, not typed due to limitation.s */ stderr: WriteStream; stdin: ReadStream; openStdin(): Socket; argv: string[]; argv0: string; execArgv: string[]; execPath: string; abort(): void; chdir(directory: string): void; cwd(): string; debugPort: number; emitWarning(warning: string | Error, name?: string, ctor?: Function): void; env: ProcessEnv; exit(code?: number): never; exitCode?: number; getgid(): number; setgid(id: number | string): void; getuid(): number; setuid(id: number | string): void; geteuid(): number; seteuid(id: number | string): void; getegid(): number; setegid(id: number | string): void; getgroups(): number[]; setgroups(groups: Array<string | number>): void; setUncaughtExceptionCaptureCallback(cb: ((err: Error) => void) | null): void; hasUncaughtExceptionCaptureCallback(): boolean; version: string; versions: ProcessVersions; config: { 
target_defaults: { cflags: any[]; default_configuration: string; defines: string[]; include_dirs: string[]; libraries: string[]; }; variables: { clang: number; host_arch: string; node_install_npm: boolean; node_install_waf: boolean; node_prefix: string; node_shared_openssl: boolean; node_shared_v8: boolean; node_shared_zlib: boolean; node_use_dtrace: boolean; node_use_etw: boolean; node_use_openssl: boolean; target_arch: string; v8_no_strict_aliasing: number; v8_use_snapshot: boolean; visibility: string; }; }; kill(pid: number, signal?: string | number): void; pid: number; ppid: number; title: string; arch: string; platform: Platform; mainModule?: Module; memoryUsage(): MemoryUsage; cpuUsage(previousValue?: CpuUsage): CpuUsage; nextTick(callback: Function, ...args: any[]): void; release: ProcessRelease; features: { inspector: boolean; debug: boolean; uv: boolean; ipv6: boolean; tls_alpn: boolean; tls_sni: boolean; tls_ocsp: boolean; tls: boolean; }; /** * Can only be set if not in worker thread. */ umask(mask?: number): number; uptime(): number; hrtime: HRTime; domain: Domain; // Worker send?(message: any, sendHandle?: any, options?: { swallowErrors?: boolean}, callback?: (error: Error | null) => void): boolean; disconnect(): void; connected: boolean; /** * The `process.allowedNodeEnvironmentFlags` property is a special, * read-only `Set` of flags allowable within the [`NODE_OPTIONS`][] * environment variable. 
*/ allowedNodeEnvironmentFlags: ReadonlySet<string>; /** * Only available with `--experimental-report` */ report?: ProcessReport; resourceUsage(): ResourceUsage; /* EventEmitter */ addListener(event: "beforeExit", listener: BeforeExitListener): this; addListener(event: "disconnect", listener: DisconnectListener): this; addListener(event: "exit", listener: ExitListener): this; addListener(event: "rejectionHandled", listener: RejectionHandledListener): this; addListener(event: "uncaughtException", listener: UncaughtExceptionListener): this; addListener(event: "uncaughtExceptionMonitor", listener: UncaughtExceptionListener): this; addListener(event: "unhandledRejection", listener: UnhandledRejectionListener): this; addListener(event: "warning", listener: WarningListener): this; addListener(event: "message", listener: MessageListener): this; addListener(event: Signals, listener: SignalsListener): this; addListener(event: "newListener", listener: NewListenerListener): this; addListener(event: "removeListener", listener: RemoveListenerListener): this; addListener(event: "multipleResolves", listener: MultipleResolveListener): this; emit(event: "beforeExit", code: number): boolean; emit(event: "disconnect"): boolean; emit(event: "exit", code: number): boolean; emit(event: "rejectionHandled", promise: Promise<any>): boolean; emit(event: "uncaughtException", error: Error): boolean; emit(event: "uncaughtExceptionMonitor", error: Error): boolean; emit(event: "unhandledRejection", reason: any, promise: Promise<any>): boolean; emit(event: "warning", warning: Error): boolean; emit(event: "message", message: any, sendHandle: any): this; emit(event: Signals, signal: Signals): boolean; emit(event: "newListener", eventName: string | symbol, listener: (...args: any[]) => void): this; emit(event: "removeListener", eventName: string, listener: (...args: any[]) => void): this; emit(event: "multipleResolves", listener: MultipleResolveListener): this; on(event: "beforeExit", listener: 
BeforeExitListener): this; on(event: "disconnect", listener: DisconnectListener): this; on(event: "exit", listener: ExitListener): this; on(event: "rejectionHandled", listener: RejectionHandledListener): this; on(event: "uncaughtException", listener: UncaughtExceptionListener): this; on(event: "uncaughtExceptionMonitor", listener: UncaughtExceptionListener): this; on(event: "unhandledRejection", listener: UnhandledRejectionListener): this; on(event: "warning", listener: WarningListener): this; on(event: "message", listener: MessageListener): this; on(event: Signals, listener: SignalsListener): this; on(event: "newListener", listener: NewListenerListener): this; on(event: "removeListener", listener: RemoveListenerListener): this; on(event: "multipleResolves", listener: MultipleResolveListener): this; once(event: "beforeExit", listener: BeforeExitListener): this; once(event: "disconnect", listener: DisconnectListener): this; once(event: "exit", listener: ExitListener): this; once(event: "rejectionHandled", listener: RejectionHandledListener): this; once(event: "uncaughtException", listener: UncaughtExceptionListener): this; once(event: "uncaughtExceptionMonitor", listener: UncaughtExceptionListener): this; once(event: "unhandledRejection", listener: UnhandledRejectionListener): this; once(event: "warning", listener: WarningListener): this; once(event: "message", listener: MessageListener): this; once(event: Signals, listener: SignalsListener): this; once(event: "newListener", listener: NewListenerListener): this; once(event: "removeListener", listener: RemoveListenerListener): this; once(event: "multipleResolves", listener: MultipleResolveListener): this; prependListener(event: "beforeExit", listener: BeforeExitListener): this; prependListener(event: "disconnect", listener: DisconnectListener): this; prependListener(event: "exit", listener: ExitListener): this; prependListener(event: "rejectionHandled", listener: RejectionHandledListener): this; 
prependListener(event: "uncaughtException", listener: UncaughtExceptionListener): this; prependListener(event: "uncaughtExceptionMonitor", listener: UncaughtExceptionListener): this; prependListener(event: "unhandledRejection", listener: UnhandledRejectionListener): this; prependListener(event: "warning", listener: WarningListener): this; prependListener(event: "message", listener: MessageListener): this; prependListener(event: Signals, listener: SignalsListener): this; prependListener(event: "newListener", listener: NewListenerListener): this; prependListener(event: "removeListener", listener: RemoveListenerListener): this; prependListener(event: "multipleResolves", listener: MultipleResolveListener): this; prependOnceListener(event: "beforeExit", listener: BeforeExitListener): this; prependOnceListener(event: "disconnect", listener: DisconnectListener): this; prependOnceListener(event: "exit", listener: ExitListener): this; prependOnceListener(event: "rejectionHandled", listener: RejectionHandledListener): this; prependOnceListener(event: "uncaughtException", listener: UncaughtExceptionListener): this; prependOnceListener(event: "uncaughtExceptionMonitor", listener: UncaughtExceptionListener): this; prependOnceListener(event: "unhandledRejection", listener: UnhandledRejectionListener): this; prependOnceListener(event: "warning", listener: WarningListener): this; prependOnceListener(event: "message", listener: MessageListener): this; prependOnceListener(event: Signals, listener: SignalsListener): this; prependOnceListener(event: "newListener", listener: NewListenerListener): this; prependOnceListener(event: "removeListener", listener: RemoveListenerListener): this; prependOnceListener(event: "multipleResolves", listener: MultipleResolveListener): this; listeners(event: "beforeExit"): BeforeExitListener[]; listeners(event: "disconnect"): DisconnectListener[]; listeners(event: "exit"): ExitListener[]; listeners(event: "rejectionHandled"): RejectionHandledListener[]; 
listeners(event: "uncaughtException"): UncaughtExceptionListener[]; listeners(event: "uncaughtExceptionMonitor"): UncaughtExceptionListener[]; listeners(event: "unhandledRejection"): UnhandledRejectionListener[]; listeners(event: "warning"): WarningListener[]; listeners(event: "message"): MessageListener[]; listeners(event: Signals): SignalsListener[]; listeners(event: "newListener"): NewListenerListener[]; listeners(event: "removeListener"): RemoveListenerListener[]; listeners(event: "multipleResolves"): MultipleResolveListener[]; } interface Global { Array: typeof Array; ArrayBuffer: typeof ArrayBuffer; Boolean: typeof Boolean; Buffer: typeof Buffer; DataView: typeof DataView; Date: typeof Date; Error: typeof Error; EvalError: typeof EvalError; Float32Array: typeof Float32Array; Float64Array: typeof Float64Array; Function: typeof Function; GLOBAL: Global; Infinity: typeof Infinity; Int16Array: typeof Int16Array; Int32Array: typeof Int32Array; Int8Array: typeof Int8Array; Intl: typeof Intl; JSON: typeof JSON; Map: MapConstructor; Math: typeof Math; NaN: typeof NaN; Number: typeof Number; Object: typeof Object; Promise: typeof Promise; RangeError: typeof RangeError; ReferenceError: typeof ReferenceError; RegExp: typeof RegExp; Set: SetConstructor; String: typeof String; Symbol: Function; SyntaxError: typeof SyntaxError; TypeError: typeof TypeError; URIError: typeof URIError; Uint16Array: typeof Uint16Array; Uint32Array: typeof Uint32Array; Uint8Array: typeof Uint8Array; Uint8ClampedArray: typeof Uint8ClampedArray; WeakMap: WeakMapConstructor; WeakSet: WeakSetConstructor; clearImmediate: (immediateId: Immediate) => void; clearInterval: (intervalId: Timeout) => void; clearTimeout: (timeoutId: Timeout) => void; console: typeof console; decodeURI: typeof decodeURI; decodeURIComponent: typeof decodeURIComponent; encodeURI: typeof encodeURI; encodeURIComponent: typeof encodeURIComponent; escape: (str: string) => string; eval: typeof eval; global: Global; isFinite: typeof 
isFinite; isNaN: typeof isNaN; parseFloat: typeof parseFloat; parseInt: typeof parseInt; process: Process; /** * @deprecated Use `global`. */ root: Global; setImmediate: (callback: (...args: any[]) => void, ...args: any[]) => Immediate; setInterval: (callback: (...args: any[]) => void, ms: number, ...args: any[]) => Timeout; setTimeout: (callback: (...args: any[]) => void, ms: number, ...args: any[]) => Timeout; queueMicrotask: typeof queueMicrotask; undefined: typeof undefined; unescape: (str: string) => string; gc: () => void; v8debug?: any; } interface RefCounted { ref(): this; unref(): this; } // compatibility with older typings interface Timer extends RefCounted { hasRef(): boolean; refresh(): this; } interface Immediate extends RefCounted { hasRef(): boolean; _onImmediate: Function; // to distinguish it from the Timeout class } interface Timeout extends Timer { hasRef(): boolean; refresh(): this; } type TypedArray = Uint8Array | Uint8ClampedArray | Uint16Array | Uint32Array | Int8Array | Int16Array | Int32Array | Float32Array | Float64Array; type ArrayBufferView = TypedArray | DataView; interface Require { /* tslint:disable-next-line:callable-types */ (id: string): any; resolve: RequireResolve; cache: Dict<NodeModule>; /** * @deprecated */ extensions: RequireExtensions; main: Module | undefined; } interface RequireResolve { (id: string, options?: { paths?: string[]; }): string; paths(request: string): string[] | null; } interface RequireExtensions extends Dict<(m: Module, filename: string) => any> { '.js': (m: Module, filename: string) => any; '.json': (m: Module, filename: string) => any; '.node': (m: Module, filename: string) => any; } interface Module { exports: any; require: Require; id: string; filename: string; loaded: boolean; parent: Module | null; children: Module[]; paths: string[]; } interface Dict<T> { [key: string]: T | undefined; } interface ReadOnlyDict<T> { readonly [key: string]: T | undefined; } }
the_stack
// Unit tests for the <Countdown /> React component (jest + enzyme).
// Date.now is replaced by a jest mock so every test drives the "wall clock"
// explicitly via now.mockReturnValue(...) in lockstep with jest's fake timers.
import * as React from 'react';
import { mount, ReactWrapper } from 'enzyme';
import Countdown, { CountdownProps } from './Countdown';
import { calcTimeDelta, formatTimeDelta } from './utils';
import { CountdownProps as LegacyCountdownProps } from './LegacyCountdown';

const timeDiff = 90110456;
const now = jest.fn(() => 1482363367071);
Date.now = now;

// Fully-elapsed time delta; individual tests spread-override single fields.
const defaultStats = {
  total: 0,
  days: 0,
  hours: 0,
  minutes: 0,
  seconds: 0,
  milliseconds: 0,
  completed: false,
};

describe('<Countdown />', () => {
  jest.useFakeTimers();

  let wrapper: ReactWrapper<CountdownProps, any, Countdown>;
  let countdownDate: number;
  const countdownMs = 10000;

  beforeEach(() => {
    // Re-pin Date.now (a previous test may have changed the mock's value).
    Date.now = now;
    const date = Date.now() + countdownMs;
    const root = document.createElement('div');
    countdownDate = date;
    wrapper = mount(<Countdown date={date} />, { attachTo: root });
  });

  it('should render custom renderer output', () => {
    wrapper = mount(
      <Countdown date={Date.now() + timeDiff} renderer={props => ( <div> {props.days} {props.hours} {props.minutes} {props.seconds} </div> )} />
    );
    expect(wrapper).toMatchSnapshot();
  });

  it('should render and unmount component on countdown end', () => {
    const zeroPadTime = 0;

    // Completion component rendered once the countdown reaches zero.
    class Completionist extends React.Component<any> {
      componentDidMount() {}

      render() {
        return ( <div> Completed!
          {this.props.name} {this.props.children} </div> );
      }
    }

    let completionist;
    // Spy on mount to prove the completion component only mounts at the end.
    Completionist.prototype.componentDidMount = jest.fn();

    wrapper = mount(
      <Countdown date={Date.now() + timeDiff} zeroPadTime={zeroPadTime}> <Completionist ref={el => { completionist = el; }} name="master" > Another child </Completionist> </Countdown>
    );
    expect(Completionist.prototype.componentDidMount).not.toBeCalled();
    expect(wrapper).toMatchSnapshot();

    // Forward in time
    wrapper.setProps({ date: 0 });
    expect(wrapper.state().timeDelta.completed).toBe(true);
    expect(wrapper.props().children!.type).toBe(Completionist);
    expect(Completionist.prototype.componentDidMount).toBeCalled();

    const computedProps = { ...wrapper.props() };
    delete computedProps.children;

    // The completion child receives the full countdown API via props.
    const obj = wrapper.instance();
    const { timeDelta } = wrapper.state();
    expect(completionist.props).toEqual({
      countdown: {
        ...timeDelta,
        api: obj.getApi(),
        props: wrapper.props(),
        formatted: formatTimeDelta(timeDelta, { zeroPadTime }),
      },
      name: 'master',
      children: 'Another child',
    });
    expect(wrapper).toMatchSnapshot();
  });

  it('should render with daysInHours => true', () => {
    wrapper = mount(<Countdown date={Date.now() + timeDiff} daysInHours />);
    expect(wrapper).toMatchSnapshot();
  });

  it('should render with zeroPadDays => 3', () => {
    wrapper = mount(<Countdown date={Date.now() + 10 * 86400 * 1000} zeroPadDays={3} />);
    expect(wrapper).toMatchSnapshot();
  });

  it('should trigger onTick and onComplete callbacks', () => {
    const onTick = jest.fn(stats => {
      expect(stats).toEqual(calcTimeDelta(countdownDate));
    });
    const onComplete = jest.fn(stats => {
      expect(stats.total).toEqual(0);
    });
    wrapper.setProps({ onTick, onComplete });
    expect(onTick).not.toBeCalled();

    // Forward 6s in time
    now.mockReturnValue(countdownDate - 6000);
    jest.runTimersToTime(6000);
    expect(onTick.mock.calls.length).toBe(6);
    expect(wrapper.state().timeDelta.total).toBe(6000);

    wrapper.update();
    expect(wrapper).toMatchSnapshot();

    // Forward 3 more seconds
    now.mockReturnValue(countdownDate - 1000);
    jest.runTimersToTime(3000);
    expect(onTick.mock.calls.length).toBe(9);
    expect(wrapper.state().timeDelta.total).toBe(1000);
    expect(wrapper.state().timeDelta.completed).toBe(false);

    // The End: onComplete callback gets triggered instead of the onTick callback
    now.mockReturnValue(countdownDate);
    jest.runTimersToTime(1000);
    expect(onTick.mock.calls.length).toBe(9);
    expect(onTick).toBeCalledWith({ ...defaultStats, total: 1000, seconds: 1, });
    expect(onComplete.mock.calls.length).toBe(1);
    expect(onComplete).toBeCalledWith({ ...defaultStats, completed: true });
    expect(wrapper.state().timeDelta.completed).toBe(true);
  });

  it('should run through the controlled component by updating the date prop', () => {
    const root = document.createElement('div');
    // controlled => no internal interval; progress only via prop updates.
    wrapper = mount(<Countdown date={1000} controlled />, { attachTo: root });

    const obj = wrapper.instance();
    const api = obj.getApi();
    expect(obj.interval).toBeUndefined();
    expect(wrapper.state().timeDelta.completed).toBe(false);
    expect(api.isCompleted()).toBe(false);

    wrapper.setProps({ date: 0 });
    expect(wrapper.state().timeDelta.total).toBe(0);
    expect(wrapper.state().timeDelta.completed).toBe(true);
    expect(api.isCompleted()).toBe(true);
  });

  it('should only reset time delta state when date prop is changing', () => {
    const root = document.createElement('div');
    wrapper = mount(<Countdown date={1000} />, { attachTo: root });
    const obj = wrapper.instance();
    obj.setTimeDeltaState = jest.fn();

    // Helper: full prop objects so setProps never drops existing props.
    function mergeProps(partialProps: Partial<CountdownProps>): CountdownProps {
      return { ...wrapper.props(), ...partialProps };
    }

    wrapper.setProps(mergeProps({ date: 500 }));
    expect(obj.setTimeDeltaState).toHaveBeenCalledTimes(1);

    wrapper.setProps(mergeProps({ intervalDelay: 999 }));
    expect(obj.setTimeDeltaState).toHaveBeenCalledTimes(1);

    wrapper.setProps(mergeProps({ date: 500 }));
    expect(obj.setTimeDeltaState).toHaveBeenCalledTimes(1);

    wrapper.setProps(mergeProps({ precision: 3 }));
    expect(obj.setTimeDeltaState).toHaveBeenCalledTimes(1);

    wrapper.setProps(mergeProps({ date: 750 }));
    expect(obj.setTimeDeltaState).toHaveBeenCalledTimes(2);

    wrapper.setProps(mergeProps({ children: <div /> }));
    expect(obj.setTimeDeltaState).toHaveBeenCalledTimes(2);

    wrapper.setProps(mergeProps({ date: 1000 }));
    expect(obj.setTimeDeltaState).toHaveBeenCalledTimes(3);
  });

  it('should not (try to) set state after component unmount', () => {
    expect(wrapper.state().timeDelta.completed).toBe(false);

    now.mockReturnValue(countdownDate - 6000);
    jest.runTimersToTime(6000);
    expect(wrapper.state().timeDelta.total).toBe(6000);

    // Simulate unmount; subsequent ticks must not touch state.
    wrapper.instance().mounted = false;
    now.mockReturnValue(countdownDate - 3000);
    jest.runTimersToTime(3000);
    expect(wrapper.state().timeDelta.total).toBe(6000);
  });

  it('should set countdown status to STOPPED if a prop-update occurs that updates a completed countdown', () => {
    wrapper = mount(<Countdown date={countdownDate} />);
    const obj = wrapper.instance();
    const api = obj.getApi();
    expect(api.isStarted()).toBe(true);

    wrapper.setProps({ date: countdownDate + 1000 });
    expect(api.isStarted()).toBe(true);

    wrapper.setProps({ date: 0 });
    expect(api.isCompleted()).toBe(true);

    // New future date after completion => countdown goes to STOPPED.
    wrapper.setProps({ date: countdownDate + 1000 });
    expect(api.isStopped()).toBe(true);
  });

  it('should pause => start => pause => stop and restart countdown', () => {
    const spies = { onMount: jest.fn(), onStart: jest.fn(), onPause: jest.fn(), onStop: jest.fn(), };
    wrapper = mount(<Countdown date={countdownDate} {...spies} />);
    const obj = wrapper.instance();
    const api = obj.getApi();
    expect(obj.offsetStartTimestamp).toBe(0);
    expect(obj.offsetTime).toBe(0);
    expect(api.isStarted()).toBe(true);
    expect(api.isPaused()).toBe(false);
    expect(api.isStopped()).toBe(false);
    expect(api.isCompleted()).toBe(false);
    expect(spies.onMount).toHaveBeenCalledTimes(1);
    expect(spies.onMount).toHaveBeenCalledWith({ completed: false, total: 10000, days: 0, hours: 0, minutes: 0, seconds: 10, milliseconds: 0, });
    expect(spies.onStart).toHaveBeenCalledTimes(1);
    expect(spies.onPause).toHaveBeenCalledTimes(0);
    expect(spies.onStop).toHaveBeenCalledTimes(0);

    // Run 2s, then pause with 8s remaining.
    let runMs = 2000;
    const nowBeforePause = countdownDate - (countdownMs - runMs);
    now.mockReturnValue(nowBeforePause);
    jest.runTimersToTime(runMs);
    expect(wrapper.state().timeDelta.total).toBe(countdownMs - runMs);
    api.pause();
    expect(api.isStarted()).toBe(false);
    expect(api.isPaused()).toBe(true);
    expect(api.isStopped()).toBe(false);
    expect(api.isCompleted()).toBe(false);
    expect(spies.onMount).toHaveBeenCalledTimes(1);
    expect(spies.onStart).toHaveBeenCalledTimes(1);
    expect(spies.onPause).toHaveBeenCalledTimes(1);
    expect(spies.onPause).toHaveBeenCalledWith({ completed: false, total: 8000, days: 0, hours: 0, minutes: 0, seconds: 8, milliseconds: 0, });
    expect(spies.onStop).toHaveBeenCalledTimes(0);

    // Calling pause() a 2nd time while paused should return early
    api.pause();
    expect(api.isPaused()).toBe(true);
    expect(spies.onPause).toHaveBeenCalledTimes(1);

    // While paused, wall clock advances but the remaining time is frozen.
    runMs += 2000;
    const pausedMs = 2000;
    now.mockReturnValue(countdownDate - (countdownMs - runMs));
    jest.runTimersToTime(runMs);
    expect(countdownMs - runMs + pausedMs).toBe(8000);
    expect(wrapper.state().timeDelta.total).toBe(8000);
    expect(obj.offsetStartTimestamp).toBe(nowBeforePause);
    expect(obj.offsetTime).toBe(0);

    // Resume: the paused duration becomes the countdown's time offset.
    api.start();
    expect(api.isStarted()).toBe(true);
    expect(api.isPaused()).toBe(false);
    expect(api.isStopped()).toBe(false);
    expect(api.isCompleted()).toBe(false);
    expect(spies.onMount).toHaveBeenCalledTimes(1);
    expect(spies.onStart).toHaveBeenCalledTimes(2);
    expect(spies.onStart).toHaveBeenCalledWith({ completed: false, total: 8000, days: 0, hours: 0, minutes: 0, seconds: 8, milliseconds: 0, });
    expect(spies.onPause).toHaveBeenCalledTimes(1);
    expect(spies.onStop).toHaveBeenCalledTimes(0);
    expect(wrapper.state().timeDelta.total).toBe(8000);
    expect(obj.offsetStartTimestamp).toBe(0);
    expect(obj.offsetTime).toBe(pausedMs);

    runMs += 1000;
    now.mockReturnValue(countdownDate - (countdownMs - runMs));
    jest.runTimersToTime(runMs);
    expect(countdownMs - runMs + pausedMs).toBe(7000);
    expect(wrapper.state().timeDelta.total).toBe(7000);
    expect(obj.offsetStartTimestamp).toBe(0);
    expect(obj.offsetTime).toBe(pausedMs);

    runMs += 1000;
    now.mockReturnValue(countdownDate - (countdownMs - runMs));
    jest.runTimersToTime(runMs);
    api.pause();
    expect(obj.offsetStartTimestamp).toBe(now());
    expect(obj.offsetTime).toBe(2000);
    expect(wrapper.state().timeDelta).toEqual({ completed: false, total: 6000, days: 0, hours: 0, minutes: 0, seconds: 6, milliseconds: 0, });

    // Stop resets the countdown back to its full duration.
    runMs += 1000;
    now.mockReturnValue(countdownDate - (countdownMs - runMs));
    jest.runTimersToTime(runMs);
    api.stop();
    expect(obj.offsetStartTimestamp).toBe(now());
    expect(obj.offsetTime).toBe(runMs);
    expect(api.isStarted()).toBe(false);
    expect(api.isPaused()).toBe(false);
    expect(api.isStopped()).toBe(true);
    expect(api.isCompleted()).toBe(false);
    expect(spies.onMount).toHaveBeenCalledTimes(1);
    expect(spies.onStart).toHaveBeenCalledTimes(2);
    expect(spies.onPause).toHaveBeenCalledTimes(2);
    expect(spies.onStop).toHaveBeenCalledTimes(1);
    expect(spies.onStop).toHaveBeenCalledWith({ completed: false, total: 10000, days: 0, hours: 0, minutes: 0, seconds: 10, milliseconds: 0, });

    // Calling stop() a 2nd time while stopped should return early
    api.stop();
    expect(api.isStopped()).toBe(true);
    expect(spies.onStop).toHaveBeenCalledTimes(1);

    // Restart and run to completion.
    api.start();
    runMs += 10000;
    now.mockReturnValue(countdownDate + runMs + pausedMs);
    jest.runTimersToTime(countdownMs + pausedMs);
    expect(wrapper.state().timeDelta.total).toBe(0);
    expect(wrapper.state().timeDelta.completed).toBe(true);
    expect(api.isCompleted()).toBe(true);
    expect(obj.offsetStartTimestamp).toBe(0);
    expect(obj.offsetTime).toBe(7000);
    expect(spies.onMount).toHaveBeenCalledTimes(1);
    expect(spies.onStart).toHaveBeenCalledTimes(3);
    expect(spies.onPause).toHaveBeenCalledTimes(2);
    expect(spies.onStop).toHaveBeenCalledTimes(1);
  });

  it('should not auto start countdown', () => {
    const spies = { onStart: jest.fn(), };
    wrapper = mount(<Countdown date={countdownDate} autoStart={false} {...spies} />);
    const obj = wrapper.instance();
    const api = obj.getApi();
    // autoStart={false} mounts the countdown in the STOPPED state.
    expect(spies.onStart).toHaveBeenCalledTimes(0);
    expect(api.isStarted()).toBe(false);
    expect(api.isPaused()).toBe(false);
    expect(api.isStopped()).toBe(true);
    expect(api.isCompleted()).toBe(false);
    expect(obj).toEqual( expect.objectContaining({ offsetStartTimestamp: countdownDate - countdownMs, offsetTime: 0, }) );

    api.start();
    expect(spies.onStart).toHaveBeenCalledTimes(1);
    expect(api.isStarted()).toBe(true);
    expect(api.isPaused()).toBe(false);
    expect(api.isStopped()).toBe(false);
    expect(api.isCompleted()).toBe(false);
    expect(obj).toEqual( expect.objectContaining({ offsetStartTimestamp: 0, offsetTime: 0, }) );

    // Calling start() a 2nd time while started should return early
    api.start();
    expect(spies.onStart).toHaveBeenCalledTimes(1);
  });

  it('should continuously call the renderer if date is in the future', () => {
    const renderer = jest.fn(() => <div />);
    wrapper = mount(<Countdown date={countdownDate} renderer={renderer} />);
    // Mount + initial tick => renderer already called twice.
    expect(renderer).toHaveBeenCalledTimes(2);

    // Forward 1s
    now.mockReturnValue(countdownDate - 9000);
    jest.runTimersToTime(1000);
    expect(renderer).toHaveBeenCalledTimes(3);

    // Forward 2s
    now.mockReturnValue(countdownDate - 8000);
    jest.runTimersToTime(1000);
    expect(renderer).toHaveBeenCalledTimes(4);
    expect(wrapper.state().timeDelta.total).toBe(8000);
    expect(wrapper.state().timeDelta.completed).toBe(false);
  });

  it('should stop immediately if date is in the past', () => {
    const renderer = jest.fn(() => <div />);
    countdownDate = Date.now() - 10000;
    wrapper = mount(<Countdown date={countdownDate} renderer={renderer} />);
    expect(renderer).toHaveBeenCalledTimes(2);

    // Forward 1s
    now.mockReturnValue(countdownDate - 9000);
    jest.runTimersToTime(1000);
    expect(renderer).toHaveBeenCalledTimes(2);

    // Forward 2s
    now.mockReturnValue(countdownDate - 8000);
    jest.runTimersToTime(1000);
    expect(renderer).toHaveBeenCalledTimes(2);
    expect(wrapper.state().timeDelta.total).toBe(0);
    expect(wrapper.state().timeDelta.completed).toBe(true);
  });

  it('should not stop the countdown and go into overtime', () => {
    const onTick = jest.fn();
    wrapper = mount(
      <Countdown date={countdownDate} overtime={true} onTick={onTick}> <div>Completed? Overtime!</div> </Countdown>
    );
    const obj = wrapper.instance();
    const api = obj.getApi();

    // Forward 9s
    now.mockReturnValue(countdownDate - 1000);
    jest.runTimersToTime(9000);
    expect(wrapper.text()).toMatchInlineSnapshot(`"00:00:00:01"`);
    expect(onTick).toHaveBeenCalledTimes(9);

    // Forward 1s
    now.mockReturnValue(countdownDate);
    jest.runTimersToTime(1000);
    expect(wrapper.text()).toMatchInlineSnapshot(`"00:00:00:00"`);
    expect(onTick).toHaveBeenCalledTimes(10);
    expect(wrapper.state().timeDelta.total).toBe(0);
    expect(wrapper.state().timeDelta.completed).toBe(true);
    expect(api.isCompleted()).toBe(false);

    // Forward 1s (overtime)
    now.mockReturnValue(countdownDate + 1000);
    jest.runTimersToTime(1000);
    expect(wrapper.text()).toMatchInlineSnapshot(`"-00:00:00:01"`);
    expect(onTick).toHaveBeenCalledTimes(11);
    expect(wrapper.state().timeDelta.total).toBe(-1000);
    expect(wrapper.state().timeDelta.completed).toBe(true);
    expect(api.isCompleted()).toBe(false);
  });

  // Legacy API: count-based (seconds) countdown instead of a target date.
  describe('legacy mode', () => {
    class LegacyCountdownOverlay extends React.Component<LegacyCountdownProps> {
      render() {
        return <div>{this.props.count}</div>;
      }
    }

    it('should render legacy countdown', () => {
      wrapper = mount(
        <Countdown count={3}> <LegacyCountdownOverlay /> </Countdown>
      );
      expect(wrapper.find('div').text()).toBe('3');
    });

    it('should render legacy countdown without count prop', () => {
      wrapper = mount(
        <Countdown> <LegacyCountdownOverlay /> </Countdown>
      );
      expect(wrapper.find('div').text()).toBe('3');
    });

    it('should render null without children', () => {
      wrapper = mount(<Countdown count={3}></Countdown>);
      expect(wrapper.html()).toBe('');
      wrapper.setProps({});
      wrapper.unmount();
    });

    it('should allow adding time in seconds', () => {
      const ref = React.createRef<Countdown>();
      wrapper = mount(
        <> <Countdown ref={ref} count={3}> <LegacyCountdownOverlay /> </Countdown> </>
      );
      expect(wrapper.find('div').text()).toBe('3');
      ref && ref.current && ref.current.addTime(2);
      jest.runOnlyPendingTimers();
      wrapper.update();
      expect(wrapper.find('div').text()).toBe('4');
    });

    it('should trigger onComplete callback when count reaches 0', () => {
      const ref = React.createRef<Countdown>();
      const onComplete = jest.fn();
      wrapper = mount(
        <> <Countdown ref={ref} count={3} onComplete={onComplete}> <LegacyCountdownOverlay /> </Countdown> </>
      );
      expect(onComplete).not.toHaveBeenCalled();
      ref && ref.current && ref.current.addTime(-2);
      jest.runOnlyPendingTimers();
      wrapper.update();
      expect(onComplete).toHaveBeenCalled();
      expect(wrapper.find('div').text()).toBe('1');
    });
  });

  afterEach(() => {
    // detach() throws when the wrapper was mounted without attachTo;
    // best-effort cleanup, so the error is deliberately swallowed.
    try {
      wrapper.detach();
    } catch (e) {}
  });
});
the_stack
// Type definitions for the Lightpick date (range) picker.
// All date inputs/outputs are moment-based; `null` models "no selection".
import moment = require('moment');

export = Lightpick;
export as namespace Lightpick;

declare class Lightpick {
    // Creates and wires up a picker; `options.field` is the bound form field.
    constructor(options: Lightpick.Options);
    gotoToday(): void;
    gotoDate(date?: Lightpick.InputDate): void;
    gotoMonth(month: number): void;
    gotoYear(year: number): void;
    prevMonth(): void;
    nextMonth(): void;
    setStartDate(date?: Lightpick.InputDate, preventOnSelect?: boolean): void;
    setEndDate(date?: Lightpick.InputDate, preventOnSelect?: boolean): void;
    /**
     * Set date when singleDate is true.
     */
    setDate(date?: Lightpick.InputDate, preventOnSelect?: boolean): void;
    /**
     * Set date range.
     */
    setDateRange(start?: Lightpick.InputDate, end?: Lightpick.InputDate, preventOnSelect?: boolean): void;
    setDisableDates(dates: ReadonlyArray<Lightpick.DisabledDate>): void;
    /**
     * Return current start of date range as moment object.
     */
    getStartDate(): Lightpick.OutputDate;
    /**
     * Return current end of date range as moment object.
     */
    getEndDate(): Lightpick.OutputDate;
    /**
     * Return current date as moment object.
     */
    getDate(): Lightpick.OutputDate;
    /**
     * Returns the date in a string format.
     */
    toString(format: Lightpick.FormatSpecification): string;
    /**
     * Make the picker visible.
     */
    show(): void;
    /**
     * Hide the picker.
     */
    hide(): void;
    /**
     * Hide the picker and remove all event listeners.
     */
    destroy(): void;
    // Clears the current selection.
    reset(): void;
    /**
     * Update picker options.
     */
    reloadOptions(options: Lightpick.Options): void;
    /**
     * Tells whether the picker is currently visible or not.
     * Visibility can be changed with show() / hide() methods.
     */
    readonly isShowing: boolean;
    /**
     * The calendar containing HTML element.
     */
    readonly el: HTMLElement;
}

declare namespace Lightpick {
    type InputDate = moment.MomentInput | null;
    type OutputDate = moment.Moment | null;
    type InputDateRange = [InputDate, InputDate];
    // A single disabled date or an inclusive [from, to] range.
    type DisabledDate = InputDate | InputDateRange;
    type FormatSpecification = moment.MomentFormatSpecification;

    interface Options {
        /**
         * Bind the datepicker to a form field.
         */
        field: Options.Field;
        /**
         * If exists then end of date range will set here.
         */
        secondField?: Options.Field | undefined;
        /**
         * ISO day of the week.
         */
        firstDay?: Options.DayOfWeek | undefined;
        /**
         * Selector of the parent element that the date range picker will be added to, if not provided this will be 'body'.
         */
        parentEl?: string | Node | undefined;
        /**
         * Language code for names of days, months by Date.prototype.toLocaleString(). 'auto' will try detect user browser language.
         */
        lang?: string | undefined;
        /**
         * The default output format.
         */
        format?: string | undefined;
        /**
         * Separator between dates when one field.
         */
        separator?: string | undefined;
        /**
         * Number of visible months.
         */
        numberOfMonths?: number | undefined;
        /**
         * Number of columns months.
         */
        numberOfColumns?: number | undefined;
        /**
         * Choose a single date instead of a date range.
         */
        singleDate?: boolean | undefined;
        /**
         * Close calendar when picked date/range.
         */
        autoclose?: boolean | undefined;
        /**
         * Repick start/end instead of new range. This option working only when exists `secondField`.
         */
        repick?: boolean | undefined;
        // Initial start of the selected range (or the date when singleDate).
        startDate?: InputDate | undefined;
        // Initial end of the selected range.
        endDate?: InputDate | undefined;
        /**
         * The minimum/earliest date that can be selected.
         */
        minDate?: InputDate | undefined;
        /**
         * The maximum/latest date that can be selected.
         */
        maxDate?: InputDate | undefined;
        // Dates (or ranges) that cannot be selected.
        disableDates?: ReadonlyArray<DisabledDate> | undefined;
        /**
         * Select second date after the first selected date.
         */
        selectForward?: boolean | undefined;
        /**
         * Select second date before the first selected date.
         */
        selectBackward?: boolean | undefined;
        /**
         * The minimum days of the selected range.
         */
        minDays?: number | undefined;
        /**
         * The maximum days of the selected range.
         */
        maxDays?: number | undefined;
        /**
         * Show tooltip.
         */
        hoveringTooltip?: boolean | undefined;
        /**
         * Close calendar when clicked outside the elements specified in field or parentEl. Recommended use when autoclose is set to false.
         */
        hideOnBodyClick?: boolean | undefined;
        /**
         * Footer calendar, if set to `true` will use default footer (Reset/Apply buttons) or custom string (html).
         */
        footer?: boolean | string | undefined;
        /**
         * If set to `false` then will reset selected range when disabled dates exists in selected range.
         */
        disabledDatesInRange?: boolean | undefined;
        /**
         * Calc date range in nights. (For hotels when last date doesn't include to range.)
         */
        tooltipNights?: boolean | undefined;
        // Where the calendar drops relative to the field.
        orientation?: Options.Orientation | undefined;
        /**
         * Disable Saturday and Sunday.
         */
        disableWeekends?: boolean | undefined;
        /**
         * Show calendar inline. If true and parentEl is not provided then will use parentNode of field.
         */
        inline?: boolean | undefined;
        /**
         * Determines the weekday display style.
         * Two weekdays may have the same narrow style for some locales (e.g. Tuesday's narrow style is also T).
         */
        weekdayStyle?: Options.WeekdayStyle | undefined;
        /**
         * Dropdown selections for years, months. Can be false for disable both dropdowns.
         */
        dropdowns?: boolean | Options.Dropdowns | undefined;
        locale?: Options.Locale | undefined;
        /**
         * Triggered when either date / start date or end date has been changed.
         */
        onSelect?: Options.OnSelectFn | undefined;
        /**
         * Triggered when start date has been changed.
         */
        onSelectStart?: Options.OnSelectStartEndFn | undefined;
        /**
         * Triggered when end date has been changed.
         */
        onSelectEnd?: Options.OnSelectStartEndFn | undefined;
        /**
         * Triggered when calendar has been opened.
         */
        onOpen?: Options.OnOpenFn | undefined;
        /**
         * Triggered when calendar has been closed.
         */
        onClose?: Options.OnCloseFn | undefined;
        // Triggered with a message when an invalid selection is attempted.
        onError?: Options.OnErrorFn | undefined;
        /**
         * Triggered when the months select is changed.
         */
        onMonthsChange?: Options.OnMonthsChangeFn | undefined;
        /**
         * Triggered when the years select is changed.
         */
        onYearsChange?: Options.OnYearsChangeFn | undefined;
    }

    namespace Options {
        type Field = Element & { value: string };
        type DayOfWeek = 1 | 2 | 3 | 4 | 5 | 6 | 7;
        type Orientation =
            | 'auto'
            | 'left'
            | 'right'
            | 'top'
            | 'bottom'
            | 'top left'
            | 'top right'
            | 'bottom left'
            | 'bottom right';
        type WeekdayStyle = 'long' | 'short' | 'narrow';

        interface Dropdowns {
            /**
             * Can be false for disable dropdown of years.
             */
            years?: boolean | Dropdowns.Years | undefined;
            /**
             * true/false for enable/disable dropdown of months.
             */
            months?: boolean | undefined;
        }

        namespace Dropdowns {
            // Bounds for the years dropdown.
            interface Years {
                min?: number | undefined;
                max?: number | undefined;
            }
        }

        interface Locale {
            /**
             * Text for buttons.
             */
            buttons?: Locale.Buttons | undefined;
            /**
             * Text for tooltip.
             */
            tooltip?: Partial<Locale.PluralizeFnLocale> | undefined;
            /**
             * Show tooltip text on disabled dates. (Eg. «Already booked»)
             */
            tooltipOnDisabled?: string | undefined;
            pluralize?: Locale.PluralizeFn | undefined;
        }

        namespace Locale {
            interface Buttons {
                prev?: string | undefined;
                next?: string | undefined;
                close?: string | undefined;
                reset?: string | undefined;
                apply?: string | undefined;
            }
            /**
             * Function for calc plural text. More examples for another locales on betsol/numerous.
             *
             * @see https://github.com/betsol/numerous/tree/master/locales
             */
            interface PluralizeFn {
                (i: string | number, locale: PluralizeFnLocale): string;
            }
            interface PluralizeFnLocale {
                one: string;
                other: string;
            }
        }

        /**
         * Callback function for when a date is selected.
         */
        interface OnSelectFn {
            (this: Lightpick, startDate: OutputDate, endDate: OutputDate): void;
        }
        /**
         * Callback function for when either a start or an end date is selected.
         */
        interface OnSelectStartEndFn {
            (this: Lightpick, date: OutputDate): void;
        }
        /**
         * Callback function for when the picker becomes visible.
         */
        interface OnOpenFn {
            (this: Lightpick): void;
        }
        /**
         * Callback function for when the picker is hidden.
         */
        interface OnCloseFn {
            (this: Lightpick): void;
        }
        interface OnErrorFn {
            (this: Lightpick, message: string): void;
        }
        /**
         * Callback function for when the months select is changed.
         */
        interface OnMonthsChangeFn {
            (this: Lightpick, month: number): void;
        }
        /**
         * Callback function for when the years select is changed.
         */
        interface OnYearsChangeFn {
            (this: Lightpick, year: number): void;
        }
    }
}
the_stack
import { Circle, Curve, PathItem, Point, applyTransforms, convertToRelative, js2path, path2js, } from './_path'; import { JsApi } from '../lib/jsapi'; import { Plugin } from './_types'; import { cleanupOutData } from './_tools'; let roundData: (data: number[]) => number[]; let precision: number; let error: number; let arcThreshold: number; let arcTolerance: number; export const defaultParams = { applyTransforms: true, applyTransformsStroked: true, makeArcs: undefined as { threshold: number; tolerance: number }, // { // threshold: 2.5, // Coefficient of rounding error. // tolerance: 0.5, // Percentage of radius. // }, straightCurves: true, lineShorthands: true, curveSmoothShorthands: true, floatPrecision: 3, transformPrecision: 5, removeUseless: true, collapseRepeated: true, utilizeAbsolute: true, leadingZero: false, negativeExtraSpace: true, }; export type Params = typeof defaultParams; /** * Convert absolute Path to relative, collapse repeated instructions, * detect and convert Lineto shorthands, remove useless instructions like "l0,0", * trim useless delimiters and leading zeros, decrease accuracy of floating-point numbers. */ function fn(item: JsApi, params: Params) { if ( !(item.isElem('path') || item.isElem('clip-path')) || // TODO: detect if a path w/ a name can be optimized (i.e. if it isn't being morphed) item.hasAttr('android:name') || !item.hasAttr('android:pathData') ) { return item; } precision = params.floatPrecision; error = +Math.pow(0.1, precision).toFixed(precision); roundData = precision > 0 && precision < 20 ? strongRound : round; if (params.makeArcs) { arcThreshold = params.makeArcs.threshold; arcTolerance = params.makeArcs.tolerance; } let data = path2js(item); if (!data.length) { return item; } convertToRelative(data); data = filters(data, params); if (params.utilizeAbsolute) { data = convertToMixed(data, params); } js2path(item, data, params); return item; } function filters( // TODO: avoid this caching hackery? 
pathRes: Array<PathItem & { sdata?: number[] }>, params: Params, ) { const stringify = data2Path.bind(undefined, params); const relSubpoint = [0, 0]; const pathBase = [0, 0]; let prev: any = {}; pathRes = pathRes.filter(function(item, index, path) { let instruction = item.instruction; let data = item.data; let next = path[index + 1]; if (data) { let sdata = data; let circle: Circle; if (instruction === 's') { sdata = [0, 0].concat(data); if ('cs'.indexOf(prev.instruction) > -1) { const pdata = prev.data; const n = pdata.length; // (-x, -y) of the prev tangent point relative to the current point sdata[0] = pdata[n - 2] - pdata[n - 4]; sdata[1] = pdata[n - 1] - pdata[n - 3]; } } // convert curves to arcs if possible if ( params.makeArcs && (instruction === 'c' || instruction === 's') && isConvex(sdata) && (circle = findCircle(sdata as Curve)) ) { const r = roundData([circle.radius])[0]; let angle = findArcAngle(sdata, circle); const sweep = sdata[5] * sdata[0] - sdata[4] * sdata[1] > 0 ? 1 : 0; let arc = { instruction: 'a', data: [r, r, 0, 0, sweep, sdata[4], sdata[5]], coords: item.coords.slice(), base: item.base, }; const output = [arc]; // relative coordinates to adjust the found circle const relCenter: Point = [ circle.center[0] - sdata[4], circle.center[1] - sdata[5], ]; const relCircle = { center: relCenter, radius: circle.radius }; const arcCurves = [item]; let hasPrev = 0; let suffix = ''; let nextLonghand; if ( (prev.instruction === 'c' && isConvex(prev.data) && isArcPrev(prev.data, circle)) || (prev.instruction === 'a' && prev.sdata && isArcPrev(prev.sdata, circle)) ) { arcCurves.unshift(prev); arc.base = prev.base; arc.data[5] = arc.coords[0] - arc.base[0]; arc.data[6] = arc.coords[1] - arc.base[1]; const prevData = prev.instruction === 'a' ? 
prev.sdata : prev.data; angle += findArcAngle(prevData, { center: [prevData[4] + relCenter[0], prevData[5] + relCenter[1]], radius: circle.radius, }); if (angle > Math.PI) { arc.data[3] = 1; } hasPrev = 1; } // check if next curves are fitting the arc let j = index; // tslint:disable-next-line:no-bitwise for (; (next = path[++j]) && ~'cs'.indexOf(next.instruction); ) { let nextDataTemp = next.data; if (next.instruction === 's') { nextLonghand = makeLonghand( { instruction: 's', data: next.data.slice() }, path[j - 1].data, ); nextDataTemp = nextLonghand.data; nextLonghand.data = nextDataTemp.slice(0, 2); suffix = stringify([nextLonghand]); } const nextData = nextDataTemp as Curve; if (isConvex(nextData) && isArc(nextData, relCircle)) { angle += findArcAngle(nextData, relCircle); if (angle - 2 * Math.PI > 1e-3) { break; // more than 360° } if (angle > Math.PI) { arc.data[3] = 1; } arcCurves.push(next); if (2 * Math.PI - angle > 1e-3) { // less than 360° arc.coords = next.coords; arc.data[5] = arc.coords[0] - arc.base[0]; arc.data[6] = arc.coords[1] - arc.base[1]; } else { // full circle, make a half-circle arc and add a second one arc.data[5] = 2 * (relCircle.center[0] - nextData[4]); arc.data[6] = 2 * (relCircle.center[1] - nextData[5]); arc.coords = [ arc.base[0] + arc.data[5], arc.base[1] + arc.data[6], ]; arc = { instruction: 'a', data: [ r, r, 0, 0, sweep, next.coords[0] - arc.coords[0], next.coords[1] - arc.coords[1], ], coords: next.coords, base: arc.coords, }; output.push(arc); j++; break; } relCenter[0] -= nextData[4]; relCenter[1] -= nextData[5]; } else { break; } } if ((stringify(output) + suffix).length < stringify(arcCurves).length) { if (path[j] && path[j].instruction === 's') { makeLonghand(path[j], path[j - 1].data); } if (hasPrev) { const prevArc = output.shift(); roundData(prevArc.data); relSubpoint[0] += prevArc.data[5] - prev.data[prev.data.length - 2]; relSubpoint[1] += prevArc.data[6] - prev.data[prev.data.length - 1]; prev.instruction = 'a'; 
prev.data = prevArc.data; item.base = prev.coords = prevArc.coords; } arc = output.shift(); if (arcCurves.length === 1) { item.sdata = sdata.slice(); // preserve curve data for future checks } else if (arcCurves.length - 1 - hasPrev > 0) { // filter out consumed next items path.splice.apply(path, [ index + 1, arcCurves.length - 1 - hasPrev, ...output, ]); } if (!arc) { return false; } instruction = 'a'; data = arc.data; item.coords = arc.coords; } } // Rounding relative coordinates, taking in account accummulating error // to get closer to absolute coordinates. Sum of rounded value remains same: // l .25 3 .25 2 .25 3 .25 2 -> l .3 3 .2 2 .3 3 .2 2 if ('mltqsc'.indexOf(instruction) > -1) { for (let i = data.length; i--; ) { data[i] += item.base[i % 2] - relSubpoint[i % 2]; } } else if (instruction === 'h') { data[0] += item.base[0] - relSubpoint[0]; } else if (instruction === 'v') { data[0] += item.base[1] - relSubpoint[1]; } else if (instruction === 'a') { data[5] += item.base[0] - relSubpoint[0]; data[6] += item.base[1] - relSubpoint[1]; } roundData(data); if (instruction === 'h') { relSubpoint[0] += data[0]; } else if (instruction === 'v') { relSubpoint[1] += data[0]; } else { relSubpoint[0] += data[data.length - 2]; relSubpoint[1] += data[data.length - 1]; } roundData(relSubpoint); if (instruction.toLowerCase() === 'm') { pathBase[0] = relSubpoint[0]; pathBase[1] = relSubpoint[1]; } // convert straight curves into lines segments if (params.straightCurves) { if ( (instruction === 'c' && isCurveStraightLine(data)) || (instruction === 's' && isCurveStraightLine(sdata)) ) { if (next && next.instruction === 's') { makeLonghand(next, data); // fix up next curve } instruction = 'l'; data = data.slice(-2); } else if (instruction === 'q' && isCurveStraightLine(data)) { if (next && next.instruction === 't') { makeLonghand(next, data); // fix up next curve } instruction = 'l'; data = data.slice(-2); } else if ( instruction === 't' && prev.instruction !== 'q' && 
prev.instruction !== 't' ) { instruction = 'l'; data = data.slice(-2); } else if (instruction === 'a' && (data[0] === 0 || data[1] === 0)) { instruction = 'l'; data = data.slice(-2); } } // horizontal and vertical line shorthands // l 50 0 → h 50 // l 0 50 → v 50 if (params.lineShorthands && instruction === 'l') { if (data[1] === 0) { instruction = 'h'; data.pop(); } else if (data[0] === 0) { instruction = 'v'; data.shift(); } } // collapse repeated commands // h 20 h 30 -> h 50 if ( params.collapseRepeated && 'mhv'.indexOf(instruction) > -1 && prev.instruction && instruction === prev.instruction.toLowerCase() && ((instruction !== 'h' && instruction !== 'v') || prev.data[0] >= 0 === item.data[0] >= 0) ) { prev.data[0] += data[0]; if (instruction !== 'h' && instruction !== 'v') { prev.data[1] += data[1]; } prev.coords = item.coords; path[index] = prev; return false; } // convert curves into smooth shorthands if (params.curveSmoothShorthands && prev.instruction) { // curveto if (instruction === 'c') { // c + c → c + s if ( prev.instruction === 'c' && data[0] === -(prev.data[2] - prev.data[4]) && data[1] === -(prev.data[3] - prev.data[5]) ) { instruction = 's'; data = data.slice(2); } else if ( prev.instruction === 's' && data[0] === -(prev.data[0] - prev.data[2]) && data[1] === -(prev.data[1] - prev.data[3]) ) { // s + c → s + s instruction = 's'; data = data.slice(2); } else if ( 'cs'.indexOf(prev.instruction) === -1 && data[0] === 0 && data[1] === 0 ) { // [^cs] + c → [^cs] + s instruction = 's'; data = data.slice(2); } } else if (instruction === 'q') { // quadratic Bézier curveto // q + q → q + t if ( prev.instruction === 'q' && data[0] === prev.data[2] - prev.data[0] && data[1] === prev.data[3] - prev.data[1] ) { instruction = 't'; data = data.slice(2); } else if ( prev.instruction === 't' && data[2] === prev.data[0] && data[3] === prev.data[1] ) { // t + q → t + t instruction = 't'; data = data.slice(2); } } } // remove useless non-first path segments if 
(params.removeUseless) { // l 0,0 / h 0 / v 0 / q 0,0 0,0 / t 0,0 / c 0,0 0,0 0,0 / s 0,0 0,0 if ('lhvqtcs'.indexOf(instruction) > -1 && data.every(i => i === 0)) { path[index] = prev; return false; } // a 25,25 -30 0,1 0,0 if (instruction === 'a' && data[5] === 0 && data[6] === 0) { path[index] = prev; return false; } } item.instruction = instruction; item.data = data; prev = item; } else { // z resets coordinates relSubpoint[0] = pathBase[0]; relSubpoint[1] = pathBase[1]; if (prev.instruction === 'z') { return false; } prev = item; } return true; }); return pathRes; } /** * Writes data in shortest form using absolute or relative coordinates. * @param {Array} data input path data * @return {Boolean} output */ function convertToMixed(path: PathItem[], params: Params) { let prev = path[0]; path = path.filter(function(item, index) { if (index === 0) { return true; } if (!item.data) { prev = item; return true; } const instruction = item.instruction; const data = item.data; const adata = data && data.slice(0); if ('mltqsc'.indexOf(instruction) > -1) { for (let i = adata.length; i--; ) { adata[i] += item.base[i % 2]; } } else if (instruction === 'h') { adata[0] += item.base[0]; } else if (instruction === 'v') { adata[0] += item.base[1]; } else if (instruction === 'a') { adata[5] += item.base[0]; adata[6] += item.base[1]; } roundData(adata); const absoluteDataStr = cleanupOutData(adata, params); const relativeDataStr = cleanupOutData(data, params); // Convert to absolute coordinates if it's shorter. // v-20 -> V0 // Don't convert if it fits following previous instruction. 
// l20 30-10-50 instead of l20 30L20 30 if ( absoluteDataStr.length < relativeDataStr.length && !( params.negativeExtraSpace && instruction === prev.instruction && prev.instruction.charCodeAt(0) > 96 && absoluteDataStr.length === relativeDataStr.length - 1 && (data[0] < 0 || (/^0\./.test(String(data[0])) && prev.data[prev.data.length - 1] % 1)) ) ) { item.instruction = instruction.toUpperCase(); item.data = adata; } prev = item; return true; }); return path; } /** * Checks if curve is convex. Control points of such a curve must form * a convex quadrilateral with diagonals crosspoint inside of it. * * @param {Array} data input path data * @return {Boolean} output */ function isConvex(data: number[]) { const center = getIntersection([ 0, 0, data[2], data[3], data[0], data[1], data[4], data[5], ]); return ( center && data[2] < center[0] === center[0] < 0 && data[3] < center[1] === center[1] < 0 && data[4] < center[0] === center[0] < data[0] && data[5] < center[1] === center[1] < data[1] ); } /** * Computes lines equations by two points and returns their intersection point. * * @param {Array} coords 8 numbers for 4 pairs of coordinates (x,y) * @return {Array|undefined} output coordinate of lines' crosspoint */ function getIntersection(coords: number[]): Point | undefined { // Prev line equation parameters. 
const a1 = coords[1] - coords[3]; // y1 - y2 const b1 = coords[2] - coords[0]; // x2 - x1 const c1 = coords[0] * coords[3] - coords[2] * coords[1]; // x1 * y2 - x2 * y1 // Next line equation parameters const a2 = coords[5] - coords[7]; // y1 - y2 const b2 = coords[6] - coords[4]; // x2 - x1 const c2 = coords[4] * coords[7] - coords[5] * coords[6]; // x1 * y2 - x2 * y1 const denom = a1 * b2 - a2 * b1; if (!denom) { return undefined; // parallel lines havn't an intersection } const cross: Point = [ (b1 * c2 - b2 * c1) / denom, (a1 * c2 - a2 * c1) / -denom, ]; if ( !isNaN(cross[0]) && !isNaN(cross[1]) && isFinite(cross[0]) && isFinite(cross[1]) ) { return cross; } return undefined; } /** * Decrease accuracy of floating-point numbers * in path data keeping a specified number of decimals. * Smart rounds values like 2.3491 to 2.35 instead of 2.349. * Doesn't apply "smartness" if the number precision fits already. * * @param {Array} data input data array * @return {Array} output data array */ function strongRound(data: number[]) { for (let i = data.length; i-- > 0; ) { if (+data[i].toFixed(precision) !== data[i]) { const rounded = +data[i].toFixed(precision - 1); data[i] = +Math.abs(rounded - data[i]).toFixed(precision + 1) >= error ? +data[i].toFixed(precision) : rounded; } } return data; } /** * Checks if a curve is a straight line by measuring distance * from middle points to the line formed by end points. */ function isCurveStraightLine(data: number[]) { // Get line equation a·x + b·y + c = 0 coefficients a, b (c = 0) by start and end points. let i = data.length - 2; const a = -data[i + 1]; // y1 − y2 (y1 = 0) const b = data[i]; // x2 − x1 (x1 = 0) const d = 1 / (a * a + b * b); // same part for all points if (i <= 1 || !isFinite(d)) { // Curve that ends at start point isn't the case. 
return false; } // Distance from point (x0, y0) to the line is sqrt((c − a·x0 − b·y0)² / (a² + b²)) while ((i -= 2) >= 0) { if (Math.sqrt(Math.pow(a * data[i] + b * data[i + 1], 2) * d) > error) { return false; } } return true; } /** * Converts next curve from shorthand to full form using the current curve data. */ function makeLonghand<T extends { instruction: string; data?: number[] }>( item: T, data: number[], ) { switch (item.instruction) { case 's': item.instruction = 'c'; break; case 't': item.instruction = 'q'; break; } item.data.unshift( data[data.length - 2] - data[data.length - 4], data[data.length - 1] - data[data.length - 3], ); return item; } /** * Returns distance between two points. */ function getDistance(p1: Point, p2: Point) { return Math.sqrt(Math.pow(p1[0] - p2[0], 2) + Math.pow(p1[1] - p2[1], 2)); } /** * Returns coordinates of the curve point corresponding to the certain t * a·(1 - t)³·p1 + b·(1 - t)²·t·p2 + c·(1 - t)·t²·p3 + d·t³·p4, * where pN are control points and p1 is zero due to relative coordinates. * @param {Array} curve array of curve points coordinates * @param {Number} t parametric position from 0 to 1 * @return {Array} Point coordinates */ function getCubicBezierPoint(curve: Curve, t: number): Point { const sqrT = t * t; const cubT = sqrT * t; const mt = 1 - t; const sqrMt = mt * mt; return [ 3 * sqrMt * t * curve[0] + 3 * mt * sqrT * curve[2] + cubT * curve[4], 3 * sqrMt * t * curve[1] + 3 * mt * sqrT * curve[3] + cubT * curve[5], ]; } /** * Finds circle by 3 points of the curve and checks if the curve fits the found circle. 
* * @param {Array} curve * @return {Object|undefined} circle */ function findCircle(curve: Curve) { const midPoint = getCubicBezierPoint(curve, 1 / 2); const m1 = [midPoint[0] / 2, midPoint[1] / 2]; const m2 = [(midPoint[0] + curve[4]) / 2, (midPoint[1] + curve[5]) / 2]; const center = getIntersection([ m1[0], m1[1], m1[0] + m1[1], m1[1] - m1[0], m2[0], m2[1], m2[0] + (m2[1] - midPoint[1]), m2[1] - (m2[0] - midPoint[0]), ]); const radius = center && getDistance([0, 0], center); const tolerance = Math.min(arcThreshold * error, arcTolerance * radius / 100); if ( center && [1 / 4, 3 / 4].every(point => { return ( Math.abs( getDistance(getCubicBezierPoint(curve, point), center) - radius, ) <= tolerance ); }) ) { return { center: center, radius: radius }; } return undefined; } /** * Checks if a curve fits the given circle. * @param {Object} circle * @param {Array} curve * @return {Boolean} */ function isArc(curve: Curve, circle: Circle) { const tolerance = Math.min( arcThreshold * error, arcTolerance * circle.radius / 100, ); return [0, 1 / 4, 1 / 2, 3 / 4, 1].every(point => { return ( Math.abs( getDistance(getCubicBezierPoint(curve, point), circle.center) - circle.radius, ) <= tolerance ); }); } /** * Checks if a previous curve fits the given circle. * @param {Object} circle * @param {Array} curve * @return {Boolean} */ function isArcPrev(curve: Curve, circle: Circle) { return isArc(curve, { center: [circle.center[0] + curve[4], circle.center[1] + curve[5]], radius: circle.radius, }); } /** * Finds angle of a curve fitting the given arc. * @param {Array} curve * @param {Object} relCircle * @return {Number} angle */ function findArcAngle(curve: number[], relCircle: Circle) { const x1 = -relCircle.center[0]; const y1 = -relCircle.center[1]; const x2 = curve[4] - relCircle.center[0]; const y2 = curve[5] - relCircle.center[1]; return Math.acos( (x1 * x2 + y1 * y2) / Math.sqrt((x1 * x1 + y1 * y1) * (x2 * x2 + y2 * y2)), ); } /** * Converts given path data to string. 
* * @param {Object} params * @param {Array} pathData * @return {String} */ function data2Path(params: Params, pathData: PathItem[]) { return pathData.reduce((pathString, item) => { return ( pathString + item.instruction + (item.data ? cleanupOutData(roundData(item.data.slice()), params) : '') ); }, ''); } /** * Simple rounding function if precision is 0. * * @param {Array} data input data array * @return {Array} output data array */ function round(data: number[]) { for (let i = data.length; i-- > 0; ) { data[i] = Math.round(data[i]); } return data; } export const convertPathData: Plugin<Params> = { type: 'perItem', active: true, description: 'optimizes path data: writes in shorter form, applies transformations', params: defaultParams, fn, };
the_stack
import { ConcreteRequest } from "relay-runtime"; import { FragmentRefs } from "relay-runtime"; export type SaleArtworksRailTestsQueryVariables = { saleID?: string | null | undefined; }; export type SaleArtworksRailTestsQueryResponse = { readonly me: { readonly " $fragmentRefs": FragmentRefs<"SaleArtworksRail_me">; } | null; }; export type SaleArtworksRailTestsQuery = { readonly response: SaleArtworksRailTestsQueryResponse; readonly variables: SaleArtworksRailTestsQueryVariables; }; /* query SaleArtworksRailTestsQuery( $saleID: ID ) { me { ...SaleArtworksRail_me_nfIph id } } fragment SaleArtworkTileRailCard_saleArtwork on SaleArtwork { artwork { artistNames date href image { imageURL: url(version: "small") aspectRatio } internalID slug saleMessage title id } counts { bidderPositions } currentBid { display } lotLabel sale { isAuction isClosed displayTimelyAt id } } fragment SaleArtworksRail_me_nfIph on Me { lotsByFollowedArtistsConnection(first: 10, includeArtworksByFollowedArtists: true, saleID: $saleID) { edges { node { id href saleArtwork { ...SaleArtworkTileRailCard_saleArtwork id } } id } } } */ const node: ConcreteRequest = (function(){ var v0 = [ { "defaultValue": null, "kind": "LocalArgument", "name": "saleID" } ], v1 = { "kind": "Variable", "name": "saleID", "variableName": "saleID" }, v2 = { "alias": null, "args": null, "kind": "ScalarField", "name": "id", "storageKey": null }, v3 = { "alias": null, "args": null, "kind": "ScalarField", "name": "href", "storageKey": null }, v4 = { "enumValues": null, "nullable": false, "plural": false, "type": "ID" }, v5 = { "enumValues": null, "nullable": true, "plural": false, "type": "Artwork" }, v6 = { "enumValues": null, "nullable": true, "plural": false, "type": "String" }, v7 = { "enumValues": null, "nullable": true, "plural": false, "type": "Boolean" }; return { "fragment": { "argumentDefinitions": (v0/*: any*/), "kind": "Fragment", "metadata": null, "name": "SaleArtworksRailTestsQuery", "selections": [ { "alias": 
null, "args": null, "concreteType": "Me", "kind": "LinkedField", "name": "me", "plural": false, "selections": [ { "args": [ (v1/*: any*/) ], "kind": "FragmentSpread", "name": "SaleArtworksRail_me" } ], "storageKey": null } ], "type": "Query", "abstractKey": null }, "kind": "Request", "operation": { "argumentDefinitions": (v0/*: any*/), "kind": "Operation", "name": "SaleArtworksRailTestsQuery", "selections": [ { "alias": null, "args": null, "concreteType": "Me", "kind": "LinkedField", "name": "me", "plural": false, "selections": [ { "alias": null, "args": [ { "kind": "Literal", "name": "first", "value": 10 }, { "kind": "Literal", "name": "includeArtworksByFollowedArtists", "value": true }, (v1/*: any*/) ], "concreteType": "SaleArtworksConnection", "kind": "LinkedField", "name": "lotsByFollowedArtistsConnection", "plural": false, "selections": [ { "alias": null, "args": null, "concreteType": "SaleArtwork", "kind": "LinkedField", "name": "edges", "plural": true, "selections": [ { "alias": null, "args": null, "concreteType": "Artwork", "kind": "LinkedField", "name": "node", "plural": false, "selections": [ (v2/*: any*/), (v3/*: any*/), { "alias": null, "args": null, "concreteType": "SaleArtwork", "kind": "LinkedField", "name": "saleArtwork", "plural": false, "selections": [ { "alias": null, "args": null, "concreteType": "Artwork", "kind": "LinkedField", "name": "artwork", "plural": false, "selections": [ { "alias": null, "args": null, "kind": "ScalarField", "name": "artistNames", "storageKey": null }, { "alias": null, "args": null, "kind": "ScalarField", "name": "date", "storageKey": null }, (v3/*: any*/), { "alias": null, "args": null, "concreteType": "Image", "kind": "LinkedField", "name": "image", "plural": false, "selections": [ { "alias": "imageURL", "args": [ { "kind": "Literal", "name": "version", "value": "small" } ], "kind": "ScalarField", "name": "url", "storageKey": "url(version:\"small\")" }, { "alias": null, "args": null, "kind": "ScalarField", "name": 
"aspectRatio", "storageKey": null } ], "storageKey": null }, { "alias": null, "args": null, "kind": "ScalarField", "name": "internalID", "storageKey": null }, { "alias": null, "args": null, "kind": "ScalarField", "name": "slug", "storageKey": null }, { "alias": null, "args": null, "kind": "ScalarField", "name": "saleMessage", "storageKey": null }, { "alias": null, "args": null, "kind": "ScalarField", "name": "title", "storageKey": null }, (v2/*: any*/) ], "storageKey": null }, { "alias": null, "args": null, "concreteType": "SaleArtworkCounts", "kind": "LinkedField", "name": "counts", "plural": false, "selections": [ { "alias": null, "args": null, "kind": "ScalarField", "name": "bidderPositions", "storageKey": null } ], "storageKey": null }, { "alias": null, "args": null, "concreteType": "SaleArtworkCurrentBid", "kind": "LinkedField", "name": "currentBid", "plural": false, "selections": [ { "alias": null, "args": null, "kind": "ScalarField", "name": "display", "storageKey": null } ], "storageKey": null }, { "alias": null, "args": null, "kind": "ScalarField", "name": "lotLabel", "storageKey": null }, { "alias": null, "args": null, "concreteType": "Sale", "kind": "LinkedField", "name": "sale", "plural": false, "selections": [ { "alias": null, "args": null, "kind": "ScalarField", "name": "isAuction", "storageKey": null }, { "alias": null, "args": null, "kind": "ScalarField", "name": "isClosed", "storageKey": null }, { "alias": null, "args": null, "kind": "ScalarField", "name": "displayTimelyAt", "storageKey": null }, (v2/*: any*/) ], "storageKey": null }, (v2/*: any*/) ], "storageKey": null } ], "storageKey": null }, (v2/*: any*/) ], "storageKey": null } ], "storageKey": null }, (v2/*: any*/) ], "storageKey": null } ] }, "params": { "id": "e4f15be0d39fa5372896f18775584fc4", "metadata": { "relayTestingSelectionTypeInfo": { "me": { "enumValues": null, "nullable": true, "plural": false, "type": "Me" }, "me.id": (v4/*: any*/), "me.lotsByFollowedArtistsConnection": { 
"enumValues": null, "nullable": true, "plural": false, "type": "SaleArtworksConnection" }, "me.lotsByFollowedArtistsConnection.edges": { "enumValues": null, "nullable": true, "plural": true, "type": "SaleArtwork" }, "me.lotsByFollowedArtistsConnection.edges.id": (v4/*: any*/), "me.lotsByFollowedArtistsConnection.edges.node": (v5/*: any*/), "me.lotsByFollowedArtistsConnection.edges.node.href": (v6/*: any*/), "me.lotsByFollowedArtistsConnection.edges.node.id": (v4/*: any*/), "me.lotsByFollowedArtistsConnection.edges.node.saleArtwork": { "enumValues": null, "nullable": true, "plural": false, "type": "SaleArtwork" }, "me.lotsByFollowedArtistsConnection.edges.node.saleArtwork.artwork": (v5/*: any*/), "me.lotsByFollowedArtistsConnection.edges.node.saleArtwork.artwork.artistNames": (v6/*: any*/), "me.lotsByFollowedArtistsConnection.edges.node.saleArtwork.artwork.date": (v6/*: any*/), "me.lotsByFollowedArtistsConnection.edges.node.saleArtwork.artwork.href": (v6/*: any*/), "me.lotsByFollowedArtistsConnection.edges.node.saleArtwork.artwork.id": (v4/*: any*/), "me.lotsByFollowedArtistsConnection.edges.node.saleArtwork.artwork.image": { "enumValues": null, "nullable": true, "plural": false, "type": "Image" }, "me.lotsByFollowedArtistsConnection.edges.node.saleArtwork.artwork.image.aspectRatio": { "enumValues": null, "nullable": false, "plural": false, "type": "Float" }, "me.lotsByFollowedArtistsConnection.edges.node.saleArtwork.artwork.image.imageURL": (v6/*: any*/), "me.lotsByFollowedArtistsConnection.edges.node.saleArtwork.artwork.internalID": (v4/*: any*/), "me.lotsByFollowedArtistsConnection.edges.node.saleArtwork.artwork.saleMessage": (v6/*: any*/), "me.lotsByFollowedArtistsConnection.edges.node.saleArtwork.artwork.slug": (v4/*: any*/), "me.lotsByFollowedArtistsConnection.edges.node.saleArtwork.artwork.title": (v6/*: any*/), "me.lotsByFollowedArtistsConnection.edges.node.saleArtwork.counts": { "enumValues": null, "nullable": true, "plural": false, "type": 
"SaleArtworkCounts" }, "me.lotsByFollowedArtistsConnection.edges.node.saleArtwork.counts.bidderPositions": { "enumValues": null, "nullable": true, "plural": false, "type": "FormattedNumber" }, "me.lotsByFollowedArtistsConnection.edges.node.saleArtwork.currentBid": { "enumValues": null, "nullable": true, "plural": false, "type": "SaleArtworkCurrentBid" }, "me.lotsByFollowedArtistsConnection.edges.node.saleArtwork.currentBid.display": (v6/*: any*/), "me.lotsByFollowedArtistsConnection.edges.node.saleArtwork.id": (v4/*: any*/), "me.lotsByFollowedArtistsConnection.edges.node.saleArtwork.lotLabel": (v6/*: any*/), "me.lotsByFollowedArtistsConnection.edges.node.saleArtwork.sale": { "enumValues": null, "nullable": true, "plural": false, "type": "Sale" }, "me.lotsByFollowedArtistsConnection.edges.node.saleArtwork.sale.displayTimelyAt": (v6/*: any*/), "me.lotsByFollowedArtistsConnection.edges.node.saleArtwork.sale.id": (v4/*: any*/), "me.lotsByFollowedArtistsConnection.edges.node.saleArtwork.sale.isAuction": (v7/*: any*/), "me.lotsByFollowedArtistsConnection.edges.node.saleArtwork.sale.isClosed": (v7/*: any*/) } }, "name": "SaleArtworksRailTestsQuery", "operationKind": "query", "text": null } }; })(); (node as any).hash = '89b5df5f731ff60fa7470ca0f2af5291'; export default node;
the_stack
import { assert, expect } from "chai";
import { join } from "path";
import { AccessToken, Guid, Mutable } from "@itwin/core-bentley";
import { ChangesetFileProps, ChangesetType } from "@itwin/core-common";
import { LockProps, LockState } from "../../BackendHubAccess";
import { BriefcaseManager } from "../../BriefcaseManager";
import { IModelHost } from "../../IModelHost";
import { IModelJsFs } from "../../IModelJsFs";
import { HubMock } from "../HubMock";
import { IModelTestUtils } from "../IModelTestUtils";
import { KnownTestLocations } from "../KnownTestLocations";
import { LockStatusExclusive, LockStatusShared } from "../LocalHub";

/**
 * Tests for HubMock, the in-process mock of the iModel hub.
 *
 * NOTE(review): the assertions below are strictly order-dependent — briefcase
 * ids, changeset indices, and lock state all accumulate across calls within a
 * single `it`. Do not reorder statements.
 */
describe("HubMock", () => {
  const tmpDir = join(KnownTestLocations.outputDir, "HubMockTest");
  const iTwinId = Guid.createValue();
  const version0 = IModelTestUtils.resolveAssetFile("test.bim"); // seed file for the iModel's revision 0
  const accessToken: AccessToken = "fake token"; // HubMock does not validate tokens

  // Start/stop the mock hub around the whole suite.
  before(async () => {
    HubMock.startup("HubMockTest");
  });
  after(() => {
    HubMock.shutdown();
  });

  it("should be able to create HubMock", async () => {
    const iModelId = await IModelHost.hubAccess.createNewIModel({ iTwinId, iModelName: "test imodel", version0 });
    const localHub = HubMock.findLocalHub(iModelId);

    // A freshly created iModel has exactly one checkpoint, at changeset index 0.
    let checkpoints = localHub.getCheckpoints();
    assert.equal(checkpoints.length, 1);
    assert.equal(checkpoints[0], 0);

    // Downloading checkpoint 0 should yield a byte-for-byte copy of the seed file.
    const cp1 = join(tmpDir, "cp-1.bim");
    localHub.downloadCheckpoint({ changeset: { index: 0 }, targetFile: cp1 });
    const stat1 = IModelJsFs.lstatSync(cp1);
    const statRev0 = IModelJsFs.lstatSync(version0);
    assert.equal(stat1?.size, statRev0?.size);

    // Briefcase ids are assigned sequentially starting at 2 (0/1 are reserved).
    assert.equal(2, localHub.acquireNewBriefcaseId("user1", "user1 briefcase 1"));
    assert.equal(3, localHub.acquireNewBriefcaseId("user2", "user2 briefcase 1"));
    assert.equal(4, localHub.acquireNewBriefcaseId("user3", "user3 briefcase 1"));

    let briefcases = localHub.getBriefcases();
    assert.equal(briefcases.length, 3);
    assert.deepEqual(briefcases[0], { id: 2, user: "user1", alias: "user1 briefcase 1", assigned: true });
    assert.deepEqual(briefcases[1], { id: 3, user: "user2", alias: "user2 briefcase 1", assigned: true });
    assert.deepEqual(briefcases[2], { id: 4, user: "user3", alias: "user3 briefcase 1", assigned: true });

    // releasing a briefcaseId should mark it as unassigned
    // (getBriefcases(false) includes unassigned entries; the default omits them)
    localHub.releaseBriefcaseId(2);
    briefcases = localHub.getBriefcases(false);
    assert.equal(briefcases.length, 3);
    assert.deepEqual(briefcases[0], { id: 2, user: "user1", alias: "user1 briefcase 1", assigned: false });
    assert.deepEqual(briefcases[1], { id: 3, user: "user2", alias: "user2 briefcase 1", assigned: true });
    assert.deepEqual(briefcases[2], { id: 4, user: "user3", alias: "user3 briefcase 1", assigned: true });

    localHub.releaseBriefcaseId(4);
    briefcases = localHub.getBriefcases();
    assert.equal(briefcases.length, 1);
    assert.deepEqual(briefcases[0], { id: 3, user: "user2", alias: "user2 briefcase 1", assigned: true });

    // With no alias supplied, the alias defaults to "<user> (<id>)".
    assert.equal(5, localHub.acquireNewBriefcaseId("user4"));
    briefcases = localHub.getBriefcases();
    assert.equal(briefcases.length, 2);
    assert.deepEqual(briefcases[0], { id: 3, user: "user2", alias: "user2 briefcase 1", assigned: true });
    assert.deepEqual(briefcases[1], { id: 5, user: "user4", alias: "user4 (5)", assigned: true });

    // try pushing changesets
    const cs1: ChangesetFileProps = {
      id: "changeset0", description: "first changeset", changesType: ChangesetType.Regular, parentId: "", briefcaseId: 5, pushDate: "", index: 0, userCreated: "user1",
      pathname: IModelTestUtils.resolveAssetFile("CloneTest.01.00.00.ecschema.xml"),
    };
    cs1.index = localHub.addChangeset(cs1); // first changeset
    const changesets1 = localHub.queryChangesets();
    assert.equal(changesets1.length, 1);
    assert.equal(changesets1[0].id, cs1.id);
    assert.equal(changesets1[0].description, cs1.description);
    assert.equal(changesets1[0].changesType, cs1.changesType);
    assert.equal(changesets1[0].index, 1); // hub assigns index 1, replacing the supplied 0
    assert.equal(changesets1[0].briefcaseId, 5);
    assert.isAtLeast(changesets1[0].size!, 1); // size is filled in from the file on disk
    assert.equal(changesets1[0].parentId, "");
    assert.isDefined(changesets1[0].pushDate); // pushDate is stamped by the hub
    assert.equal(cs1.id, localHub.getLatestChangeset().id);

    const cs2: ChangesetFileProps = {
      id: "changeset1", parentId: cs1.id, description: "second changeset", changesType: ChangesetType.Schema, briefcaseId: 5, pushDate: "", index: 0, userCreated: "user2",
      pathname: IModelTestUtils.resolveAssetFile("CloneTest.01.00.01.ecschema.xml"),
    };
    cs2.index = localHub.addChangeset(cs2); // second changeset, parent = cs1
    const changesets2 = localHub.queryChangesets();
    assert.equal(changesets2.length, 2);
    assert.deepEqual(changesets1[0], changesets2[0]); // earlier entry is unchanged
    assert.equal(changesets2[1].id, cs2.id);
    assert.equal(changesets2[1].parentId, cs2.parentId);
    assert.equal(changesets2[1].description, cs2.description);
    assert.equal(changesets2[1].changesType, cs2.changesType);
    assert.equal(changesets2[1].index, 2);
    assert.equal(changesets2[1].briefcaseId, 5);
    assert.isAtLeast(changesets2[1].size!, 1);
    assert.isDefined(changesets2[1].pushDate);
    assert.equal(cs2.id, localHub.getLatestChangeset().id);

    // Upload a checkpoint at changeset 2; the hub now has checkpoints [0, 2].
    localHub.uploadCheckpoint({ changesetIndex: cs2.index, localFile: version0 });
    checkpoints = localHub.getCheckpoints();
    assert.equal(checkpoints.length, 2);
    assert.equal(checkpoints[1], 2);

    // test named versions
    const version1 = "release 1";
    const version2 = "release 2";
    localHub.addNamedVersion({ versionName: version1, csIndex: cs1.index });
    localHub.addNamedVersion({ versionName: version2, csIndex: cs2.index });
    assert.equal(localHub.findNamedVersion(version1).index, cs1.index);
    expect(() => localHub.findNamedVersion("not there")).throws("not found");
    // duplicate named version names are rejected
    expect(() => localHub.addNamedVersion({ versionName: version2, csIndex: cs2.index })).throws("insert");
    localHub.deleteNamedVersion(version1);
    expect(() => localHub.findNamedVersion(version1)).throws("not found");

    // test for duplicate changeset id fails
    const cs3: ChangesetFileProps = { id: "changeset0", parentId: "changeset1", description: "third changeset", changesType: ChangesetType.Regular, pathname: cs1.pathname, briefcaseId: 500, userCreated: "", pushDate: "", index: 0 };
    expect(() => localHub.addChangeset(cs3)).throws("no briefcase with that id");
    cs3.briefcaseId = 5;
    expect(() => localHub.addChangeset(cs3)).throws("can't insert");
    // now test for valid changeset id, but bad parentId
    const cs4 = { ...cs3, id: "changeset4", parentId: "bad", description: "fourth changeset" };
    expect(() => localHub.addChangeset(cs4)).throws("bad not found");

    // Fix cs3 so it becomes a valid third changeset (index 3).
    cs3.id = "changeset3";
    cs3.parentId = cs2.id;
    cs3.index = localHub.addChangeset(cs3);

    // queryPreviousCheckpoint returns the nearest checkpoint at or before an index.
    assert.equal(0, localHub.queryPreviousCheckpoint(0));
    assert.equal(0, localHub.queryPreviousCheckpoint(cs1.index));
    assert.equal(cs2.index, localHub.queryPreviousCheckpoint(cs2.index));
    assert.equal(cs2.index, localHub.queryPreviousCheckpoint(cs3.index));

    // downloading changesets
    const cSets = localHub.downloadChangesets({ range: { first: cs1.index, end: cs2.index }, targetDir: tmpDir });
    assert.equal(cSets.length, 2);
    assert.equal(cSets[0].id, cs1.id);
    assert.equal(cSets[0].changesType, cs1.changesType);
    assert.equal(cSets[0].userCreated, cs1.userCreated);
    assert.equal(cSets[0].parentId, cs1.parentId);
    assert.equal(cSets[0].description, cs1.description);
    assert.equal(cSets[1].id, cs2.id);
    assert.equal(cSets[1].changesType, cs2.changesType);
    assert.equal(cSets[1].userCreated, cs2.userCreated);
    assert.equal(cSets[1].parentId, cs2.parentId);
    assert.equal(cSets[1].description, cs2.description);

    // Downloaded files must be byte-identical to the originals that were pushed.
    const orig1 = IModelJsFs.readFileSync(cs1.pathname);
    const downloaded1 = IModelJsFs.readFileSync(cSets[0].pathname);
    assert.deepEqual(orig1, downloaded1);

    const orig2 = IModelJsFs.readFileSync(cs2.pathname);
    const downloaded2 = IModelJsFs.readFileSync(cSets[1].pathname);
    assert.deepEqual(orig2, downloaded2);
    assert.notDeepEqual(orig1, orig2);

    // test locks
    const lock1: Mutable<LockProps> = {
      state: LockState.Shared,
      id: "0x12",
    };

    // get a new briefcaseId for some locks
    assert.equal(6, localHub.acquireNewBriefcaseId("user5", "alias for 5"));

    // A shared lock may be held by several briefcases at once.
    localHub.acquireLock(lock1, { briefcaseId: 3, changeset: cs1 });
    assert.equal(localHub.countSharedLocks(), 1);
    assert.equal(localHub.countLocks(), 1);
    let lockStat = localHub.queryLockStatus(lock1.id);
    assert.equal(lockStat.state, LockState.Shared);
    assert.equal((lockStat as LockStatusShared).sharedBy.size, 1);
    assert.isTrue((lockStat as LockStatusShared).sharedBy.has(3));
    assert.isUndefined(lockStat.lastCsIndex);

    localHub.acquireLock(lock1, { briefcaseId: 5, changeset: cs1 });
    assert.equal(localHub.countSharedLocks(), 2);
    assert.equal(localHub.countLocks(), 1);
    lockStat = localHub.queryLockStatus(lock1.id);
    assert.equal((lockStat as LockStatusShared).sharedBy.size, 2);
    assert.isTrue((lockStat as LockStatusShared).sharedBy.has(3));
    assert.isTrue((lockStat as LockStatusShared).sharedBy.has(5));

    // An exclusive request fails while another briefcase holds the shared lock,
    // and the error identifies (one of) the holders.
    expect(() => localHub.acquireLock({ ...lock1, state: LockState.Exclusive }, { briefcaseId: 6, changeset: { id: "cs1" } })).to.throw("shared lock is held").include({ briefcaseId: 3, briefcaseAlias: "user2 briefcase 1" });
    expect(() => localHub.releaseLocks([lock1], { briefcaseId: 9, changesetIndex: cs1.index })).to.throw("shared lock not held");

    localHub.releaseLocks([lock1], { briefcaseId: 3, changesetIndex: cs1.index });
    assert.equal(localHub.countSharedLocks(), 1);
    assert.equal(localHub.countLocks(), 1);
    lockStat = localHub.queryLockStatus(lock1.id);
    assert.equal((lockStat as LockStatusShared).sharedBy.size, 1);

    // Releasing the last sharer leaves the lock record (countLocks stays 1)
    // but its state becomes None.
    localHub.releaseLocks([lock1], { briefcaseId: 5, changesetIndex: cs1.index });
    assert.equal(localHub.countSharedLocks(), 0);
    assert.equal(localHub.countLocks(), 1);
    lockStat = localHub.queryLockStatus(lock1.id);
    assert.equal(lockStat.state, LockState.None);

    lock1.state = LockState.Exclusive;
    localHub.acquireLock(lock1, { briefcaseId: 6, changeset: cs1 });
    lockStat = localHub.queryLockStatus(lock1.id);
    assert.equal(lockStat.state, LockState.Exclusive);
    // Re-acquiring a lock you already hold is a no-op.
    localHub.acquireLock(lock1, { briefcaseId: 6, changeset: cs1 });
    assert.equal(localHub.countSharedLocks(), 0);
    assert.equal(localHub.countLocks(), 1);
    expect(() => localHub.acquireLock(lock1, { briefcaseId: 5, changeset: cs1 })).to.throw("exclusive lock is already held").include({ briefcaseId: 6, briefcaseAlias: "alias for 5" });
    expect(() => localHub.acquireLock({ ...lock1, state: LockState.Shared }, { briefcaseId: 5, changeset: cs1 })).to.throw("exclusive lock is already held").include({ briefcaseId: 6, briefcaseAlias: "alias for 5" });

    // Releasing an exclusive lock records the changeset index it was released at.
    localHub.releaseLocks([lock1], { briefcaseId: 6, changesetIndex: cs2.index });
    assert.equal(localHub.countLocks(), 1);
    lockStat = localHub.queryLockStatus(lock1.id);
    assert.equal(lockStat.state, LockState.None);
    assert.equal(lockStat.lastCsIndex, cs2.index);

    // Acquiring with a changeset older than lastCsIndex requires a pull first.
    expect(() => localHub.acquireLock(lock1, { briefcaseId: 5, changeset: cs1 })).to.throw("pull is required");
    localHub.acquireLock(lock1, { briefcaseId: 5, changeset: cs2 });
    lockStat = localHub.queryLockStatus(lock1.id);
    assert.equal(lockStat.state, LockState.Exclusive);
    assert.equal((lockStat as LockStatusExclusive).briefcaseId, 5);
    assert.equal(lockStat.lastCsIndex, cs2.index);

    localHub.acquireLock({ state: LockState.Exclusive, id: "0x22" }, { briefcaseId: 5, changeset: cs1 });
    lockStat = localHub.queryLockStatus("0x22");
    assert.equal(lockStat.state, LockState.Exclusive);
    assert.equal((lockStat as LockStatusExclusive).briefcaseId, 5);
    assert.isUndefined(lockStat.lastCsIndex);

    localHub.acquireLock({ state: LockState.Exclusive, id: "0x23" }, { briefcaseId: 6, changeset: cs1 });
    localHub.acquireLock({ state: LockState.Shared, id: "0x24" }, { briefcaseId: 6, changeset: cs1 });
    localHub.acquireLock({ state: LockState.Shared, id: "0x24" }, { briefcaseId: 5, changeset: cs1 });
    let locks = localHub.queryAllLocks(5);
    assert.equal(locks.length, 3);
    localHub.releaseBriefcaseId(5); // releasing a briefcaseId with held locks should release them
    lockStat = localHub.queryLockStatus("0x22");
    locks = localHub.queryAllLocks(5);
    assert.equal(locks.length, 0);
    assert.equal(localHub.countSharedLocks(), 1);

    // releaseAllLocks frees everything briefcase 6 held; lock records remain
    // (countLocks === 4) with state None and lastCsIndex updated where applicable.
    localHub.releaseAllLocks({ briefcaseId: 6, changesetIndex: 3 });
    assert.equal(localHub.countSharedLocks(), 0);
    assert.equal(localHub.countLocks(), 4);
    lockStat = localHub.queryLockStatus("0x23");
    assert.equal(lockStat.lastCsIndex, 3);
    assert.equal(lockStat.state, 0);
    lockStat = localHub.queryLockStatus("0x24");
    assert.equal(lockStat.lastCsIndex, undefined);
    assert.equal(lockStat.state, 0);

    await IModelHost.hubAccess.deleteIModel({ iTwinId, iModelId });
  });

  it("use HubMock with BriefcaseManager", async () => {
    // BriefcaseManager should work end-to-end against the mock hub.
    const iModelId = await IModelHost.hubAccess.createNewIModel({ iTwinId, iModelName: "test imodel", version0 });
    const briefcase = await BriefcaseManager.downloadBriefcase({ accessToken, iTwinId, iModelId });
    assert.equal(briefcase.briefcaseId, 2); // first assigned briefcase id
    assert.equal(briefcase.changeset.id, ""); // no changesets pushed yet
    assert.equal(briefcase.iModelId, iModelId);
    assert.equal(briefcase.iTwinId, iTwinId);
    await IModelHost.hubAccess.deleteIModel({ iTwinId, iModelId });
  });
});
the_stack
// TypeScript port of zlib's deflate.c — constants and low-level helpers.
// (The original zlib comments are preserved below; state fields mirror
// deflate.c's internal_state struct.)
import { message as msg, CODE } from "./messages.ts";
import type ZStream from "./zstream.ts";
import * as trees from "./trees.ts";
import adler32 from "./adler32.ts";
import { crc32 } from "./crc32.ts";
import STATUS from "./status.ts";

/* Return codes for the compression/decompression functions. Negative values
 * are errors, positive values are used for special but normal events.
 */
const Z_OK = 0;
const Z_STREAM_END = 1;
//const Z_NEED_DICT = 2;
//const Z_ERRNO = -1;
const Z_STREAM_ERROR = -2;
const Z_DATA_ERROR = -3;
//const Z_MEM_ERROR = -4;
const Z_BUF_ERROR = -5;
//const Z_VERSION_ERROR = -6;

/* compression levels */
//const Z_NO_COMPRESSION = 0;
//const Z_BEST_SPEED = 1;
//const Z_BEST_COMPRESSION = 9;
const Z_DEFAULT_COMPRESSION = -1;

/* compression strategies */
const Z_FILTERED = 1;
const Z_HUFFMAN_ONLY = 2;
const Z_RLE = 3;
const Z_FIXED = 4;
const Z_DEFAULT_STRATEGY = 0;

/* Possible values of the data_type field (though see inflate()) */
//const Z_BINARY = 0;
//const Z_TEXT = 1;
//const Z_ASCII = 1; // = Z_TEXT
const Z_UNKNOWN = 2;

/* The deflate compression method */
const Z_DEFLATED = 8;

const MAX_MEM_LEVEL = 9;
/* Maximum value for memLevel in deflateInit2 */
const MAX_WBITS = 15;
/* 32K LZ77 window */
const DEF_MEM_LEVEL = 8;

const LENGTH_CODES = 29;
/* number of length codes, not counting the special END_BLOCK code */
const LITERALS = 256;
/* number of literal bytes 0..255 */
const L_CODES = LITERALS + 1 + LENGTH_CODES;
/* number of Literal or Length codes, including the END_BLOCK code */
const D_CODES = 30;
/* number of distance codes */
const BL_CODES = 19;
/* number of codes used to transfer the bit lengths */
const HEAP_SIZE = 2 * L_CODES + 1;
/* maximum heap size */
const MAX_BITS = 15;
/* All codes must not exceed MAX_BITS bits */

const MIN_MATCH = 3;
const MAX_MATCH = 258;
const MIN_LOOKAHEAD = (MAX_MATCH + MIN_MATCH + 1);

const PRESET_DICT = 0x20;

/* deflate state-machine states (values match zlib's) */
const INIT_STATE = 42;
const EXTRA_STATE = 69;
const NAME_STATE = 73;
const COMMENT_STATE = 91;
const HCRC_STATE = 103;
const BUSY_STATE = 113;
const FINISH_STATE = 666;

/* return values of the deflate_* block routines below */
const BS_NEED_MORE = 1;
/* block not completed, need more input or more output */
const BS_BLOCK_DONE = 2;
/* block flush performed */
const BS_FINISH_STARTED = 3;
/* finish started, need only more output at next deflate */
const BS_FINISH_DONE = 4;
/* finish done, accept no more input or output */

const OS_CODE = 0x03; // Unix :) . Don't detect, use this default.

/** gzip header fields (see RFC 1952 for field semantics). */
export interface Header {
  text: boolean;
  time: number;
  os: number;
  extra: string[];
  name: string;
  comment: string;
  hcrc: boolean;
}

/** Record an error message on the stream and return the error code. */
function err(strm: ZStream, errorCode: CODE) {
  strm.msg = msg[errorCode];
  return errorCode;
}

/** Rank Z_NO_FLUSH..Z_FULL_FLUSH and Z_FINISH for flush-ordering comparisons. */
function rank(f: number): number {
  return ((f) << 1) - ((f) > 4 ? 9 : 0);
}

/** Zero-fill a typed-array buffer. */
function zero(buf: Uint8Array | Uint16Array) {
  buf.fill(0, 0, buf.length);
}

/* =========================================================================
 * Flush as much pending output as possible. All deflate() output goes
 * through this function so some applications may wish to modify it
 * to avoid allocating a large strm->output buffer and copying into it.
 * (See also read_buf()).
 */
function flush_pending(strm: ZStream) {
  let s = strm.state as DeflateState;

  //_tr_flush_bits(s);
  let len = s.pending;
  if (len > strm.avail_out) {
    len = strm.avail_out;
  }
  if (len === 0) return;

  strm.output!.set(
    s.pending_buf.subarray(s.pending_out, s.pending_out + len),
    strm.next_out,
  );
  strm.next_out += len;
  s.pending_out += len;
  strm.total_out += len;
  strm.avail_out -= len;
  s.pending -= len;
  if (s.pending === 0) {
    s.pending_out = 0;
  }
}

/** Flush the current block (FLUSH_BLOCK_ONLY macro in deflate.c); `last` marks the final block. */
function flush_block_only(s: DeflateState, last: any) {
  trees._tr_flush_block(
    s,
    (s.block_start >= 0 ? s.block_start : -1),
    s.strstart - s.block_start,
    last,
  );
  s.block_start = s.strstart;
  flush_pending(s.strm!);
}

/** Append one byte to the pending output buffer. */
function put_byte(s: any, b: any) {
  s.pending_buf[s.pending++] = b;
}

/* =========================================================================
 * Put a short in the pending buffer. The 16-bit value is put in MSB order.
 * IN assertion: the stream state is correct and there is enough room in
 * pending_buf.
 */
function putShortMSB(s: any, b: any) {
  //  put_byte(s, (Byte)(b >> 8));
  //  put_byte(s, (Byte)(b & 0xff));
  s.pending_buf[s.pending++] = (b >>> 8) & 0xff;
  s.pending_buf[s.pending++] = b & 0xff;
}

/* ===========================================================================
 * Read a new buffer from the current input stream, update the adler32
 * and total number of bytes read.  All deflate() input goes through
 * this function so some applications may wish to modify it to avoid
 * allocating a large strm->input buffer and copying from it.
 * (See also flush_pending()).
 */
function read_buf(strm: any, buf: any, start: any, size: any) {
  let len = strm.avail_in;

  if (len > size) len = size;
  if (len === 0) return 0;

  strm.avail_in -= len;

  // zmemcpy(buf, strm->next_in, len);
  buf.set(strm.input.subarray(strm.next_in, strm.next_in + len), start);
  // wrap === 1: zlib stream (adler32 checksum); wrap === 2: gzip stream (crc32).
  if (strm.state.wrap === 1) {
    strm.adler = adler32(strm.adler, buf, len, start);
  } else if (strm.state.wrap === 2) {
    strm.adler = crc32(strm.adler, buf, len, start);
  }

  strm.next_in += len;
  strm.total_in += len;

  return len;
}

/* ===========================================================================
 * Set match_start to the longest match starting at the given string and
 * return its length. Matches shorter or equal to prev_length are discarded,
 * in which case the result is equal to prev_length and match_start is
 * garbage.
 * IN assertions: cur_match is the head of the hash chain for the current
 *   string (strstart) and its distance is <= MAX_DIST, and prev_length >= 1
 * OUT assertion: the match length is not greater than s->lookahead.
 */
function longest_match(s: any, cur_match: any) {
  let chain_length = s.max_chain_length; /* max hash chain length */
  let scan = s.strstart; /* current string */
  let match; /* matched string */
  let len; /* length of current match */
  let best_len = s.prev_length; /* best match length so far */
  let nice_match = s.nice_match; /* stop if match long enough */
  let limit = (s.strstart > (s.w_size - MIN_LOOKAHEAD))
    ? s.strstart - (s.w_size - MIN_LOOKAHEAD)
    : 0 /*NIL*/;

  let _win = s.window; // shortcut

  let wmask = s.w_mask;
  let prev = s.prev;

  /* Stop when cur_match becomes <= limit. To simplify the code,
   * we prevent matches with the string of window index 0.
   */

  let strend = s.strstart + MAX_MATCH;
  let scan_end1 = _win[scan + best_len - 1];
  let scan_end = _win[scan + best_len];

  /* The code is optimized for HASH_BITS >= 8 and MAX_MATCH-2 multiple of 16.
   * It is easy to get rid of this optimization if necessary.
   */
  // Assert(s->hash_bits >= 8 && MAX_MATCH == 258, "Code too clever");

  /* Do not waste too much time if we already have a good match: */
  if (s.prev_length >= s.good_match) {
    chain_length >>= 2;
  }
  /* Do not look for matches beyond the end of the input. This is necessary
   * to make deflate deterministic.
   */
  if (nice_match > s.lookahead) nice_match = s.lookahead;

  // Assert((ulg)s->strstart <= s->window_size-MIN_LOOKAHEAD, "need lookahead");

  do {
    // Assert(cur_match < s->strstart, "no future");
    match = cur_match;

    /* Skip to next match if the match length cannot increase
     * or if the match length is less than 2.  Note that the checks below
     * for insufficient lookahead only occur occasionally for performance
     * reasons.  Therefore uninitialized memory will be accessed, and
     * conditional jumps will be made that depend on those values.
     * However the length of the match is limited to the lookahead, so
     * the output of deflate is not affected by the uninitialized values.
     */
    if (
      _win[match + best_len] !== scan_end ||
      _win[match + best_len - 1] !== scan_end1 ||
      _win[match] !== _win[scan] ||
      _win[++match] !== _win[scan + 1]
    ) {
      continue;
    }

    /* The check at best_len-1 can be removed because it will be made
     * again later. (This heuristic is not always a win.)
     * It is not necessary to compare scan[2] and match[2] since they
     * are always equal when the other bytes match, given that
     * the hash keys are equal and that HASH_BITS >= 8.
     */
    scan += 2;
    match++;
    // Assert(*scan == *match, "match[2]?");

    /* We check for insufficient lookahead only every 8th comparison;
     * the 256th check will be made at strstart+258.
     */
    do {
      /*jshint noempty:false*/
    } while (
      _win[++scan] === _win[++match] &&
      _win[++scan] === _win[++match] &&
      _win[++scan] === _win[++match] &&
      _win[++scan] === _win[++match] &&
      _win[++scan] === _win[++match] &&
      _win[++scan] === _win[++match] &&
      _win[++scan] === _win[++match] &&
      _win[++scan] === _win[++match] &&
      scan < strend
    );

    // Assert(scan <= s->window+(unsigned)(s->window_size-1), "wild scan");

    len = MAX_MATCH - (strend - scan);
    scan = strend - MAX_MATCH;

    if (len > best_len) {
      s.match_start = cur_match;
      best_len = len;
      if (len >= nice_match) {
        break;
      }
      scan_end1 = _win[scan + best_len - 1];
      scan_end = _win[scan + best_len];
    }
  } while (
    (cur_match = prev[cur_match & wmask]) > limit && --chain_length !== 0
  );

  if (best_len <= s.lookahead) {
    return best_len;
  }
  return s.lookahead;
}

/* ===========================================================================
 * Fill the window when the lookahead becomes insufficient.
 * Updates strstart and lookahead.
 *
 * IN assertion: lookahead < MIN_LOOKAHEAD
 * OUT assertions: strstart <= window_size-MIN_LOOKAHEAD
 *    At least one byte has been read, or avail_in == 0; reads are
 *    performed for at least two bytes (required for the zip translate_eol
 *    option -- not supported here).
 */
function fill_window(s: any) {
  let _w_size = s.w_size;
  let p, n, m, more, str;

  //Assert(s->lookahead < MIN_LOOKAHEAD, "already enough lookahead");

  do {
    more = s.window_size - s.lookahead - s.strstart;

    // JS ints have 32 bit, block below not needed
    /* Deal with !@#$% 64K limit: */
    //if (sizeof(int) <= 2) {
    //  if (more == 0 && s->strstart == 0 && s->lookahead == 0) {
    //    more = wsize;
    //
    //  } else if (more == (unsigned)(-1)) {
    //    /* Very unlikely, but possible on 16 bit machine if
    //     * strstart == 0 && lookahead == 1 (input done a byte at time)
    //     */
    //    more--;
    //  }
    //}

    /* If the window is almost full and there is insufficient lookahead,
     * move the upper half to the lower one to make room in the upper half.
     */
    if (s.strstart >= _w_size + (_w_size - MIN_LOOKAHEAD)) {
      s.window.set(s.window.subarray(_w_size, _w_size + _w_size), 0);
      s.match_start -= _w_size;
      s.strstart -= _w_size;
      /* we now have strstart >= MAX_DIST */
      s.block_start -= _w_size;

      /* Slide the hash table (could be avoided with 32 bit values
       at the expense of memory usage). We slide even when level == 0
       to keep the hash table consistent if we switch back to level > 0
       later. (Using level 0 permanently is not an optimal usage of
       zlib, so we don't care about this pathological case.)
       */

      n = s.hash_size;
      p = n;
      do {
        m = s.head[--p];
        s.head[p] = (m >= _w_size ? m - _w_size : 0);
      } while (--n);

      n = _w_size;
      p = n;
      do {
        m = s.prev[--p];
        s.prev[p] = (m >= _w_size ? m - _w_size : 0);
        /* If n is not on any hash chain, prev[n] is garbage but
         * its value will never be used.
         */
      } while (--n);

      more += _w_size;
    }
    if (s.strm.avail_in === 0) {
      break;
    }

    /* If there was no sliding:
     *    strstart <= WSIZE+MAX_DIST-1 && lookahead <= MIN_LOOKAHEAD - 1 &&
     *    more == window_size - lookahead - strstart
     * => more >= window_size - (MIN_LOOKAHEAD-1 + WSIZE + MAX_DIST-1)
     * => more >= window_size - 2*WSIZE + 2
     * In the BIG_MEM or MMAP case (not yet supported),
     *   window_size == input_size + MIN_LOOKAHEAD &&
     *   strstart + s->lookahead <= input_size => more >= MIN_LOOKAHEAD.
     * Otherwise, window_size == 2*WSIZE so more >= 2.
     * If there was sliding, more >= WSIZE. So in all cases, more >= 2.
     */
    //Assert(more >= 2, "more < 2");

    n = read_buf(s.strm, s.window, s.strstart + s.lookahead, more);
    s.lookahead += n;

    /* Initialize the hash value now that we have some input: */
    if (s.lookahead + s.insert >= MIN_MATCH) {
      str = s.strstart - s.insert;
      s.ins_h = s.window[str];

      /* UPDATE_HASH(s, s->ins_h, s->window[str + 1]); */
      s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[str + 1]) & s.hash_mask;
      //#if MIN_MATCH != 3
      //        Call update_hash() MIN_MATCH-3 more times
      //#endif
      while (s.insert) {
        /* UPDATE_HASH(s, s->ins_h, s->window[str + MIN_MATCH-1]); */
        s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[str + MIN_MATCH - 1]) &
          s.hash_mask;

        s.prev[str & s.w_mask] = s.head[s.ins_h];
        s.head[s.ins_h] = str;
        str++;
        s.insert--;
        if (s.lookahead + s.insert < MIN_MATCH) {
          break;
        }
      }
    }
    /* If the whole input has less than MIN_MATCH bytes, ins_h is garbage,
     * but this is not important since only literal bytes will be emitted.
     */
  } while (s.lookahead < MIN_LOOKAHEAD && s.strm.avail_in !== 0);

  /* If the WIN_INIT bytes after the end of the current data have never been
   * written, then zero those bytes in order to avoid memory check reports of
   * the use of uninitialized (or uninitialised as Julian writes) bytes by
   * the longest match routines.  Update the high water mark for the next
   * time through here.  WIN_INIT is set to MAX_MATCH since the longest match
   * routines allow scanning to strstart + MAX_MATCH, ignoring lookahead.
   */
  //  if (s.high_water < s.window_size) {
  //    let curr = s.strstart + s.lookahead;
  //    let init = 0;
  //
  //    if (s.high_water < curr) {
  //      /* Previous high water mark below current data -- zero WIN_INIT
  //       * bytes or up to end of window, whichever is less.
  //       */
  //      init = s.window_size - curr;
  //      if (init > WIN_INIT)
  //        init = WIN_INIT;
  //      zmemzero(s->window + curr, (unsigned)init);
  //      s->high_water = curr + init;
  //    }
  //    else if (s->high_water < (ulg)curr + WIN_INIT) {
  //      /* High water mark at or above current data, but below current data
  //       * plus WIN_INIT -- zero out to current data plus WIN_INIT, or up
  //       * to end of window, whichever is less.
  //       */
  //      init = (ulg)curr + WIN_INIT - s->high_water;
  //      if (init > s->window_size - s->high_water)
  //        init = s->window_size - s->high_water;
  //      zmemzero(s->window + s->high_water, (unsigned)init);
  //      s->high_water += init;
  //    }
  //  }
  //
  //  Assert((ulg)s->strstart <= s->window_size - MIN_LOOKAHEAD,
  //    "not enough room for search");
}

/* ===========================================================================
 * Copy without compression as much as possible from the input stream, return
 * the current block state.
 * This function does not insert new strings in the dictionary since
 * uncompressible data is probably not useful. This function is used
 * only for the level=0 compression option.
 * NOTE: this function should be optimized to avoid extra copying from
 * window to pending_buf.
 */
function deflate_stored(s: any, flush: any) {
  /* Stored blocks are limited to 0xffff bytes, pending_buf is limited
   * to pending_buf_size, and each stored block has a 5 byte header:
   */
  let max_block_size = 0xffff;

  if (max_block_size > s.pending_buf_size - 5) {
    max_block_size = s.pending_buf_size - 5;
  }

  /* Copy as much as possible from input to output: */
  for (;;) {
    /* Fill the window as much as possible: */
    if (s.lookahead <= 1) {
      //Assert(s->strstart < s->w_size+MAX_DIST(s) ||
      //  s->block_start >= (long)s->w_size, "slide too late");
      //      if (!(s.strstart < s.w_size + (s.w_size - MIN_LOOKAHEAD) ||
      //        s.block_start >= s.w_size)) {
      //        throw  new Error("slide too late");
      //      }

      fill_window(s);
      if (s.lookahead === 0 && flush === STATUS.Z_NO_FLUSH) {
        return BS_NEED_MORE;
      }

      if (s.lookahead === 0) {
        break;
      }
      /* flush the current block */
    }
    //Assert(s->block_start >= 0L, "block gone");
    //    if (s.block_start < 0) throw new Error("block gone");

    s.strstart += s.lookahead;
    s.lookahead = 0;

    /* Emit a stored block if pending_buf will be full: */
    let max_start = s.block_start + max_block_size;

    if (s.strstart === 0 || s.strstart >= max_start) {
      /* strstart == 0 is possible when wraparound on 16-bit machine */
      s.lookahead = s.strstart - max_start;
      s.strstart = max_start;
      /*** FLUSH_BLOCK(s, 0); ***/
      flush_block_only(s, false);
      if (s.strm.avail_out === 0) {
        return BS_NEED_MORE;
      }
      /***/
    }
    /* Flush if we may have to slide, otherwise block_start may become
     * negative and the data will be gone:
     */
    if (s.strstart - s.block_start >= (s.w_size - MIN_LOOKAHEAD)) {
      /*** FLUSH_BLOCK(s, 0); ***/
      flush_block_only(s, false);
      if (s.strm.avail_out === 0) {
        return BS_NEED_MORE;
      }
      /***/
    }
  }

  s.insert = 0;

  if (flush === STATUS.Z_FINISH) {
    /*** FLUSH_BLOCK(s, 1); ***/
    flush_block_only(s, true);
    if (s.strm.avail_out === 0) {
      return BS_FINISH_STARTED;
    }
    /***/
    return BS_FINISH_DONE;
  }

  if (s.strstart > s.block_start) {
    /*** FLUSH_BLOCK(s, 0); ***/
    flush_block_only(s, false);
    if (s.strm.avail_out === 0) {
      return BS_NEED_MORE;
    }
    /***/
  }

  return BS_NEED_MORE;
}

/* ===========================================================================
 * Compress as much as possible from the input stream, return the current
 * block state.
 * This function does not perform lazy evaluation of matches and inserts
 * new strings in the dictionary only for unmatched strings or for short
 * matches. It is used only for the fast compression options.
 */
function deflate_fast(s: any, flush: any) {
  let hash_head; /* head of the hash chain */
  let bflush; /* set if current block must be flushed */

  for (;;) {
    /* Make sure that we always have enough lookahead, except
     * at the end of the input file. We need MAX_MATCH bytes
     * for the next match, plus MIN_MATCH bytes to insert the
     * string following the next match.
     */
    if (s.lookahead < MIN_LOOKAHEAD) {
      fill_window(s);
      if (s.lookahead < MIN_LOOKAHEAD && flush === STATUS.Z_NO_FLUSH) {
        return BS_NEED_MORE;
      }
      if (s.lookahead === 0) {
        break; /* flush the current block */
      }
    }

    /* Insert the string window[strstart .. strstart+2] in the
     * dictionary, and set hash_head to the head of the hash chain:
     */
    hash_head = 0 /*NIL*/;
    if (s.lookahead >= MIN_MATCH) {
      /*** INSERT_STRING(s, s.strstart, hash_head); ***/
      s.ins_h = ((s.ins_h << s.hash_shift) ^
        s.window[s.strstart + MIN_MATCH - 1]) & s.hash_mask;
      hash_head = s.prev[s.strstart & s.w_mask] = s.head[s.ins_h];
      s.head[s.ins_h] = s.strstart;
      /***/
    }

    /* Find the longest match, discarding those <= prev_length.
     * At this point we have always match_length < MIN_MATCH
     */
    if (
      hash_head !== 0 /*NIL*/ &&
      ((s.strstart - hash_head) <= (s.w_size - MIN_LOOKAHEAD))
    ) {
      /* To simplify the code, we prevent matches with the string
       * of window index 0 (in particular we have to avoid a match
       * of the string with itself at the start of the input file).
       */
      s.match_length = longest_match(s, hash_head);
      /* longest_match() sets match_start */
    }
    if (s.match_length >= MIN_MATCH) {
      // check_match(s, s.strstart, s.match_start, s.match_length); // for debug only

      /*** _tr_tally_dist(s, s.strstart - s.match_start,
                     s.match_length - MIN_MATCH, bflush); ***/
      bflush = trees._tr_tally(
        s,
        s.strstart - s.match_start,
        s.match_length - MIN_MATCH,
      );

      s.lookahead -= s.match_length;

      /* Insert new strings in the hash table only if the match length
       * is not too large. This saves time but degrades compression.
       */
      if (
        s.match_length <= s.max_lazy_match /*max_insert_length*/ &&
        s.lookahead >= MIN_MATCH
      ) {
        s.match_length--; /* string at strstart already in table */
        do {
          s.strstart++;
          /*** INSERT_STRING(s, s.strstart, hash_head); ***/
          s.ins_h = ((s.ins_h << s.hash_shift) ^
            s.window[s.strstart + MIN_MATCH - 1]) & s.hash_mask;
          hash_head = s.prev[s.strstart & s.w_mask] = s.head[s.ins_h];
          s.head[s.ins_h] = s.strstart;
          /***/
          /* strstart never exceeds WSIZE-MAX_MATCH, so there are
           * always MIN_MATCH bytes ahead.
           */
        } while (--s.match_length !== 0);
        s.strstart++;
      } else {
        s.strstart += s.match_length;
        s.match_length = 0;
        s.ins_h = s.window[s.strstart];
        /* UPDATE_HASH(s, s.ins_h, s.window[s.strstart+1]); */
        s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[s.strstart + 1]) &
          s.hash_mask;

        //#if MIN_MATCH != 3
        //                Call UPDATE_HASH() MIN_MATCH-3 more times
        //#endif
        /* If lookahead < MIN_MATCH, ins_h is garbage, but it does not
         * matter since it will be recomputed at next deflate call.
         */
      }
    } else {
      /* No match, output a literal byte */
      //Tracevv((stderr,"%c", s.window[s.strstart]));
      /*** _tr_tally_lit(s, s.window[s.strstart], bflush); ***/
      bflush = trees._tr_tally(s, 0, s.window[s.strstart]);

      s.lookahead--;
      s.strstart++;
    }
    if (bflush) {
      /*** FLUSH_BLOCK(s, 0); ***/
      flush_block_only(s, false);
      if (s.strm.avail_out === 0) {
        return BS_NEED_MORE;
      }
      /***/
    }
  }
  s.insert = ((s.strstart < (MIN_MATCH - 1)) ? s.strstart : MIN_MATCH - 1);

  if (flush === STATUS.Z_FINISH) {
    /*** FLUSH_BLOCK(s, 1); ***/
    flush_block_only(s, true);
    if (s.strm.avail_out === 0) {
      return BS_FINISH_STARTED;
    }
    /***/
    return BS_FINISH_DONE;
  }
  if (s.last_lit) {
    /*** FLUSH_BLOCK(s, 0); ***/
    flush_block_only(s, false);
    if (s.strm.avail_out === 0) {
      return BS_NEED_MORE;
    }
    /***/
  }
  return BS_BLOCK_DONE;
}

/* ===========================================================================
 * Same as above, but achieves better compression. We use a lazy
 * evaluation for matches: a match is finally adopted only if there is
 * no better match at the next window position.
 */
function deflate_slow(s: any, flush: any) {
  let hash_head; /* head of hash chain */
  let bflush; /* set if current block must be flushed */

  let max_insert;

  /* Process the input block. */
  for (;;) {
    /* Make sure that we always have enough lookahead, except
     * at the end of the input file. We need MAX_MATCH bytes
     * for the next match, plus MIN_MATCH bytes to insert the
     * string following the next match.
     */
    if (s.lookahead < MIN_LOOKAHEAD) {
      fill_window(s);
      if (s.lookahead < MIN_LOOKAHEAD && flush === STATUS.Z_NO_FLUSH) {
        return BS_NEED_MORE;
      }
      if (s.lookahead === 0) break; /* flush the current block */
    }

    /* Insert the string window[strstart .. strstart+2] in the
     * dictionary, and set hash_head to the head of the hash chain:
     */
    hash_head = 0 /*NIL*/;
    if (s.lookahead >= MIN_MATCH) {
      /*** INSERT_STRING(s, s.strstart, hash_head); ***/
      s.ins_h = ((s.ins_h << s.hash_shift) ^
        s.window[s.strstart + MIN_MATCH - 1]) & s.hash_mask;
      hash_head = s.prev[s.strstart & s.w_mask] = s.head[s.ins_h];
      s.head[s.ins_h] = s.strstart;
      /***/
    }

    /* Find the longest match, discarding those <= prev_length.
     */
    /* Remember the previous step's match before probing the next position. */
    s.prev_length = s.match_length;
    s.prev_match = s.match_start;
    s.match_length = MIN_MATCH - 1;

    if (
      hash_head !== 0 /*NIL*/ &&
      s.prev_length < s.max_lazy_match &&
      s.strstart - hash_head <= (s.w_size - MIN_LOOKAHEAD) /*MAX_DIST(s)*/
    ) {
      /* To simplify the code, we prevent matches with the string
       * of window index 0 (in particular we have to avoid a match
       * of the string with itself at the start of the input file).
       */
      s.match_length = longest_match(s, hash_head);
      /* longest_match() sets match_start */

      if (
        s.match_length <= 5 &&
        (s.strategy === Z_FILTERED ||
          (s.match_length === MIN_MATCH &&
            s.strstart - s.match_start > 4096 /*TOO_FAR*/))
      ) {
        /* If prev_match is also MIN_MATCH, match_start is garbage
         * but we will ignore the current match anyway.
         */
        s.match_length = MIN_MATCH - 1;
      }
    }
    /* If there was a match at the previous step and the current
     * match is not better, output the previous match:
     */
    if (s.prev_length >= MIN_MATCH && s.match_length <= s.prev_length) {
      max_insert = s.strstart + s.lookahead - MIN_MATCH;
      /* Do not insert strings in hash table beyond this. */

      //check_match(s, s.strstart-1, s.prev_match, s.prev_length);

      /***_tr_tally_dist(s, s.strstart - 1 - s.prev_match,
                     s.prev_length - MIN_MATCH, bflush);***/
      bflush = trees._tr_tally(
        s,
        s.strstart - 1 - s.prev_match,
        s.prev_length - MIN_MATCH,
      );
      /* Insert in hash table all strings up to the end of the match.
       * strstart-1 and strstart are already inserted. If there is not
       * enough lookahead, the last two strings are not inserted in
       * the hash table.
       */
      s.lookahead -= s.prev_length - 1;
      s.prev_length -= 2;
      do {
        if (++s.strstart <= max_insert) {
          /*** INSERT_STRING(s, s.strstart, hash_head); ***/
          s.ins_h =
            ((s.ins_h << s.hash_shift) ^ s.window[s.strstart + MIN_MATCH - 1]) &
            s.hash_mask;
          hash_head = s.prev[s.strstart & s.w_mask] = s.head[s.ins_h];
          s.head[s.ins_h] = s.strstart;
          /***/
        }
      } while (--s.prev_length !== 0);
      s.match_available = 0;
      s.match_length = MIN_MATCH - 1;
      s.strstart++;

      if (bflush) {
        /*** FLUSH_BLOCK(s, 0); ***/
        flush_block_only(s, false);
        if (s.strm.avail_out === 0) {
          return BS_NEED_MORE;
        }
        /***/
      }
    } else if (s.match_available) {
      /* If there was no match at the previous position, output a
       * single literal. If there was a match but the current match
       * is longer, truncate the previous match to a single literal.
       */
      //Tracevv((stderr,"%c", s->window[s->strstart-1]));
      /*** _tr_tally_lit(s, s.window[s.strstart-1], bflush); ***/
      bflush = trees._tr_tally(s, 0, s.window[s.strstart - 1]);

      if (bflush) {
        /*** FLUSH_BLOCK_ONLY(s, 0) ***/
        flush_block_only(s, false);
        /***/
      }
      s.strstart++;
      s.lookahead--;
      if (s.strm.avail_out === 0) {
        return BS_NEED_MORE;
      }
    } else {
      /* There is no previous match to compare with, wait for
       * the next step to decide.
       */
      s.match_available = 1;
      s.strstart++;
      s.lookahead--;
    }
  }
  //Assert (flush != Z_NO_FLUSH, "no flush?");
  if (s.match_available) {
    /* Flush the last deferred literal, if any. */
    //Tracevv((stderr,"%c", s->window[s->strstart-1]));
    /*** _tr_tally_lit(s, s.window[s.strstart-1], bflush); ***/
    bflush = trees._tr_tally(s, 0, s.window[s.strstart - 1]);

    s.match_available = 0;
  }
  s.insert = s.strstart < MIN_MATCH - 1 ? s.strstart : MIN_MATCH - 1;
  if (flush === STATUS.Z_FINISH) {
    /*** FLUSH_BLOCK(s, 1); ***/
    flush_block_only(s, true);
    if (s.strm.avail_out === 0) {
      return BS_FINISH_STARTED;
    }
    /***/
    return BS_FINISH_DONE;
  }
  if (s.last_lit) {
    /*** FLUSH_BLOCK(s, 0); ***/
    flush_block_only(s, false);
    if (s.strm.avail_out === 0) {
      return BS_NEED_MORE;
    }
    /***/
  }
  return BS_BLOCK_DONE;
}

/* ===========================================================================
 * For Z_RLE, simply look for runs of bytes, generate matches only of distance
 * one. Do not maintain a hash table. (It will be regenerated if this run of
 * deflate switches away from Z_RLE.)
 */
function deflate_rle(s: any, flush: any) {
  let bflush; /* set if current block must be flushed */
  let prev; /* byte at distance one to match */
  let scan, strend; /* scan goes up to strend for length of run */

  let _win = s.window;

  for (;;) {
    /* Make sure that we always have enough lookahead, except
     * at the end of the input file. We need MAX_MATCH bytes
     * for the longest run, plus one for the unrolled loop.
     */
    if (s.lookahead <= MAX_MATCH) {
      fill_window(s);
      if (s.lookahead <= MAX_MATCH && flush === STATUS.Z_NO_FLUSH) {
        return BS_NEED_MORE;
      }
      if (s.lookahead === 0) break; /* flush the current block */
    }

    /* See how many times the previous byte repeats */
    s.match_length = 0;
    if (s.lookahead >= MIN_MATCH && s.strstart > 0) {
      scan = s.strstart - 1;
      prev = _win[scan];
      if (
        prev === _win[++scan] &&
        prev === _win[++scan] &&
        prev === _win[++scan]
      ) {
        strend = s.strstart + MAX_MATCH;
        /* Unrolled run scan: empty body, all work happens in the condition. */
        do {
          /*jshint noempty:false*/
        } while (
          prev === _win[++scan] &&
          prev === _win[++scan] &&
          prev === _win[++scan] &&
          prev === _win[++scan] &&
          prev === _win[++scan] &&
          prev === _win[++scan] &&
          prev === _win[++scan] &&
          prev === _win[++scan] &&
          scan < strend
        );
        s.match_length = MAX_MATCH - (strend - scan);
        if (s.match_length > s.lookahead) {
          s.match_length = s.lookahead;
        }
      }
      //Assert(scan <= s->window+(uInt)(s->window_size-1), "wild scan");
    }

    /* Emit match if have run of MIN_MATCH or longer, else emit literal */
    if (s.match_length >= MIN_MATCH) {
      //check_match(s, s.strstart, s.strstart - 1, s.match_length);

      /*** _tr_tally_dist(s, 1, s.match_length - MIN_MATCH, bflush); ***/
      bflush = trees._tr_tally(s, 1, s.match_length - MIN_MATCH);

      s.lookahead -= s.match_length;
      s.strstart += s.match_length;
      s.match_length = 0;
    } else {
      /* No match, output a literal byte */
      //Tracevv((stderr,"%c", s->window[s->strstart]));
      /*** _tr_tally_lit(s, s.window[s.strstart], bflush); ***/
      bflush = trees._tr_tally(s, 0, s.window[s.strstart]);

      s.lookahead--;
      s.strstart++;
    }
    if (bflush) {
      /*** FLUSH_BLOCK(s, 0); ***/
      flush_block_only(s, false);
      if (s.strm.avail_out === 0) {
        return BS_NEED_MORE;
      }
      /***/
    }
  }
  s.insert = 0;
  if (flush === STATUS.Z_FINISH) {
    /*** FLUSH_BLOCK(s, 1); ***/
    flush_block_only(s, true);
    if (s.strm.avail_out === 0) {
      return BS_FINISH_STARTED;
    }
    /***/
    return BS_FINISH_DONE;
  }
  if (s.last_lit) {
    /*** FLUSH_BLOCK(s, 0); ***/
    flush_block_only(s, false);
    if (s.strm.avail_out === 0) {
      return BS_NEED_MORE;
    }
    /***/
  }
  return BS_BLOCK_DONE;
}

/* ===========================================================================
 * For Z_HUFFMAN_ONLY, do not look for matches. Do not maintain a hash table.
 * (It will be regenerated if this run of deflate switches away from Huffman.)
 */
function deflate_huff(s: any, flush: any) {
  let bflush; /* set if current block must be flushed */

  for (;;) {
    /* Make sure that we have a literal to write. */
    if (s.lookahead === 0) {
      fill_window(s);
      if (s.lookahead === 0) {
        if (flush === STATUS.Z_NO_FLUSH) {
          return BS_NEED_MORE;
        }
        break; /* flush the current block */
      }
    }

    /* Output a literal byte */
    s.match_length = 0;
    //Tracevv((stderr,"%c", s->window[s->strstart]));
    /*** _tr_tally_lit(s, s.window[s.strstart], bflush); ***/
    bflush = trees._tr_tally(s, 0, s.window[s.strstart]);
    s.lookahead--;
    s.strstart++;
    if (bflush) {
      /*** FLUSH_BLOCK(s, 0); ***/
      flush_block_only(s, false);
      if (s.strm.avail_out === 0) {
        return BS_NEED_MORE;
      }
      /***/
    }
  }
  s.insert = 0;
  if (flush === STATUS.Z_FINISH) {
    /*** FLUSH_BLOCK(s, 1); ***/
    flush_block_only(s, true);
    if (s.strm.avail_out === 0) {
      return BS_FINISH_STARTED;
    }
    /***/
    return BS_FINISH_DONE;
  }
  if (s.last_lit) {
    /*** FLUSH_BLOCK(s, 0); ***/
    flush_block_only(s, false);
    if (s.strm.avail_out === 0) {
      return BS_NEED_MORE;
    }
    /***/
  }
  return BS_BLOCK_DONE;
}

/* Values for max_lazy_match, good_match and max_chain_length, depending on
 * the desired pack level (0..9). The values given below have been tuned to
 * exclude worst case performance for pathological files. Better values may be
 * found for specific files.
 */
/* One row of the per-level tuning table: thresholds plus the block-state
 * function (`func`) used for that compression level. */
class Config {
  good_length: any;
  max_lazy: any;
  nice_length: any;
  max_chain: any;
  func: any;
  constructor(
    good_length: any,
    max_lazy: any,
    nice_length: any,
    max_chain: any,
    func: any,
  ) {
    this.good_length = good_length;
    this.max_lazy = max_lazy;
    this.nice_length = nice_length;
    this.max_chain = max_chain;
    this.func = func;
  }
}

// NOTE(review): declared `let ... : any` then immediately assigned; could be a
// single `const` with an inferred type, but kept as-is to match this port.
let configuration_table: any;
configuration_table = [
  /*      good lazy nice chain */
  new Config(0, 0, 0, 0, deflate_stored), /* 0 store only */
  new Config(4, 4, 8, 4, deflate_fast), /* 1 max speed, no lazy matches */
  new Config(4, 5, 16, 8, deflate_fast), /* 2 */
  new Config(4, 6, 32, 32, deflate_fast), /* 3 */

  new Config(4, 4, 16, 16, deflate_slow), /* 4 lazy matches */
  new Config(8, 16, 32, 32, deflate_slow), /* 5 */
  new Config(8, 16, 128, 128, deflate_slow), /* 6 */
  new Config(8, 32, 128, 256, deflate_slow), /* 7 */
  new Config(32, 128, 258, 1024, deflate_slow), /* 8 */
  new Config(32, 258, 258, 4096, deflate_slow), /* 9 max compression */
];

/* ===========================================================================
 * Initialize the "longest match" routines for a new zlib stream
 */
function lm_init(s: any) {
  s.window_size = 2 * s.w_size;

  /*** CLEAR_HASH(s); ***/
  zero(s.head); // Fill with NIL (= 0);

  /* Set the default configuration parameters:
   */
  s.max_lazy_match = configuration_table[s.level].max_lazy;
  s.good_match = configuration_table[s.level].good_length;
  s.nice_match = configuration_table[s.level].nice_length;
  s.max_chain_length = configuration_table[s.level].max_chain;

  s.strstart = 0;
  s.block_start = 0;
  s.lookahead = 0;
  s.insert = 0;
  s.match_length = s.prev_length = MIN_MATCH - 1;
  s.match_available = 0;
  s.ins_h = 0;
}

/* Internal per-stream compression state — a direct port of zlib's
 * `deflate_state` struct; field comments are carried over from zlib. */
export class DeflateState {
  strm: ZStream | null = null; /* pointer back to this zlib stream */
  status = 0; /* as the name implies */
  pending_buf: any = null; /* output still pending */
  pending_buf_size = 0; /* size of pending_buf */
  pending_out = 0; /* next pending byte to output to the stream */
  pending = 0; /* nb of bytes in the pending buffer */
  wrap = 0; /* bit 0 true for zlib, bit 1 true for gzip */
  gzhead: Header | null = null; /* gzip header information to write */
  gzindex = 0; /* where in extra, name, or comment */
  method = Z_DEFLATED; /* can only be DEFLATED */
  last_flush = -1; /* value of flush param for previous deflate call */

  w_size = 0; /* LZ77 window size (32K by default) */
  w_bits = 0; /* log2(w_size)  (8..16) */
  w_mask = 0; /* w_size - 1 */

  window: any = null;
  /* Sliding window. Input bytes are read into the second half of the window,
   * and move to the first half later to keep a dictionary of at least wSize
   * bytes. With this organization, matches are limited to a distance of
   * wSize-MAX_MATCH bytes, but this ensures that IO is always
   * performed with a length multiple of the block size.
   */

  window_size = 0;
  /* Actual size of window: 2*wSize, except when the user input buffer
   * is directly used as sliding window.
   */

  prev: any = null;
  /* Link to older string with same hash index. To limit the size of this
   * array to 64K, this link is maintained only for the last 32K strings.
   * An index in this array is thus a window index modulo 32K.
   */

  head: any = null; /* Heads of the hash chains or NIL. */

  ins_h = 0; /* hash index of string to be inserted */
  hash_size = 0; /* number of elements in hash table */
  hash_bits = 0; /* log2(hash_size) */
  hash_mask = 0; /* hash_size-1 */

  hash_shift = 0;
  /* Number of bits by which ins_h must be shifted at each input
   * step. It must be such that after MIN_MATCH steps, the oldest
   * byte no longer takes part in the hash key, that is:
   * hash_shift * MIN_MATCH >= hash_bits
   */

  block_start = 0;
  /* Window position at the beginning of the current output block. Gets
   * negative when the window is moved backwards.
   */

  match_length = 0; /* length of best match */
  prev_match = 0; /* previous match */
  match_available = 0; /* set if previous match exists */
  strstart = 0; /* start of string to insert */
  match_start = 0; /* start of matching string */
  lookahead = 0; /* number of valid bytes ahead in window */

  prev_length = 0;
  /* Length of the best match at previous step. Matches not greater than this
   * are discarded. This is used in the lazy match evaluation.
   */

  max_chain_length = 0;
  /* To speed up deflation, hash chains are never searched beyond this
   * length.  A higher limit improves compression ratio but degrades the
   * speed.
   */

  max_lazy_match = 0;
  /* Attempt to find a better match only when the current match is strictly
   * smaller than this value. This mechanism is used only for compression
   * levels >= 4.
   */
  // That's alias to max_lazy_match, don't use directly
  //this.max_insert_length = 0;
  /* Insert new strings in the hash table only if the match length is not
   * greater than this length. This saves time but degrades compression.
   * max_insert_length is used only for compression levels <= 3.
   */

  level = 0; /* compression level (1..9) */
  strategy = 0; /* favor or force Huffman coding*/

  good_match = 0;
  /* Use a faster search when the previous match is longer than this */

  nice_match = 0; /* Stop searching when current match exceeds this */

  /* used by trees.c: */

  /* Didn't use ct_data typedef below to suppress compiler warning */

  // struct ct_data_s dyn_ltree[HEAP_SIZE];   /* literal and length tree */
  // struct ct_data_s dyn_dtree[2*D_CODES+1]; /* distance tree */
  // struct ct_data_s bl_tree[2*BL_CODES+1];  /* Huffman tree for bit lengths */

  // Use flat array of DOUBLE size, with interleaved data,
  // because JS does not support effective
  dyn_ltree = new Uint16Array(HEAP_SIZE * 2);
  dyn_dtree = new Uint16Array((2 * D_CODES + 1) * 2);
  bl_tree = new Uint16Array((2 * BL_CODES + 1) * 2);

  l_desc = null; /* desc. for literal tree */
  d_desc = null; /* desc. for distance tree */
  bl_desc = null; /* desc. for bit length tree */

  //ush bl_count[MAX_BITS+1];
  bl_count = new Uint16Array(MAX_BITS + 1);
  /* number of codes at each bit length for an optimal tree */

  //int heap[2*L_CODES+1]; /* heap used to build the Huffman trees */
  heap = new Uint16Array(
    2 * L_CODES + 1,
  ); /* heap used to build the Huffman trees */
  heap_len = 0; /* number of elements in the heap */
  heap_max = 0; /* element of largest frequency */
  /* The sons of heap[n] are heap[2*n] and heap[2*n+1]. heap[0] is not used.
   * The same heap array is used to build all trees.
   */

  depth = new Uint16Array(2 * L_CODES + 1); //uch depth[2*L_CODES+1];
  /* Depth of each subtree used as tie breaker for trees of equal frequency */

  l_buf = 0; /* buffer index for literals or lengths */

  lit_bufsize = 0;
  /* Size of match buffer for literals/lengths.  There are 4 reasons for
   * limiting lit_bufsize to 64K:
   *   - frequencies can be kept in 16 bit counters
   *   - if compression is not successful for the first block, all input
   *     data is still in the window so we can still emit a stored block even
   *     when input comes from standard input. (This can also be done for
   *     all blocks if lit_bufsize is not greater than 32K.)
   *   - if compression is not successful for a file smaller than 64K, we can
   *     even emit a stored file instead of a stored block (saving 5 bytes).
   *     This is applicable only for zip (not gzip or zlib).
   *   - creating new Huffman trees less frequently may not provide fast
   *     adaptation to changes in the input data statistics. (Take for
   *     example a binary file with poorly compressible code followed by
   *     a highly compressible string table.) Smaller buffer sizes give
   *     fast adaptation but have of course the overhead of transmitting
   *     trees more frequently.
   *   - I can't count above 4
   */

  last_lit = 0; /* running index in l_buf */

  d_buf = 0;
  /* Buffer index for distances. To simplify the code, d_buf and l_buf have
   * the same number of elements. To use different lengths, an extra flag
   * array would be necessary.
   */

  opt_len = 0; /* bit length of current block with optimal trees */
  static_len = 0; /* bit length of current block with static trees */
  matches = 0; /* number of string matches in current block */
  insert = 0; /* bytes at end of window left to insert */

  bi_buf = 0;
  /* Output buffer. bits are inserted starting at the bottom (least
   * significant bits).
   */
  bi_valid = 0;
  /* Number of valid bits in bi_buf.  All bits above the last valid bit
   * are always zero.
   */

  // Used for window memory init. We safely ignore it for JS. That makes
  // sense only for pointers and memory check tools.
  //this.high_water = 0;
  /* High water mark offset in window for initialized bytes -- bytes above
   * this are set to zero in order to avoid memory check warnings when
   * longest match routines access bytes past the input.  This is then
   * updated to the new high water mark.
   */

  constructor() {
    /* Frequency/tree arrays start zeroed, matching zlib's calloc'd struct. */
    zero(this.dyn_ltree);
    zero(this.dyn_dtree);
    zero(this.bl_tree);
    zero(this.heap);
    zero(this.depth);
  }
}

/* Reset stream statistics and state machine without touching the window
 * or the compression parameters (zlib's deflateResetKeep). */
function deflateResetKeep(strm: ZStream) {
  let s;

  if (!strm || !strm.state) {
    // NOTE(review): other paths use `STATUS.Z_STREAM_ERROR as CODE` without
    // `.toString()`; the two spellings look inconsistent — confirm err()'s
    // expected argument type.
    return err(strm, STATUS.Z_STREAM_ERROR.toString() as CODE);
  }

  strm.total_in = strm.total_out = 0;
  strm.data_type = Z_UNKNOWN;

  s = strm.state;
  s.pending = 0;
  s.pending_out = 0;

  if (s.wrap < 0) {
    s.wrap = -s.wrap;
    /* was made negative by deflate(..., Z_FINISH); */
  }
  s.status = (s.wrap ? INIT_STATE : BUSY_STATE);
  strm.adler = (s.wrap === 2) ?
    0 // crc32(0, Z_NULL, 0)
    : 1; // adler32(0, Z_NULL, 0)
  s.last_flush = STATUS.Z_NO_FLUSH;
  trees._tr_init(s);
  return Z_OK;
}

/* Full reset: keep-reset plus re-initialization of the LZ77 matcher. */
function deflateReset(strm: ZStream) {
  let ret = deflateResetKeep(strm);
  if (ret === Z_OK) {
    lm_init(strm.state);
  }
  return ret;
}

/* Install a gzip header to be emitted by deflate(); only valid for
 * gzip-wrapped streams (wrap === 2). */
export function deflateSetHeader(strm: ZStream, head: Header) {
  if (!strm || !strm.state) return Z_STREAM_ERROR;
  if (strm.state.wrap !== 2) return Z_STREAM_ERROR;
  strm.state.gzhead = head;
  return Z_OK;
}

/* Allocate and initialize the deflate state for a stream. Negative
 * windowBits suppresses the zlib wrapper; windowBits > 15 selects gzip. */
export function deflateInit2(
  strm: ZStream,
  level: number,
  method: number,
  windowBits: number,
  memLevel: number,
  strategy: number,
): CODE {
  if (!strm) {
    // === Z_NULL
    return STATUS.Z_STREAM_ERROR as CODE;
  }
  let wrap = 1;

  if (level === Z_DEFAULT_COMPRESSION) {
    level = 6;
  }

  if (windowBits < 0) {
    /* suppress zlib wrapper */
    wrap = 0;
    windowBits = -windowBits;
  } else if (windowBits > 15) {
    wrap = 2; /* write gzip wrapper instead */
    windowBits -= 16;
  }

  if (
    memLevel < 1 || memLevel > MAX_MEM_LEVEL || method !== Z_DEFLATED ||
    windowBits < 8 || windowBits > 15 || level < 0 || level > 9 ||
    strategy < 0 || strategy > Z_FIXED
  ) {
    return err(strm, STATUS.Z_STREAM_ERROR.toString() as CODE);
  }

  if (windowBits === 8) {
    windowBits = 9;
  }
  /* until 256-byte window bug fixed */

  let s = new DeflateState();

  strm.state = s;
  s.strm = strm;

  s.wrap = wrap;
  s.gzhead = null;
  s.w_bits = windowBits;
  s.w_size = 1 << s.w_bits;
  s.w_mask = s.w_size - 1;

  s.hash_bits = memLevel + 7;
  s.hash_size = 1 << s.hash_bits;
  s.hash_mask = s.hash_size - 1;
  /* ~~ truncates toward zero, i.e. integer division here. */
  s.hash_shift = ~~((s.hash_bits + MIN_MATCH - 1) / MIN_MATCH);

  s.window = new Uint8Array(s.w_size * 2);
  s.head = new Uint16Array(s.hash_size);
  s.prev = new Uint16Array(s.w_size);

  // Don't need mem init magic for JS.
  //s.high_water = 0; /* nothing written to s->window yet */

  s.lit_bufsize = 1 << (memLevel + 6); /* 16K elements by default */

  s.pending_buf_size = s.lit_bufsize * 4;

  //overlay = (ushf *) ZALLOC(strm, s->lit_bufsize, sizeof(ush)+2);
  //s->pending_buf = (uchf *) overlay;
  s.pending_buf = new Uint8Array(s.pending_buf_size);

  // It is offset from `s.pending_buf` (size is `s.lit_bufsize * 2`)
  //s->d_buf = overlay + s->lit_bufsize/sizeof(ush);
  s.d_buf = 1 * s.lit_bufsize;
  //s->l_buf = s->pending_buf + (1+sizeof(ush))*s->lit_bufsize;
  s.l_buf = (1 + 2) * s.lit_bufsize;

  s.level = level;
  s.strategy = strategy;
  s.method = method;

  return deflateReset(strm);
}

/* Convenience initializer with zlib's default parameters. */
function deflateInit(strm: ZStream, level: number) {
  return deflateInit2(
    strm,
    level,
    Z_DEFLATED,
    MAX_WBITS,
    DEF_MEM_LEVEL,
    Z_DEFAULT_STRATEGY,
  );
}

/* Main compression entry point: writes header, runs the per-level
 * block-state function, and emits the trailer on Z_FINISH. */
export function deflate(strm: ZStream, flush: number) {
  let old_flush, s;
  let beg, val; // for gzip header write only

  if (
    !strm || !strm.state ||
    flush > STATUS.Z_BLOCK || flush < 0
  ) {
    return strm ? err(strm, STATUS.Z_STREAM_ERROR as CODE) : Z_STREAM_ERROR;
  }

  s = strm.state;

  if (
    !strm.output ||
    (!strm.input && strm.avail_in !== 0) ||
    (s.status === FINISH_STATE && flush !== STATUS.Z_FINISH)
  ) {
    return err(
      strm,
      (strm.avail_out === 0 ? STATUS.Z_BUF_ERROR : STATUS.Z_STREAM_ERROR) as CODE,
    );
  }

  s.strm = strm; /* just in case */
  old_flush = s.last_flush;
  s.last_flush = flush;

  /* Write the header */
  if (s.status === INIT_STATE) {
    if (s.wrap === 2) {
      // GZIP header
      strm.adler = 0; //crc32(0L, Z_NULL, 0);
      put_byte(s, 31);
      put_byte(s, 139);
      put_byte(s, 8);
      if (!s.gzhead) {
        // s->gzhead == Z_NULL
        put_byte(s, 0);
        put_byte(s, 0);
        put_byte(s, 0);
        put_byte(s, 0);
        put_byte(s, 0);
        put_byte(
          s,
          s.level === 9
            ? 2
            : (s.strategy >= Z_HUFFMAN_ONLY || s.level < 2 ? 4 : 0),
        );
        put_byte(s, OS_CODE);
        s.status = BUSY_STATE;
      } else {
        /* FLG byte: bit flags for the optional gzip header fields. */
        put_byte(
          s,
          (s.gzhead.text ? 1 : 0) +
            (s.gzhead.hcrc ? 2 : 0) +
            (!s.gzhead.extra ? 0 : 4) +
            (!s.gzhead.name ? 0 : 8) +
            (!s.gzhead.comment ?
              0 : 16),
        );
        put_byte(s, s.gzhead.time & 0xff);
        put_byte(s, (s.gzhead.time >> 8) & 0xff);
        put_byte(s, (s.gzhead.time >> 16) & 0xff);
        put_byte(s, (s.gzhead.time >> 24) & 0xff);
        put_byte(
          s,
          s.level === 9
            ? 2
            : (s.strategy >= Z_HUFFMAN_ONLY || s.level < 2 ? 4 : 0),
        );
        put_byte(s, s.gzhead.os & 0xff);
        if (s.gzhead.extra && s.gzhead.extra.length) {
          put_byte(s, s.gzhead.extra.length & 0xff);
          put_byte(s, (s.gzhead.extra.length >> 8) & 0xff);
        }
        if (s.gzhead.hcrc) {
          strm.adler = crc32(strm.adler, s.pending_buf, s.pending, 0);
        }
        s.gzindex = 0;
        s.status = EXTRA_STATE;
      }
    } // DEFLATE header
    else {
      let header = (Z_DEFLATED + ((s.w_bits - 8) << 4)) << 8;
      let level_flags = -1;

      if (s.strategy >= Z_HUFFMAN_ONLY || s.level < 2) {
        level_flags = 0;
      } else if (s.level < 6) {
        level_flags = 1;
      } else if (s.level === 6) {
        level_flags = 2;
      } else {
        level_flags = 3;
      }
      header |= (level_flags << 6);
      if (s.strstart !== 0) header |= PRESET_DICT;
      /* FCHECK: make the 16-bit header a multiple of 31 (zlib spec). */
      header += 31 - (header % 31);

      s.status = BUSY_STATE;
      putShortMSB(s, header);

      /* Save the adler32 of the preset dictionary: */
      if (s.strstart !== 0) {
        putShortMSB(s, strm.adler >>> 16);
        putShortMSB(s, strm.adler & 0xffff);
      }
      strm.adler = 1; // adler32(0L, Z_NULL, 0);
    }
  }

  //#ifdef GZIP
  if (s.status === EXTRA_STATE) {
    if (s.gzhead!.extra /* != Z_NULL*/) {
      beg = s.pending; /* start of bytes to update crc */

      while (s.gzindex < (s.gzhead!.extra.length & 0xffff)) {
        if (s.pending === s.pending_buf_size) {
          if (s.gzhead!.hcrc && s.pending > beg) {
            strm.adler = crc32(strm.adler, s.pending_buf, s.pending - beg, beg);
          }
          flush_pending(strm);
          beg = s.pending;
          if (s.pending === s.pending_buf_size) {
            break;
          }
        }
        put_byte(s, s.gzhead!.extra[s.gzindex] & 0xff);
        s.gzindex++;
      }
      if (s.gzhead!.hcrc && s.pending > beg) {
        strm.adler = crc32(strm.adler, s.pending_buf, s.pending - beg, beg);
      }
      if (s.gzindex === s.gzhead!.extra.length) {
        s.gzindex = 0;
        s.status = NAME_STATE;
      }
    } else {
      s.status = NAME_STATE;
    }
  }
  if (s.status === NAME_STATE) {
    if (s.gzhead!.name /* != Z_NULL*/) {
      beg = s.pending; /* start of bytes to update crc */
      //int val;

      do {
        if (s.pending === s.pending_buf_size) {
          if (s.gzhead!.hcrc && s.pending > beg) {
            strm.adler = crc32(strm.adler, s.pending_buf, s.pending - beg, beg);
          }
          flush_pending(strm);
          beg = s.pending;
          if (s.pending === s.pending_buf_size) {
            val = 1;
            break;
          }
        }
        // JS specific: little magic to add zero terminator to end of string
        if (s.gzindex < s.gzhead!.name.length) {
          val = s.gzhead!.name.charCodeAt(s.gzindex++) & 0xff;
        } else {
          val = 0;
        }
        put_byte(s, val);
      } while (val !== 0);

      if (s.gzhead!.hcrc && s.pending > beg) {
        strm.adler = crc32(strm.adler, s.pending_buf, s.pending - beg, beg);
      }
      if (val === 0) {
        s.gzindex = 0;
        s.status = COMMENT_STATE;
      }
    } else {
      s.status = COMMENT_STATE;
    }
  }
  if (s.status === COMMENT_STATE) {
    if (s.gzhead!.comment /* != Z_NULL*/) {
      beg = s.pending; /* start of bytes to update crc */
      //int val;

      do {
        if (s.pending === s.pending_buf_size) {
          if (s.gzhead!.hcrc && s.pending > beg) {
            strm.adler = crc32(strm.adler, s.pending_buf, s.pending - beg, beg);
          }
          flush_pending(strm);
          beg = s.pending;
          if (s.pending === s.pending_buf_size) {
            val = 1;
            break;
          }
        }
        // JS specific: little magic to add zero terminator to end of string
        if (s.gzindex < s.gzhead!.comment.length) {
          val = s.gzhead!.comment.charCodeAt(s.gzindex++) & 0xff;
        } else {
          val = 0;
        }
        put_byte(s, val);
      } while (val !== 0);

      if (s.gzhead!.hcrc && s.pending > beg) {
        strm.adler = crc32(strm.adler, s.pending_buf, s.pending - beg, beg);
      }
      if (val === 0) {
        s.status = HCRC_STATE;
      }
    } else {
      s.status = HCRC_STATE;
    }
  }
  if (s.status === HCRC_STATE) {
    if (s.gzhead!.hcrc) {
      if (s.pending + 2 > s.pending_buf_size) {
        flush_pending(strm);
      }
      if (s.pending + 2 <= s.pending_buf_size) {
        put_byte(s, strm.adler & 0xff);
        put_byte(s, (strm.adler >> 8) & 0xff);
        strm.adler = 0; //crc32(0L, Z_NULL, 0);
        s.status = BUSY_STATE;
      }
    } else {
      s.status = BUSY_STATE;
    }
  }
  //#endif

  /* Flush as much pending output as possible */
  if (s.pending !== 0) {
    flush_pending(strm);
    if (strm.avail_out === 0) {
      /* Since avail_out is 0, deflate will be called again with
       * more output space, but possibly with both pending and
       * avail_in equal to zero. There won't be anything to do,
       * but this is not an error situation so make sure we
       * return OK instead of BUF_ERROR at next call of deflate:
       */
      s.last_flush = -1;
      return Z_OK;
    }

    /* Make sure there is something to do and avoid duplicate consecutive
     * flushes. For repeated and useless calls with Z_FINISH, we keep
     * returning Z_STREAM_END instead of Z_BUF_ERROR.
     */
  } else if (
    strm.avail_in === 0 &&
    rank(flush) <= rank(old_flush) &&
    flush !== STATUS.Z_FINISH
  ) {
    return err(strm, STATUS.Z_BUF_ERROR as CODE);
  }

  /* User must not provide more input after the first FINISH: */
  if (s.status === FINISH_STATE && strm.avail_in !== 0) {
    return err(strm, STATUS.Z_BUF_ERROR as CODE);
  }

  /* Start a new block or continue the current one.
   */
  if (
    strm.avail_in !== 0 || s.lookahead !== 0 ||
    (flush !== STATUS.Z_NO_FLUSH && s.status !== FINISH_STATE)
  ) {
    /* Dispatch to the strategy-specific block-state function. */
    let bstate = (s.strategy === Z_HUFFMAN_ONLY)
      ? deflate_huff(s, flush)
      : (s.strategy === Z_RLE
        ? deflate_rle(s, flush)
        : configuration_table[s.level].func(s, flush));

    if (bstate === BS_FINISH_STARTED || bstate === BS_FINISH_DONE) {
      s.status = FINISH_STATE;
    }
    if (bstate === BS_NEED_MORE || bstate === BS_FINISH_STARTED) {
      if (strm.avail_out === 0) {
        s.last_flush = -1;
        /* avoid BUF_ERROR next call, see above */
      }
      return STATUS.Z_OK;
      /* If flush != Z_NO_FLUSH && avail_out == 0, the next call
       * of deflate should use the same flush parameter to make sure
       * that the flush is complete. So we don't have to output an
       * empty block here, this will be done at next call. This also
       * ensures that for a very small output buffer, we emit at most
       * one empty block.
       */
    }
    if (bstate === BS_BLOCK_DONE) {
      if (flush === STATUS.Z_PARTIAL_FLUSH) {
        trees._tr_align(s);
      } else if (flush !== STATUS.Z_BLOCK) {
        /* FULL_FLUSH or SYNC_FLUSH */

        trees._tr_stored_block(s, 0, 0, false);
        /* For a full flush, this empty block will be recognized
         * as a special marker by inflate_sync().
         */
        if (flush === STATUS.Z_FULL_FLUSH) {
          /*** CLEAR_HASH(s); ***/
          /* forget history */
          zero(s.head!); // Fill with NIL (= 0);

          if (s.lookahead === 0) {
            s.strstart = 0;
            s.block_start = 0;
            s.insert = 0;
          }
        }
      }
      flush_pending(strm);
      if (strm.avail_out === 0) {
        s.last_flush = -1; /* avoid BUF_ERROR at next call, see above */
        return STATUS.Z_OK;
      }
    }
  }
  //Assert(strm->avail_out > 0, "bug2");
  //if (strm.avail_out <= 0) { throw new Error("bug2");}

  if (flush !== STATUS.Z_FINISH) return STATUS.Z_OK;
  if (s.wrap <= 0) return STATUS.Z_STREAM_END;

  /* Write the trailer */
  if (s.wrap === 2) {
    /* gzip trailer: CRC-32 then total input length, both little-endian. */
    put_byte(s, strm.adler & 0xff);
    put_byte(s, (strm.adler >> 8) & 0xff);
    put_byte(s, (strm.adler >> 16) & 0xff);
    put_byte(s, (strm.adler >> 24) & 0xff);
    put_byte(s, strm.total_in & 0xff);
    put_byte(s, (strm.total_in >> 8) & 0xff);
    put_byte(s, (strm.total_in >> 16) & 0xff);
    put_byte(s, (strm.total_in >> 24) & 0xff);
  } else {
    /* zlib trailer: Adler-32, big-endian. */
    putShortMSB(s, strm.adler >>> 16);
    putShortMSB(s, strm.adler & 0xffff);
  }

  flush_pending(strm);
  /* If avail_out is zero, the application will call deflate again
   * to flush the rest.
   */
  if (s.wrap > 0) s.wrap = -s.wrap;
  /* write the trailer only once! */
  return s.pending !== 0 ? Z_OK : Z_STREAM_END;
}

/* Release the stream's deflate state; errors if called mid-header or
 * with an invalid state. */
export function deflateEnd(strm: ZStream): any {
  let status;

  if (!strm /*== Z_NULL*/ || !strm.state /*== Z_NULL*/) {
    return Z_STREAM_ERROR;
  }

  status = strm.state.status;
  if (
    status !== INIT_STATE &&
    status !== EXTRA_STATE &&
    status !== NAME_STATE &&
    status !== COMMENT_STATE &&
    status !== HCRC_STATE &&
    status !== BUSY_STATE &&
    status !== FINISH_STATE
  ) {
    return err(strm, STATUS.Z_STREAM_ERROR as CODE);
  }

  strm.state = null;

  return status === BUSY_STATE ?
    err(strm, STATUS.Z_DATA_ERROR as CODE) : Z_OK;
}

/* =========================================================================
 * Initializes the compression dictionary from the given byte
 * sequence without producing any compressed output.
 */
export function deflateSetDictionary(
  strm: ZStream,
  dictionary: Uint8Array,
): any {
  let dictLength = dictionary.length;
  let s;
  let str, n;
  let wrap;
  let avail;
  let next;
  let input;
  let tmpDict;

  if (!strm /*== Z_NULL*/ || !strm.state /*== Z_NULL*/) {
    return Z_STREAM_ERROR;
  }

  s = strm.state;
  wrap = s.wrap;

  /* Dictionaries are rejected for gzip streams, after compression has
   * started on a zlib stream, or when lookahead data already exists. */
  if (wrap === 2 || (wrap === 1 && s.status !== INIT_STATE) || s.lookahead) {
    return Z_STREAM_ERROR;
  }

  /* when using zlib wrappers, compute Adler-32 for provided dictionary */
  if (wrap === 1) {
    /* adler32(strm->adler, dictionary, dictLength); */
    strm.adler = adler32(strm.adler, dictionary, dictLength, 0);
  }

  s.wrap = 0; /* avoid computing Adler-32 in read_buf */

  /* if dictionary would fill window, just replace the history */
  if (dictLength >= s.w_size) {
    if (wrap === 0) {
      /* already empty otherwise */
      /*** CLEAR_HASH(s); ***/
      zero(s.head!); // Fill with NIL (= 0);
      s.strstart = 0;
      s.block_start = 0;
      s.insert = 0;
    }
    /* use the tail */
    // dictionary = dictionary.slice(dictLength - s.w_size);
    tmpDict = new Uint8Array(s.w_size);
    tmpDict.set(dictionary.subarray(dictLength - s.w_size, dictLength), 0);
    dictionary = tmpDict;
    dictLength = s.w_size;
  }
  /* insert dictionary into window and hash */
  /* Temporarily point the stream's input at the dictionary, then restore. */
  avail = strm.avail_in;
  next = strm.next_in;
  input = strm.input;
  strm.avail_in = dictLength;
  strm.next_in = 0;
  strm.input = dictionary;
  fill_window(s);
  while (s.lookahead >= MIN_MATCH) {
    str = s.strstart;
    n = s.lookahead - (MIN_MATCH - 1);
    do {
      /* UPDATE_HASH(s, s->ins_h, s->window[str + MIN_MATCH-1]); */
      s.ins_h =
        ((s.ins_h << s.hash_shift) ^ s.window![str + MIN_MATCH - 1]) &
        s.hash_mask;

      s.prev![str & s.w_mask] = s.head![s.ins_h];

      s.head![s.ins_h] = str;
      str++;
    } while (--n);
    s.strstart = str;
    s.lookahead = MIN_MATCH - 1;
    fill_window(s);
  }
  s.strstart += s.lookahead;
  s.block_start = s.strstart;
  s.insert = s.lookahead;
  s.lookahead = 0;
  s.match_length = s.prev_length = MIN_MATCH - 1;
  s.match_available = 0;
  strm.next_in = next;
  strm.input = input;
  strm.avail_in = avail;
  s.wrap = wrap;
  /* can only use zlib-style dictionary again after a reset */
  return Z_OK;
}
the_stack
import {jsx, Global} from '@emotion/core' import type {InterpolationWithTheme} from '@emotion/core' import React from 'react' import type {History} from 'history' import facepaint from 'facepaint' import {ThemeProvider, useTheme as useEmotionTheme} from 'emotion-theming' import preval from 'preval.macro' import {ErrorBoundary} from 'react-error-boundary' import type {FallbackProps} from 'react-error-boundary' import {Router, Switch, Route, Link, useParams} from 'react-router-dom' import {Tabs, TabList, Tab, TabPanels, TabPanel} from '@reach/tabs' import { RiToolsLine, RiFlagLine, RiExternalLinkLine, RiArrowRightSLine, RiArrowLeftSLine, RiMoonClearLine, RiSunLine, RiEdit2Fill, } from 'react-icons/ri' import { CgDice1, CgDice2, CgDice3, CgDice4, CgDice5, CgDice6, } from 'react-icons/cg' import {FaDiceD20} from 'react-icons/fa' import Logo from './assets/logo' import getTheme, {prismThemeLight, prismThemeDark} from './theme' import type {Theme} from './theme' import type {FileInfo, LazyComponents} from './types' const styleTag = document.createElement('style') styleTag.innerHTML = [ preval`module.exports = require('../other/css-file-to-string')('@reach/tabs/styles.css')`, ].join('\n') document.head.prepend(styleTag) const extrIcons = [null, CgDice1, CgDice2, CgDice3, CgDice4, CgDice5, CgDice6] const getDiceIcon = (number: number) => extrIcons[number] ?? 
FaDiceD20 function getDistanceFromTopOfPage(element: HTMLElement | null) { let distance = 0 while (element) { distance += element.offsetTop - element.scrollTop + element.clientTop element = element.offsetParent as HTMLElement | null } return distance } const totallyCenteredStyles = { minWidth: '100%', minHeight: '100%', display: 'grid', } const visuallyHiddenStyles: InterpolationWithTheme<Theme> = { border: '0', clip: 'rect(0 0 0 0)', height: '1px', margin: '-1px', overflow: 'hidden', padding: '0', position: 'absolute', width: '1px', } const exerciseTypes = ['final', 'exercise', 'instruction'] as const const isExerciseType = (type: string): type is typeof exerciseTypes[number] => // .includes *should* allow you to pass any type, but it does not :-( exerciseTypes.includes(type as typeof exerciseTypes[number]) type ExerciseInfo = { id: string title: string number: number exercise: Array<FileInfo> final: Array<FileInfo> instruction: FileInfo next?: ExerciseInfo previous?: ExerciseInfo } type DarkModeState = 'dark' | 'light' type SetDarkModeState = React.Dispatch<React.SetStateAction<DarkModeState>> function renderReactApp({ history, projectTitle, filesInfo, lazyComponents, gitHubRepoUrl, render, }: { history: History projectTitle: string filesInfo: Array<FileInfo> lazyComponents: LazyComponents gitHubRepoUrl: string render: (ui: React.ReactElement) => void }) { const useTheme = () => useEmotionTheme<Theme>() const exerciseInfo: Array<ExerciseInfo> = [] for (const fileInfo of filesInfo) { const type = fileInfo.type if (isExerciseType(type)) { exerciseInfo[fileInfo.number] = exerciseInfo[fileInfo.number] ?? 
{ exercise: [], final: [], } const info = exerciseInfo[fileInfo.number] if (type === 'instruction') { info.instruction = fileInfo const {title, number, id} = fileInfo Object.assign(info, {title, number, id}) } else { info[type].push(fileInfo) } } } for (const info of exerciseInfo.filter(Boolean)) { info.next = exerciseInfo[info.number + 1] info.previous = exerciseInfo[info.number - 1] } const mq = facepaint([ '@media(min-width: 576px)', '@media(min-width: 768px)', '@media(min-width: 992px)', '@media(min-width: 1200px)', ]) const tabStyles = ({ theme, }: { theme: Theme }): InterpolationWithTheme<Theme> => ({ background: theme.backgroundLight, borderTop: `1px solid ${theme.sky}`, height: '100%', position: 'relative', zIndex: 10, '[data-reach-tab]': { padding: '0.5rem 1.25rem', ':hover': { color: theme.primary, }, }, '[data-reach-tab][data-selected]': { background: theme.backgroundLight, border: 'none', svg: {fill: theme.primary, color: theme.primary}, ':hover': { color: 'inherit', }, }, }) function FileTabs({ isOpen, files, }: { isOpen: boolean files: Array<FileInfo> }) { const theme = useTheme() const [tabIndex, setTabIndex] = React.useState(0) const renderedTabs = React.useRef<Set<number>>() if (!renderedTabs.current) { renderedTabs.current = new Set([0]) } function handleTabChange(index: number) { setTabIndex(index) renderedTabs.current?.add(index) } if (files.length == 1) { const {title, extraCreditTitle, isolatedPath} = files[0] return ( <Sandbox isOpen={isOpen} isolatedPath={isolatedPath} isolatedPathLinkContent="Open on isolated page" title={extraCreditTitle ?? title} > {renderedTabs.current.has(0) ? ( <iframe title={extraCreditTitle ?? title} src={isolatedPath} css={{border: 'none', width: '100%', height: '100%'}} /> ) : null} </Sandbox> ) } return isOpen ? 
( <Tabs index={tabIndex} onChange={handleTabChange} css={tabStyles({theme})} > <TabList css={{ height: 50, background: theme.skyLight, overflowX: 'auto', whiteSpace: 'nowrap', }} > {files.map( ({id, filename, extraCreditNumber = -1, isExtraCredit, type}) => ( <Tab key={id} css={{display: 'flex', alignItems: 'center'}}> {isExtraCredit ? ( <> {React.createElement(getDiceIcon(extraCreditNumber), { size: 20, style: {marginRight: 5}, })} <span>Extra Credit</span> </> ) : type === 'final' ? ( 'Solution' ) : type === 'exercise' ? ( 'Exercise' ) : ( filename )} </Tab> ), )} </TabList> <TabPanels> {files.map(({title, extraCreditTitle, isolatedPath, id}, index) => ( <TabPanel key={id}> <Sandbox isOpen={tabIndex === index} isolatedPath={isolatedPath} isolatedPathLinkContent="Open on isolated page" title={extraCreditTitle ?? title} > {renderedTabs.current?.has(0) ? ( <iframe title={extraCreditTitle ?? title} src={isolatedPath} css={{border: 'none', width: '100%', height: '100%'}} /> ) : null} </Sandbox> </TabPanel> ))} </TabPanels> </Tabs> ) : null } FileTabs.displayName = 'FileTabs' function Sandbox({ isOpen, isolatedPath, isolatedPathLinkContent, title, children, }: { isOpen: boolean isolatedPath: string isolatedPathLinkContent: string title: string children: React.ReactNode }) { const renderContainerRef = React.useRef(null) const [height, setHeight] = React.useState(0) React.useLayoutEffect(() => { if (isOpen) { setHeight(getDistanceFromTopOfPage(renderContainerRef.current)) } }, [isOpen]) return ( <> <div css={{ display: 'flex', justifyContent: 'space-between', width: '100%', padding: '1rem', }} > <div>{title}</div> <a css={{ display: 'flex', justifyContent: 'flex-end', textDecoration: 'none', }} href={isolatedPath} target="_blank" rel="noreferrer" > <RiExternalLinkLine css={{marginRight: '0.25rem'}} />{' '} {isolatedPathLinkContent} </a> </div> <div ref={renderContainerRef} css={[ totallyCenteredStyles, mq({ color: '#19212a', background: 'white', minHeight: 500, height: 
['auto', 'auto', `calc(100vh - ${height}px)`], overflowY: ['auto', 'auto', 'scroll'], }), ]} > <div className="final-container render-container">{children}</div> </div> </> ) } Sandbox.displayName = 'Sandbox' function ExerciseContainer(props: { mode: DarkModeState setMode: SetDarkModeState }) { const theme = useTheme() const {exerciseNumber: exerciseNumberString} = useParams<{ exerciseNumber: string }>() const exerciseNumber = Number(exerciseNumberString) const [tabIndex, setTabIndex] = React.useState(0) const renderedTabs = React.useRef<Set<number>>() if (!renderedTabs.current) { renderedTabs.current = new Set([0]) } function handleTabChange(index: number) { setTabIndex(index) renderedTabs.current?.add(index) } // allow the user to continue to the next exercise with the left/right keys React.useEffect(() => { const handleKeyup = (e: KeyboardEvent) => { if (e.target !== document.body) return if (e.key === 'ArrowRight') { const {number} = exerciseInfo[exerciseNumber + 1] || exerciseInfo[1] history.push(`/${number}`) } else if (e.key === 'ArrowLeft') { const {number} = exerciseInfo[exerciseNumber - 1] || exerciseInfo[exerciseInfo.length - 1] history.push(`/${number}`) } } document.body.addEventListener('keyup', handleKeyup) return () => document.body.removeEventListener('keyup', handleKeyup) }, [exerciseNumber]) if (isNaN(exerciseNumber) || !exerciseInfo[exerciseNumber]) { return <NotFound /> } const {instruction, exercise, final} = exerciseInfo[exerciseNumber] let instructionElement const comp = lazyComponents[instruction.id] if (comp) { instructionElement = React.createElement(comp) } return ( <> <Navigation exerciseNumber={exerciseNumber} mode={props.mode} setMode={props.setMode} /> <div css={{minHeight: 'calc(100vh - 60px)'}}> <div css={mq({ display: 'grid', gridTemplateColumns: ['100%', '100%', '50% 50%'], gridTemplateRows: 'auto', })} > <div css={mq({ position: 'relative', gridRow: [2, 2, 'auto'], height: ['auto', 'auto', 'calc(100vh - 60px)'], overflowY: 
['auto', 'auto', 'scroll'], padding: '1rem 2rem 3rem 2rem', borderTop: `1px solid ${theme.sky}`, '::-webkit-scrollbar': { background: theme.skyLight, borderLeft: `1px solid ${theme.sky}`, borderRight: `1px solid ${theme.sky}`, width: 10, }, '::-webkit-scrollbar-thumb': { background: theme.skyDark, }, 'p, li': { fontSize: 18, lineHeight: 1.5, }, blockquote: { borderLeft: `2px solid ${theme.primary}`, margin: 0, paddingLeft: '1.5rem', }, pre: { background: theme.sky, fontSize: '80%', margin: '0 -2rem', padding: '2rem', }, ul: {padding: 0, listStylePosition: 'inside'}, 'ul ul': {paddingLeft: '2rem'}, 'p > code': { background: theme.sky, color: theme.text, fontSize: '85%', padding: '3px 5px', }, })} > <React.Suspense fallback={<div css={totallyCenteredStyles}>Loading...</div>} > <div css={{ position: 'absolute', top: 20, right: 20, fontSize: '1.2rem', color: theme.textLightest, }} > <a href={`${gitHubRepoUrl}/edit/main/${instruction.filePath}`} title="edit docs (in the original repo, e.g. to fix typos)" target="_blank" rel="noopener noreferrer nofollow" > <span aria-label="edit"> <RiEdit2Fill /> </span> </a> </div> <div className="instruction-container"> {instructionElement} </div> </React.Suspense> </div> <div css={{background: theme.background}}> <Tabs index={tabIndex} onChange={handleTabChange} css={tabStyles({theme})} > <TabList css={{height: 50, background: theme.skyLight}}> <Tab css={{display: 'flex', alignItems: 'center'}}> <RiToolsLine size="20" color={theme.textLightest} css={{marginRight: 5}} /> <span>Exercise {exerciseNumber}</span> </Tab> <Tab css={{display: 'flex', alignItems: 'center'}}> <RiFlagLine size="18" color={theme.textLightest} css={{marginRight: 5}} /> Final </Tab> </TabList> <TabPanels> <TabPanel> <FileTabs key={exerciseNumber} isOpen={tabIndex === 0} files={exercise} /> </TabPanel> <TabPanel> <FileTabs key={exerciseNumber} isOpen={tabIndex === 1} files={final} /> </TabPanel> </TabPanels> </Tabs> </div> </div> </div> </> ) } 
ExerciseContainer.displayName = 'ExerciseContainer' function Navigation({ exerciseNumber, mode, setMode, }: { exerciseNumber?: number mode: DarkModeState setMode: SetDarkModeState }) { const theme = useTheme() const info = exerciseNumber ? exerciseInfo[exerciseNumber] : null return ( <div css={mq({ a: {textDecoration: 'none'}, alignItems: 'center', background: theme.backgroundLight, boxShadow: '0 0.9px 1.5px -18px rgba(0, 0, 0, 0.024), 0 2.4px 4.1px -18px rgba(0, 0, 0, 0.035), 0 5.7px 9.9px -18px rgba(0, 0, 0, 0.046), 0 19px 33px -18px rgba(0, 0, 0, 0.07)', display: 'grid', gridTemplateColumns: exerciseNumber ? ['3fr .5fr', '1fr 2fr', '1fr 1fr'] : '1fr 1fr', height: 60, padding: ['0 1rem', '0 1.75rem'], width: '100%', 'span[role="img"]': { fontSize: [24, 24, 'inherit'], }, '.exercise-title': { color: theme.text, display: ['none', 'inline-block', 'inline-block'], fontSize: 15, opacity: 0.9, ':hover': { opacity: 1, }, }, })} > <div css={{display: 'flex', alignItems: 'center'}}> <Link to="/" css={{display: 'flex', alignItems: 'center', color: 'inherit'}} > <Logo css={{marginRight: '.5rem'}} strokeWidth={0.8} /> <div css={{display: 'flex', flexDirection: 'column'}}> <h1 css={{fontSize: 16, margin: 0}}>{projectTitle}</h1> <span css={{fontSize: 14, opacity: '.8'}}>Epic React</span> </div> </Link> </div> <div css={{ alignItems: 'center', display: 'grid', gridTemplateColumns: exerciseNumber ? '3fr 2fr 3fr 3rem' : '1fr', paddingLeft: '1rem', width: '100%', }} > {info ? ( <> <div> {info.previous ? 
( <Link to={`/${info.previous.number}`} css={{display: 'flex', alignItems: 'center'}} > <RiArrowLeftSLine size={20} /> <span className="exercise-title"> {info.previous.title} </span> </Link> ) : null} </div> <div css={{ display: 'flex', alignItems: 'center', justifyContent: 'center', }} > {exerciseInfo.map(e => ( <React.Fragment key={e.id}> <input id={`exercise-dot-${e.id}`} type="radio" name="exercise-dots" checked={e.id === info.id} onChange={() => history.push(`/${e.number}`)} css={visuallyHiddenStyles} /> <label htmlFor={`exercise-dot-${e.id}`} title={e.title}> <span css={visuallyHiddenStyles}>{e.title}</span> <span css={{ cursor: 'pointer', display: 'block', background: e.id === info.id ? theme.primary : theme.skyDark, borderRadius: '50%', height: 12, width: 12, margin: '0 6px', }} /> </label> </React.Fragment> ))} </div> <div css={{textAlign: 'right'}}> {info.next ? ( <Link to={`/${info.next.number}`} css={{ alignItems: 'center', display: 'flex', justifyContent: 'flex-end', }} > <span className="exercise-title">{info.next.title}</span>{' '} <RiArrowRightSLine size={20} /> </Link> ) : null} </div> </> ) : null} <div css={{ display: 'flex', alignItems: 'center', justifyContent: 'flex-end', }} > <button css={{ cursor: 'pointer', border: 'none', background: 'transparent', color: theme.text, textAlign: 'right', }} onClick={() => setMode(mode === 'light' ? 'dark' : 'light')} > {mode === 'light' ? 
( <RiMoonClearLine size="1.25rem" color="currentColor" /> ) : ( <RiSunLine size="1.25rem" color="currentColor" /> )} </button> </div> </div> </div> ) } Navigation.displayName = 'Navigation' function Home(props: {mode: DarkModeState; setMode: SetDarkModeState}) { const theme = useTheme() return ( <> <Navigation mode={props.mode} setMode={props.setMode} /> <div css={mq({ width: '100%', maxWidth: 800, minHeight: '85vh', margin: '0 auto', padding: '1rem', display: 'flex', flexDirection: 'column', alignItems: 'center', justifyContent: 'center', })} > <Logo size={120} color={theme.skyDark} strokeWidth={0.7} css={mq({opacity: 0.5, marginTop: ['3rem', 0]})} /> <h1 css={mq({ textAlign: 'center', marginBottom: ['4rem', '4rem'], marginTop: '3rem', })} > {projectTitle} </h1> <div css={mq({ width: '100%', display: 'grid', gridTemplateColumns: ['auto', 'auto'], gridGap: '1rem', })} > {exerciseInfo .filter(Boolean) .map(({id, number, title, final, exercise}) => { return ( <div key={id} css={mq({ alignItems: 'center', background: theme.backgroundLight, borderRadius: 5, boxShadow: '0 0px 1.7px -7px rgba(0, 0, 0, 0.02), 0 0px 4px -7px rgba(0, 0, 0, 0.028), 0 0px 7.5px -7px rgba(0, 0, 0, 0.035), 0 0px 13.4px -7px rgba(0, 0, 0, 0.042), 0 0px 25.1px -7px rgba(0, 0, 0, 0.05), 0 0px 60px -7px rgba(0, 0, 0, 0.07)', display: 'grid', fontSize: '18px', gridTemplateColumns: ['auto', '60% 40%'], position: 'relative', ':hover': { background: theme.skyLight, small: { opacity: 1, }, '::before': { background: theme.primary, border: `2px solid ${theme.primary}`, color: theme.background, }, }, '::before': { alignItems: 'center', background: theme.backgroundLight, border: `2px solid ${theme.skyDark}`, borderRadius: 12, color: theme.textLightest, content: `"${number}"`, display: ['none', 'flex'], fontSize: 12, fontWeight: 600, height: 24, justifyContent: 'center', marginLeft: 23, marginTop: 0, paddingTop: 1, paddingLeft: 1, position: 'absolute', textAlign: 'center', width: 24, zIndex: 1, }, '::after': 
{ content: '""', position: 'absolute', display: ['none', 'block'], width: 2, height: 'calc(100% + 1rem)', background: theme.skyDark, marginLeft: 34, }, ':first-of-type': { '::after': { content: '""', position: 'absolute', display: ['none', 'block'], width: 2, height: 'calc(50% + 1rem)', background: theme.skyDark, marginLeft: 34, marginTop: '4rem', }, }, ':last-of-type': { '::after': { content: '""', position: 'absolute', display: ['none', 'block'], width: 2, height: 'calc(50% + 1rem)', background: theme.skyDark, marginLeft: 34, marginBottom: '4rem', }, }, })} > <Link to={`/${number}`} css={mq({ padding: ['2rem 2rem 0 2rem', '2rem 2.5rem 2rem 2rem'], display: 'flex', alignItems: 'center', textDecoration: 'none', color: 'inherit', ':hover': { h3: { textDecoration: 'underline', textDecorationColor: 'rgba(0,0,0,0.3)', }, }, })} > <small css={mq({ display: ['block', 'none'], opacity: 0.7, fontSize: 14, })} > {number} </small> <h3 css={mq({ fontSize: [24, 20], fontWeight: [600, 500], margin: 0, marginLeft: ['1rem', '2rem'], })} > {title} </h3> </Link> <div css={mq({ width: '100%', display: 'flex', flexDirection: ['column', 'row'], height: ['auto', 48], padding: ['1.5rem 1rem', '8px 15px'], alignItems: 'center', })} > <a href={exercise[0].isolatedPath} title="exercise" css={mq({ width: '100%', display: 'flex', alignItems: 'center', justifyContent: ['flex-start', 'center'], color: 'inherit', padding: ['.7rem 1rem', 0], fontSize: 16, height: [48, 56], textDecoration: 'none', borderRadius: 5, ':hover': { background: theme.backgroundLight, svg: {fill: theme.primary}, }, })} > <RiToolsLine size="20" color={theme.textLightest} css={{marginRight: 5}} /> <span>Exercise</span> </a> <a href={final[0].isolatedPath} title="final version" css={mq({ width: '100%', display: 'flex', alignItems: 'center', justifyContent: ['flex-start', 'center'], color: 'inherit', padding: ['.7rem 1rem', 0], height: [48, 56], fontSize: 16, textDecoration: 'none', borderRadius: 5, ':hover': { background: 
theme.backgroundLight, svg: {fill: theme.primary}, }, })} > <RiFlagLine size="18" color={theme.textLightest} css={{marginRight: 5}} /> <span>Final Version</span> </a> </div> </div> ) })} </div> </div> </> ) } Home.displayName = 'Home' function NotFound() { const theme = useTheme() return ( <div css={{ height: '100vh', display: 'flex', alignItems: 'center', justifyContent: 'center', textAlign: 'center', }} > <div> <Logo size={120} color={theme.skyDark} strokeWidth={0.7} css={{opacity: 0.7}} /> <h1>{`Sorry... nothing here.`}</h1> {`To open one of the exercises, go to `} <code>{`/exerciseNumber`}</code> {`, for example: `} <Link to="/1"> <code>{`/1`}</code> </Link> <div css={{marginTop: '2rem', a: {textDecoration: 'none'}}}> <Link to="/" css={{ display: 'flex', alignItems: 'center', justifyContent: 'center', }} > <RiArrowLeftSLine /> Back home </Link> </div> </div> </div> ) } NotFound.displayName = 'NotFound' function useDarkMode() { const preferDarkQuery = '(prefers-color-scheme: dark)' const [mode, setMode] = React.useState<DarkModeState>(() => { const lsVal = window.localStorage.getItem('colorMode') if (lsVal) { return lsVal === 'dark' ? 'dark' : 'light' } else { return window.matchMedia(preferDarkQuery).matches ? 'dark' : 'light' } }) React.useEffect(() => { const mediaQuery = window.matchMedia(preferDarkQuery) const handleChange = () => { setMode(mediaQuery.matches ? 'dark' : 'light') } mediaQuery.addListener(handleChange) return () => mediaQuery.removeListener(handleChange) }, []) React.useEffect(() => { window.localStorage.setItem('colorMode', mode) }, [mode]) // we're doing it this way instead of as an effect so we only // set the localStorage value if they explicitly change the default return [mode, setMode] as const } function DelayedTransition() { // we have it this way so dark mode is rendered immediately rather than // transitioning to it on initial page load. 
const [renderStyles, setRender] = React.useState(false) React.useEffect(() => { const timeout = setTimeout(() => { setRender(true) }, 450) return () => clearTimeout(timeout) }, []) return renderStyles ? ( <Global styles={{ '*, *::before, *::after': { // for the theme change transition: `background 0.4s, background-color 0.4s, border-color 0.4s`, }, }} /> ) : null } function App() { const [mode, setMode] = useDarkMode() const theme = getTheme(mode) React.useLayoutEffect(() => { document.getElementById('root')?.classList.add('react-workshop-app') }) return ( <ThemeProvider theme={theme}> <Router history={history}> <Switch> <Route exact path="/"> <Home mode={mode} setMode={setMode} /> </Route> <Route exact path="/:exerciseNumber(\d+)"> <ExerciseContainer mode={mode} setMode={setMode} /> </Route> <Route> <NotFound /> </Route> </Switch> </Router> <Global styles={{ 'html, body, #root': { background: theme.background, color: theme.text, }, '::selection': { background: theme.primary, color: 'white', }, '[data-reach-tab]': { cursor: 'pointer', }, a: { color: theme.primary, }, /* This will hide the focus indicator if the element receives focus via the mouse, but it will still show up on keyboard focus. */ '*:focus:not(:focus-visible)': { outline: 'none', }, hr: {background: theme.textLightest}, }} /> <Global styles={` ${mode === 'light' ? prismThemeLight : prismThemeDark} `} /> <DelayedTransition /> </ThemeProvider> ) } function ErrorFallback({error, resetErrorBoundary}: FallbackProps) { return ( <div css={{ display: 'flex', alignItems: 'center', flexDirection: 'column', marginTop: '50px', }} > <p>Oh no! 
Something went wrong!</p> <div> <p>{`Here's the error:`}</p> <pre css={{color: 'red', overflowY: 'scroll'}}>{error.message}</pre> </div> <div> <p>Try doing one of these things to fix this:</p> <ol> <li> <button onClick={resetErrorBoundary}>Rerender the app</button> </li> <li> <button onClick={() => window.location.reload()}> Refresh the page </button> </li> <li>Update your code to fix the problem</li> </ol> </div> </div> ) } render( <ErrorBoundary FallbackComponent={ErrorFallback}> <App /> </ErrorBoundary>, ) } export {renderReactApp} /* eslint max-statements: "off", @typescript-eslint/no-non-null-assertion: "off" */
the_stack
import { h, ref, defineComponent, computed, PropType, provide, CSSProperties, watch, toRef, ComponentPublicInstance, VNode, nextTick, withDirectives, vShow, watchEffect, ExtractPropTypes, cloneVNode } from 'vue' import { VResizeObserver, VXScroll, VXScrollInst } from 'vueuc' import { throttle } from 'lodash-es' import { useCompitable, onFontsReady, useMergedState } from 'vooks' import { useConfig, useTheme } from '../../_mixins' import type { ThemeProps } from '../../_mixins' import { createKey, call, flatten, warnOnce } from '../../_utils' import type { MaybeArray, ExtractPublicPropTypes } from '../../_utils' import { tabsLight } from '../styles' import type { TabsTheme } from '../styles' import type { Addable, OnClose, OnCloseImpl, OnBeforeLeave, TabsType, TabsInst, OnUpdateValue, OnUpdateValueImpl } from './interface' import { tabsInjectionKey } from './interface' import Tab from './Tab' import { tabPaneProps } from './TabPane' import style from './styles/index.cssr' type TabPaneProps = ExtractPropTypes<typeof tabPaneProps> & { 'display-directive': 'if' | 'show' | 'show:lazy' } const tabsProps = { ...(useTheme.props as ThemeProps<TabsTheme>), value: [String, Number] as PropType<string | number>, defaultValue: [String, Number] as PropType<string | number>, type: { type: String as PropType<TabsType>, default: 'bar' }, closable: Boolean, justifyContent: String as PropType< 'space-between' | 'space-around' | 'space-evenly' >, size: { type: String as PropType<'small' | 'medium' | 'large'>, default: 'medium' }, tabStyle: [String, Object] as PropType<string | CSSProperties>, paneClass: String, paneStyle: [String, Object] as PropType<string | CSSProperties>, addable: [Boolean, Object] as PropType<Addable>, tabsPadding: { type: Number, default: 0 }, onBeforeLeave: Function as PropType<OnBeforeLeave>, onAdd: Function as PropType<() => void>, 'onUpdate:value': [Function, Array] as PropType<MaybeArray<OnUpdateValue>>, onUpdateValue: [Function, Array] as 
PropType<MaybeArray<OnUpdateValue>>, onClose: [Function, Array] as PropType<MaybeArray<OnClose>>, // deprecated labelSize: String as PropType<'small' | 'medium' | 'large'>, activeName: [String, Number] as PropType<string | number>, onActiveNameChange: [Function, Array] as PropType< MaybeArray<(value: string & number) => void> > } as const export type TabsProps = ExtractPublicPropTypes<typeof tabsProps> export default defineComponent({ name: 'Tabs', props: tabsProps, setup (props, { slots }) { if (__DEV__) { watchEffect(() => { if (props.labelSize !== undefined) { warnOnce( 'tabs', '`label-size` is deprecated, please use `size` instead.' ) } if (props.activeName !== undefined) { warnOnce( 'tabs', '`active-name` is deprecated, please use `value` instead.' ) } if (props.onActiveNameChange !== undefined) { warnOnce( 'tabs', '`on-active-name-change` is deprecated, please use `on-update:value` instead.' ) } }) } const { mergedClsPrefixRef } = useConfig(props) const themeRef = useTheme( 'Tabs', 'Tabs', style, tabsLight, props, mergedClsPrefixRef ) const tabsElRef = ref<HTMLElement | null>(null) const barElRef = ref<HTMLElement | null>(null) const scrollWrapperElRef = ref<HTMLElement | null>(null) const addTabInstRef = ref<ComponentPublicInstance | null>(null) const xScrollInstRef = ref<(VXScrollInst & ComponentPublicInstance) | null>( null ) const leftReachedRef = ref(true) const rightReachedRef = ref(true) const compitableSizeRef = useCompitable(props, ['labelSize', 'size']) const compitableValueRef = useCompitable(props, ['activeName', 'value']) const uncontrolledValueRef = ref( compitableValueRef.value ?? props.defaultValue ?? (slots.default ? 
((flatten((slots as any).default())[0] as any).props.name as | string | number) : null) ) const mergedValueRef = useMergedState( compitableValueRef, uncontrolledValueRef ) const tabChangeIdRef = { id: 0 } const tabWrapperStyleRef = computed(() => { if (!props.justifyContent || props.type === 'card') return undefined return { display: 'flex', justifyContent: props.justifyContent } }) watch(mergedValueRef, () => { tabChangeIdRef.id = 0 updateCurrentBarStyle() }) function getCurrentEl (): HTMLElement | null { const { value } = mergedValueRef if (value === null) return null const tabEl = tabsElRef.value?.querySelector(`[data-name="${value}"]`) return tabEl as HTMLElement | null } function updateBarStyle (tabEl: HTMLElement): void { if (props.type === 'card') return const { value: barEl } = barElRef if (!barEl) return if (tabEl) { const disabledClassName = `${mergedClsPrefixRef.value}-tabs-bar--disabled` if (tabEl.dataset.disabled === 'true') { barEl.classList.add(disabledClassName) } else { barEl.classList.remove(disabledClassName) } barEl.style.left = `${tabEl.offsetLeft}px` barEl.style.width = '8192px' barEl.style.maxWidth = `${tabEl.offsetWidth + 1}px` } } function updateCurrentBarStyle (): void { if (props.type === 'card') return const tabEl = getCurrentEl() if (tabEl) { updateBarStyle(tabEl) } } function handleTabClick (panelName: string | number): void { doUpdateValue(panelName) } function doUpdateValue (panelName: string | number): void { const { onActiveNameChange, onUpdateValue, 'onUpdate:value': _onUpdateValue } = props if (onActiveNameChange) { call(onActiveNameChange as OnUpdateValueImpl, panelName) } if (onUpdateValue) call(onUpdateValue as OnUpdateValueImpl, panelName) if (_onUpdateValue) call(_onUpdateValue as OnUpdateValueImpl, panelName) uncontrolledValueRef.value = panelName } function handleClose (panelName: string | number): void { const { onClose } = props if (onClose) call(onClose as OnCloseImpl, panelName) } let firstTimeUpdatePosition = true 
// Recompute the active-tab bar position and the scroll shadows when the
// nav area resizes; throttled to at most once per 64 ms.
const handleNavResize = throttle(function handleNavResize () {
  const { type } = props
  if (
    (type === 'line' || type === 'bar') &&
    (firstTimeUpdatePosition || props.justifyContent)
  ) {
    const { value: barEl } = barElRef
    if (!barEl) return
    // FIX: was `if (!firstTimeUpdatePosition) ...` — a no-op, since this
    // branch is only reachable while the flag is true (or justifyContent
    // is set). The flag therefore never cleared and the bar was
    // re-positioned with transitions disabled on EVERY resize. Clear it
    // after the first update, as the surrounding condition intends.
    if (firstTimeUpdatePosition) firstTimeUpdatePosition = false
    const disableTransitionClassName = `${mergedClsPrefixRef.value}-tabs-bar--transition-disabled`
    barEl.classList.add(disableTransitionClassName)
    updateCurrentBarStyle()
    // here we don't need to force layout after update bar style
    // since deriveScrollShadow will force layout
    barEl.classList.remove(disableTransitionClassName)
  }
  if (type !== 'segment') {
    deriveScrollShadow(xScrollInstRef.value?.$el)
  }
}, 64)
const addTabFixedRef = ref(false)
// Track whether the "add tab" button must be rendered outside the
// scrollable area (it is when the tabs overflow their container; see the
// `addTabFixed` usages in render()).
function _handleTabsResize (entry: ResizeObserverEntry): void {
  const {
    target,
    contentRect: { width }
  } = entry
  // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
  const containerWidth = target.parentElement!.offsetWidth
  if (!addTabFixedRef.value) {
    if (containerWidth < width) {
      addTabFixedRef.value = true
    }
  } else {
    const { value: addTabInst } = addTabInstRef
    if (!addTabInst) return
    if (containerWidth - width > addTabInst.$el.offsetWidth) {
      addTabFixedRef.value = false
    }
  }
  deriveScrollShadow(xScrollInstRef.value?.$el)
}
const handleTabsResize = throttle(_handleTabsResize, 64)
// Invoke the user's onAdd callback, then scroll the current tab into view
// on the next tick.
function handleAdd (): void {
  const { onAdd } = props
  if (onAdd) onAdd()
  void nextTick(() => {
    const currentEl = getCurrentEl()
    const { value: xScrollInst } = xScrollInstRef
    if (!currentEl || !xScrollInst) return
    xScrollInst.scrollTo({
      left: currentEl.offsetLeft,
      top: 0,
      behavior: 'smooth'
    })
  })
}
// Update the left/right scroll-shadow flags from the element's current
// scroll position (reading these layout properties forces layout).
function deriveScrollShadow (el: HTMLElement | null): void {
  if (!el) return
  const { scrollLeft, scrollWidth, offsetWidth } = el
  leftReachedRef.value = scrollLeft <= 0
  rightReachedRef.value = scrollLeft + offsetWidth >= scrollWidth
}
const handleScroll = throttle((e: Event) => {
  deriveScrollShadow(e.target as HTMLElement)
}, 64)
provide(tabsInjectionKey,
// Shared state/handlers for descendant Tab / TabPane components.
{
  tabStyleRef: toRef(props, 'tabStyle'),
  paneClassRef: toRef(props, 'paneClass'),
  paneStyleRef: toRef(props, 'paneStyle'),
  mergedClsPrefixRef,
  typeRef: toRef(props, 'type'),
  closableRef: toRef(props, 'closable'),
  valueRef: mergedValueRef,
  tabChangeIdRef,
  onBeforeLeaveRef: toRef(props, 'onBeforeLeave'),
  handleTabClick,
  handleClose,
  handleAdd
})
// Re-measure the bar once fonts are ready — tab widths may change.
onFontsReady(() => {
  updateCurrentBarStyle()
})
// avoid useless rerender
// (shadow classes are toggled directly on the DOM element instead of
// being rendered from reactive state)
watchEffect(() => {
  const { value: el } = scrollWrapperElRef
  if (!el) return
  const { value: clsPrefix } = mergedClsPrefixRef
  const shadowBeforeClass = `${clsPrefix}-tabs-nav-scroll-wrapper--shadow-before`
  const shadowAfterClass = `${clsPrefix}-tabs-nav-scroll-wrapper--shadow-after`
  if (leftReachedRef.value) {
    el.classList.remove(shadowBeforeClass)
  } else {
    el.classList.add(shadowBeforeClass)
  }
  if (rightReachedRef.value) {
    el.classList.remove(shadowAfterClass)
  } else {
    el.classList.add(shadowAfterClass)
  }
})
// Public imperative API exposed on the component instance.
const exposedMethods: TabsInst = {
  syncBarPosition: () => {
    updateCurrentBarStyle()
  }
}
return {
  mergedClsPrefix: mergedClsPrefixRef,
  mergedValue: mergedValueRef,
  renderedNames: new Set<NonNullable<TabPaneProps['name']>>(),
  tabsElRef,
  barElRef,
  addTabInstRef,
  xScrollInstRef,
  scrollWrapperElRef,
  addTabFixed: addTabFixedRef,
  tabWrapperStyle: tabWrapperStyleRef,
  handleNavResize,
  mergedSize: compitableSizeRef,
  handleScroll,
  handleTabsResize,
  // Map the themed values for the current size/type onto CSS custom
  // properties consumed by the component style.
  cssVars: computed(() => {
    const { value: size } = compitableSizeRef
    const { type } = props
    const typeSuffix = (
      {
        card: 'Card',
        bar: 'Bar',
        line: 'Line',
        segment: 'Segment'
      } as const
    )[type]
    const sizeType = `${size}${typeSuffix}` as const
    const {
      self: {
        barColor,
        closeColor,
        closeColorHover,
        closeColorPressed,
        tabColor,
        tabBorderColor,
        paneTextColor,
        tabFontWeight,
        tabBorderRadius,
        tabFontWeightActive,
        colorSegment,
        fontWeightStrong,
        tabColorSegment,
        [createKey('panePadding', size)]: panePadding,
        [createKey('tabPadding', sizeType)]: tabPadding,
        [createKey('tabGap', sizeType)]: tabGap,
        [createKey('tabTextColor', type)]: tabTextColor,
        [createKey('tabTextColorActive', type)]: tabTextColorActive,
        [createKey('tabTextColorHover', type)]: tabTextColorHover,
        [createKey('tabTextColorDisabled', type)]: tabTextColorDisabled,
        [createKey('tabFontSize', size)]: tabFontSize
      },
      common: { cubicBezierEaseInOut }
    } = themeRef.value
    return {
      '--bezier': cubicBezierEaseInOut,
      '--color-segment': colorSegment,
      '--bar-color': barColor,
      '--tab-font-size': tabFontSize,
      '--tab-text-color': tabTextColor,
      '--tab-text-color-active': tabTextColorActive,
      '--tab-text-color-disabled': tabTextColorDisabled,
      '--tab-text-color-hover': tabTextColorHover,
      '--pane-text-color': paneTextColor,
      '--tab-border-color': tabBorderColor,
      '--tab-border-radius': tabBorderRadius,
      '--close-color': closeColor,
      '--close-color-hover': closeColorHover,
      '--close-color-pressed': closeColorPressed,
      '--tab-color': tabColor,
      '--tab-font-weight': tabFontWeight,
      '--tab-font-weight-active': tabFontWeightActive,
      '--tab-padding': tabPadding,
      '--tab-gap': tabGap,
      '--pane-padding': panePadding,
      '--font-weight-strong': fontWeightStrong,
      '--tab-color-segment': tabColorSegment
    }
  }),
  ...exposedMethods
}
},
render () {
  const {
    mergedClsPrefix,
    type,
    addTabFixed,
    addable,
    mergedSize,
    $slots: { default: defaultSlot, prefix: prefixSlot, suffix: suffixSlot }
  } = this
  // Children of the default slot may be TabPane components (panes with
  // content) or plain Tab components (nav-only); the flags are set by
  // those components' definitions.
  const tabPaneChildren = defaultSlot
    ? flatten(defaultSlot()).filter((v) => {
      return (v.type as any).__TAB_PANE__ === true
    })
    : []
  const tabChildren = defaultSlot
    ? flatten(defaultSlot()).filter((v) => {
      return (v.type as any).__TAB__ === true
    })
    : []
  // Panes are rendered only when no plain Tab children are present.
  const showPane = !tabChildren.length
  const prefix = prefixSlot ? prefixSlot() : null
  const suffix = suffixSlot ? suffixSlot() : null
  const isCard = type === 'card'
  const isSegment = type === 'segment'
  const mergedJustifyContent = !isCard && !isSegment && this.justifyContent
  return (
    <div
      class={[
        `${mergedClsPrefix}-tabs`,
        `${mergedClsPrefix}-tabs--${type}-type`,
        `${mergedClsPrefix}-tabs--${mergedSize}-size`,
        mergedJustifyContent && `${mergedClsPrefix}-tabs--flex`
      ]}
      style={this.cssVars as CSSProperties}
    >
      <div
        class={[
          // the class should be applied here since it's possible
          // to make tabs nested in tabs, style may influence each
          // other. adding a class will make it easy to write the
          // style.
          `${mergedClsPrefix}-tabs-nav--${type}-type`,
          `${mergedClsPrefix}-tabs-nav`
        ]}
      >
        {prefix ? (
          <div class={`${mergedClsPrefix}-tabs-nav__prefix`}>{prefix}</div>
        ) : null}
        {isSegment ? (
          // Segment type: no scrolling, tabs sit on a simple rail.
          <div class={`${mergedClsPrefix}-tabs-rail`}>
            {showPane
              ? tabPaneChildren.map((tabPaneVNode: any, index: number) => {
                return (
                  <Tab
                    {...tabPaneVNode.props}
                    internalLeftPadded={index !== 0}
                  >
                    {tabPaneVNode.children
                      ? {
                          default: tabPaneVNode.children.tab
                        }
                      : undefined}
                  </Tab>
                )
              })
              : tabChildren.map((tabVNode: any, index: number) => {
                if (index === 0) {
                  return tabVNode
                } else {
                  return createLeftPaddedTabVNode(tabVNode)
                }
              })}
          </div>
        ) : (
          // Other types: tabs live inside a horizontally scrollable area
          // whose resize drives bar position and scroll shadows.
          <VResizeObserver onResize={this.handleNavResize}>
            {{
              default: () => (
                <div
                  class={`${mergedClsPrefix}-tabs-nav-scroll-wrapper`}
                  ref="scrollWrapperElRef"
                >
                  <VXScroll ref="xScrollInstRef" onScroll={this.handleScroll}>
                    {{
                      default: () => {
                        const rawWrappedTabs = (
                          <div
                            style={this.tabWrapperStyle}
                            class={`${mergedClsPrefix}-tabs-wrapper`}
                          >
                            {mergedJustifyContent ? null : (
                              <div
                                class={`${mergedClsPrefix}-tabs-scroll-padding`}
                                style={{ width: `${this.tabsPadding}px` }}
                              />
                            )}
                            {showPane
                              ? tabPaneChildren.map(
                                (tabPaneVNode: any, index: number) => {
                                  return justifyTabDynamicProps(
                                    <Tab
                                      {...tabPaneVNode.props}
                                      internalLeftPadded={
                                        index !== 0 && !mergedJustifyContent
                                      }
                                    >
                                      {tabPaneVNode.children
                                        ? {
                                            default: tabPaneVNode.children.tab
                                          }
                                        : undefined}
                                    </Tab>
                                  )
                                }
                              )
                              : tabChildren.map(
                                (tabVNode: any, index: number) => {
                                  if (
                                    index !== 0 &&
                                    !mergedJustifyContent
                                  ) {
                                    return justifyTabDynamicProps(
                                      createLeftPaddedTabVNode(tabVNode)
                                    )
                                  } else {
                                    return justifyTabDynamicProps(tabVNode)
                                  }
                                }
                              )}
                            {!addTabFixed && addable && isCard
                              ? createAddTag(
                                addable,
                                (showPane
                                  ? tabPaneChildren.length
                                  : tabChildren.length) !== 0
                              )
                              : null}
                            {mergedJustifyContent ? null : (
                              <div
                                class={`${mergedClsPrefix}-tabs-scroll-padding`}
                                style={{ width: `${this.tabsPadding}px` }}
                              />
                            )}
                          </div>
                        )
                        let wrappedTabs = rawWrappedTabs
                        if (isCard && addable) {
                          wrappedTabs = (
                            <VResizeObserver onResize={this.handleTabsResize}>
                              {{
                                default: () => rawWrappedTabs
                              }}
                            </VResizeObserver>
                          )
                        }
                        return (
                          <div
                            ref="tabsElRef"
                            class={`${mergedClsPrefix}-tabs-nav-scroll-content`}
                          >
                            {wrappedTabs}
                            {isCard ? (
                              <div class={`${mergedClsPrefix}-tabs-pad`} />
                            ) : null}
                            {isCard ? null : (
                              <div
                                ref="barElRef"
                                class={`${mergedClsPrefix}-tabs-bar`}
                              />
                            )}
                          </div>
                        )
                      }
                    }}
                  </VXScroll>
                </div>
              )
            }}
          </VResizeObserver>
        )}
        {addTabFixed && addable && isCard
          ? createAddTag(addable, true)
          : null}
        {suffix ? (
          <div class={`${mergedClsPrefix}-tabs-nav__suffix`}>{suffix}</div>
        ) : null}
      </div>
      {showPane &&
        filterMapTabPanes(
          tabPaneChildren,
          this.mergedValue,
          this.renderedNames
        )}
    </div>
  )
}
})

// Select which panes to render: the active pane plus any pane whose
// display directive is 'show' (always kept alive) or 'show:lazy' (kept
// alive once rendered). Non-'if' panes are toggled with v-show so their
// state survives tab switches.
function filterMapTabPanes (
  tabPaneVNodes: VNode[],
  value: string | number | null,
  renderedNames: Set<string | number>
): VNode[] {
  const children: VNode[] = []
  tabPaneVNodes.forEach((vNode) => {
    const {
      name,
      displayDirective,
      'display-directive': _displayDirective
    } = vNode.props as TabPaneProps
    const matchDisplayDirective = (
      directive: TabPaneProps['displayDirective']
    ): boolean =>
      displayDirective === directive || _displayDirective === directive
    const show = value === name
    // Normalize the vnode key to the pane name.
    if (vNode.key !== undefined) {
      vNode.key = name
    }
    if (
      show ||
      matchDisplayDirective('show') ||
      (matchDisplayDirective('show:lazy') && renderedNames.has(name))
    ) {
      if (!renderedNames.has(name)) {
        renderedNames.add(name)
      }
      const useVShow = !matchDisplayDirective('if')
      children.push(useVShow ? withDirectives(vNode, [[vShow, show]]) : vNode)
    }
  })
  return children
}

// Build the "add tab" (+) button as a special Tab vnode.
function createAddTag (addable: Addable, internalLeftPadded: boolean): VNode {
  return (
    <Tab
      ref="addTabInstRef"
      key="__addable"
      name="__addable"
      internalAddable
      internalLeftPadded={internalLeftPadded}
      disabled={typeof addable === 'object' && addable.disabled}
    />
  )
}

// Clone a Tab vnode with internalLeftPadded forced on (every tab after
// the first gets left padding).
function createLeftPaddedTabVNode (tabVNode: VNode): VNode {
  const modifiedVNode = cloneVNode(tabVNode)
  if (modifiedVNode.props) {
    modifiedVNode.props.internalLeftPadded = true
  } else {
    modifiedVNode.props = {
      internalLeftPadded: true
    }
  }
  return modifiedVNode
}

// Ensure 'internalLeftPadded' is listed in the vnode's compiler-generated
// dynamicProps so patches don't skip updates to it.
function justifyTabDynamicProps (
  tabVNode: { dynamicProps?: string[] } & VNode
): VNode {
  if (Array.isArray(tabVNode.dynamicProps)) {
    if (!tabVNode.dynamicProps.includes('internalLeftPadded')) {
      tabVNode.dynamicProps.push('internalLeftPadded')
    }
  } else {
    tabVNode.dynamicProps = ['internalLeftPadded']
  }
  return tabVNode
}
the_stack
* @module iModels */

import { CompressedId64Set, Id64, Id64String, IModelStatus, Logger } from "@itwin/core-bentley";
import {
  AxisAlignedBox3d, Base64EncodedString, ElementAspectProps, ElementProps, EntityProps, IModel, IModelError, ModelProps,
  PrimitiveTypeCode, PropertyMetaData, RelatedElement, SubCategoryProps,
} from "@itwin/core-common";
import { TransformerLoggerCategory } from "./TransformerLoggerCategory";
import { ElementAspect, ElementMultiAspect, Entity, IModelDb, Model, Relationship, RelationshipProps, SourceAndTarget, SubCategory } from "@itwin/core-backend";
import type { IModelTransformOptions } from "./IModelTransformer";

// Logging category shared by every log message emitted by the importer below.
const loggerCategory: string = TransformerLoggerCategory.IModelImporter;

/** Options provided to [[IModelImporter.optimizeGeometry]] specifying post-processing optimizations to be applied to the iModel's geometry.
 * @beta
 */
export interface OptimizeGeometryOptions {
  /** If true, identify any [GeometryPart]($backend)s that are referenced exactly once. For each such part,
   * replace the reference in the element's geometry stream with the part's own geometry stream, then delete the part.
   */
  inlineUniqueGeometryParts?: boolean;
}

/** Options provided to the [[IModelImporter]] constructor.
 * @beta
 */
export interface IModelImportOptions {
  /** If `true` (the default), compute the projectExtents of the target iModel after elements are imported.
   * The computed projectExtents will either include or exclude *outliers* depending on the `excludeOutliers` flag that defaults to `false`.
   * @see [[IModelImporter.autoExtendProjectExtents]]
   * @see [IModelImporter Options]($docs/learning/transformer/index.md#IModelImporter)
   */
  autoExtendProjectExtents?: boolean | { excludeOutliers: boolean };
  /** @see [IModelTransformOptions]($transformer) */
  preserveElementIdsForFiltering?: boolean;
  /** If `true`, simplify the element geometry for visualization purposes. For example, convert b-reps into meshes.
* @default false */ simplifyElementGeometry?: boolean; } /** Base class for importing data into an iModel. * @see [iModel Transformation and Data Exchange]($docs/learning/transformer/index.md) * @see [IModelExporter]($transformer) * @see [IModelTransformer]($transformer) * @beta */ export class IModelImporter implements Required<IModelImportOptions> { /** The read/write target iModel. */ public readonly targetDb: IModelDb; /** resolved initialization options for the importer * @beta */ public readonly options: Required<IModelImportOptions>; /** If `true` (the default), compute the projectExtents of the target iModel after elements are imported. * The computed projectExtents will either include or exclude *outliers* depending on the `excludeOutliers` flag that defaults to `false`. * @see [[IModelImportOptions.autoExtendProjectExtents]] * @see [IModelImporter Options]($docs/learning/transformer/index.md#IModelImporter) * @deprecated Use [[IModelImporter.options.autoExtendProjectExtents]] instead */ public get autoExtendProjectExtents(): Required<IModelImportOptions>["autoExtendProjectExtents"] { return this.options.autoExtendProjectExtents; } public set autoExtendProjectExtents(val: Required<IModelImportOptions>["autoExtendProjectExtents"]) { this.options.autoExtendProjectExtents = val; } /** * @see [IModelTransformOptions.preserveElementIdsForFiltering]($transformer) * @deprecated Use [[IModelImporter.options.preserveElementIdsForFiltering]] instead */ public get preserveElementIdsForFiltering(): boolean { return this.options.preserveElementIdsForFiltering; } public set preserveElementIdsForFiltering(val: boolean) { this.options.preserveElementIdsForFiltering = val; } /** * @see [[IModelImportOptions.simplifyElementGeometry]] * @deprecated Use [[IModelImporter.options.simplifyElementGeometry]] instead */ public get simplifyElementGeometry(): boolean { return this.options.simplifyElementGeometry; } public set simplifyElementGeometry(val: boolean) { 
this.options.simplifyElementGeometry = val; } /** The set of elements that should not be updated by this IModelImporter. * @note Adding an element to this set is typically necessary when remapping a source element to one that already exists in the target and already has the desired properties. */ public readonly doNotUpdateElementIds = new Set<Id64String>(); /** The number of entity changes before incremental progress should be reported via the [[onProgress]] callback. */ public progressInterval: number = 1000; /** Tracks the current total number of entity changes. */ private _progressCounter: number = 0; /** */ private _modelPropertiesToIgnore = new Set<string>(); /** Construct a new IModelImporter * @param targetDb The target IModelDb * @param options The options that specify how the import should be done. */ public constructor(targetDb: IModelDb, options?: IModelImportOptions) { this.targetDb = targetDb; this.options = { autoExtendProjectExtents: options?.autoExtendProjectExtents ?? true, preserveElementIdsForFiltering: options?.preserveElementIdsForFiltering ?? false, simplifyElementGeometry: options?.simplifyElementGeometry ?? false, }; // Add in the elements that are always present (even in an "empty" iModel) and therefore do not need to be updated this.doNotUpdateElementIds.add(IModel.rootSubjectId); this.doNotUpdateElementIds.add(IModel.dictionaryId); this.doNotUpdateElementIds.add("0xe"); // RealityDataSources LinkPartition this._modelPropertiesToIgnore.add("geometryGuid"); // cannot compare GeometricModel.GeometryGuid values across iModels } /** Import the specified ModelProps (either as an insert or an update) into the target iModel. 
*/ public importModel(modelProps: ModelProps): void { if ((undefined === modelProps.id) || !Id64.isValidId64(modelProps.id)) throw new IModelError(IModelStatus.InvalidId, "Model Id not provided, should be the same as the ModeledElementId"); if (this.doNotUpdateElementIds.has(modelProps.id)) { Logger.logInfo(loggerCategory, `Do not update target model ${modelProps.id}`); return; } try { const model: Model = this.targetDb.models.getModel(modelProps.id); // throws IModelError.NotFound if model does not exist if (hasEntityChanged(model, modelProps, this._modelPropertiesToIgnore)) { this.onUpdateModel(modelProps); } } catch (error) { // catch NotFound error and insertModel if ((error instanceof IModelError) && (error.errorNumber === IModelStatus.NotFound)) { this.onInsertModel(modelProps); return; } throw error; } } /** Create a new Model from the specified ModelProps and insert it into the target iModel. * @note A subclass may override this method to customize insert behavior but should call `super.onInsertModel`. */ protected onInsertModel(modelProps: ModelProps): Id64String { try { const modelId: Id64String = this.targetDb.models.insertModel(modelProps); Logger.logInfo(loggerCategory, `Inserted ${this.formatModelForLogger(modelProps)}`); this.trackProgress(); return modelId; } catch (error) { if (!this.targetDb.containsClass(modelProps.classFullName)) { // replace standard insert error with something more helpful const errorMessage = `Model class "${modelProps.classFullName}" not found in the target iModel. Was the latest version of the schema imported?`; throw new IModelError(IModelStatus.InvalidName, errorMessage); } throw error; // throw original error } } /** Update an existing Model in the target iModel from the specified ModelProps. * @note A subclass may override this method to customize update behavior but should call `super.onUpdateModel`. 
*/ protected onUpdateModel(modelProps: ModelProps): void { this.targetDb.models.updateModel(modelProps); Logger.logInfo(loggerCategory, `Updated ${this.formatModelForLogger(modelProps)}`); this.trackProgress(); } /** Format a Model for the Logger. */ private formatModelForLogger(modelProps: ModelProps): string { return `${modelProps.classFullName} [${modelProps.id!}]`; } /** Import the specified ElementProps (either as an insert or an update) into the target iModel. */ public importElement(elementProps: ElementProps): Id64String { if (undefined !== elementProps.id && this.doNotUpdateElementIds.has(elementProps.id)) { Logger.logInfo(loggerCategory, `Do not update target element ${elementProps.id}`); return elementProps.id; } if (this.options.preserveElementIdsForFiltering) { if (elementProps.id === undefined) { throw new IModelError(IModelStatus.BadElement, `elementProps.id must be defined during a preserveIds operation`); } // Categories are the only element that onInserted will immediately insert a new element (their default subcategory) // since default subcategories always exist and always will be inserted after their categories, we treat them as an update // to prevent duplicate inserts. // Otherwise we always insert during a preserveElementIdsForFiltering operation if (isSubCategory(elementProps) && isDefaultSubCategory(elementProps)) { this.onUpdateElement(elementProps); } else { this.onInsertElement(elementProps); } } else { if (undefined !== elementProps.id) { this.onUpdateElement(elementProps); } else { this.onInsertElement(elementProps); // targetElementProps.id assigned by insertElement } } return elementProps.id!; } /** Create a new Element from the specified ElementProps and insert it into the target iModel. * @returns The Id of the newly inserted Element. * @note A subclass may override this method to customize insert behavior but should call `super.onInsertElement`. 
*/ protected onInsertElement(elementProps: ElementProps): Id64String { try { const elementId = this.targetDb.nativeDb.insertElement( elementProps, { forceUseId: this.options.preserveElementIdsForFiltering }, ); // set the id like [IModelDb.insertElement]($backend), does, the raw nativeDb method does not elementProps.id = elementId; Logger.logInfo(loggerCategory, `Inserted ${this.formatElementForLogger(elementProps)}`); this.trackProgress(); if (this.options.simplifyElementGeometry) { this.targetDb.nativeDb.simplifyElementGeometry({ id: elementId, convertBReps: true }); Logger.logInfo(loggerCategory, `Simplified element geometry for ${this.formatElementForLogger(elementProps)}`); } return elementId; } catch (error) { if (!this.targetDb.containsClass(elementProps.classFullName)) { // replace standard insert error with something more helpful const errorMessage = `Element class "${elementProps.classFullName}" not found in the target iModel. Was the latest version of the schema imported?`; throw new IModelError(IModelStatus.InvalidName, errorMessage); } throw error; // throw original error } } /** Update an existing Element in the target iModel from the specified ElementProps. * @note A subclass may override this method to customize update behavior but should call `super.onUpdateElement`. */ protected onUpdateElement(elementProps: ElementProps): void { if (!elementProps.id) { throw new IModelError(IModelStatus.InvalidId, "ElementId not provided"); } this.targetDb.elements.updateElement(elementProps); Logger.logInfo(loggerCategory, `Updated ${this.formatElementForLogger(elementProps)}`); this.trackProgress(); if (this.options.simplifyElementGeometry) { this.targetDb.nativeDb.simplifyElementGeometry({ id: elementProps.id, convertBReps: true }); Logger.logInfo(loggerCategory, `Simplified element geometry for ${this.formatElementForLogger(elementProps)}`); } } /** Delete the specified Element from the target iModel. 
* @note A subclass may override this method to customize delete behavior but should call `super.onDeleteElement`. */ protected onDeleteElement(elementId: Id64String): void { this.targetDb.elements.deleteElement(elementId); Logger.logInfo(loggerCategory, `Deleted element ${elementId}`); this.trackProgress(); } /** Delete the specified Element from the target iModel. */ public deleteElement(elementId: Id64String): void { if (this.doNotUpdateElementIds.has(elementId)) { Logger.logInfo(loggerCategory, `Do not delete target element ${elementId}`); return; } this.onDeleteElement(elementId); } /** Format an Element for the Logger. */ private formatElementForLogger(elementProps: ElementProps): string { const namePiece: string = elementProps.code.value ? `${elementProps.code.value} ` : elementProps.userLabel ? `${elementProps.userLabel} ` : ""; return `${elementProps.classFullName} ${namePiece}[${elementProps.id}]`; } /** Import an ElementUniqueAspect into the target iModel. */ public importElementUniqueAspect(aspectProps: ElementAspectProps): void { const aspects: ElementAspect[] = this.targetDb.elements.getAspects(aspectProps.element.id, aspectProps.classFullName); if (aspects.length === 0) { this.onInsertElementAspect(aspectProps); } else if (hasEntityChanged(aspects[0], aspectProps)) { aspectProps.id = aspects[0].id; this.onUpdateElementAspect(aspectProps); } } /** Import the collection of ElementMultiAspects into the target iModel. * @param aspectPropsArray The ElementMultiAspects to import * @param filterFunc Optional filter func that is used to exclude target ElementMultiAspects that were added during iModel transformation from the update detection logic. * @note For insert vs. update reasons, it is important to process all ElementMultiAspects owned by an Element at once since we don't have aspect-specific provenance. 
*/ public importElementMultiAspects(aspectPropsArray: ElementAspectProps[], filterFunc?: (a: ElementMultiAspect) => boolean): void { if (aspectPropsArray.length === 0) { return; } const elementId: Id64String = aspectPropsArray[0].element.id; // Determine the set of ElementMultiAspect classes to consider const aspectClassFullNames = new Set<string>(); aspectPropsArray.forEach((aspectsProps: ElementAspectProps): void => { aspectClassFullNames.add(aspectsProps.classFullName); }); // Handle ElementMultiAspects in groups by class aspectClassFullNames.forEach((aspectClassFullName: string) => { const proposedAspects = aspectPropsArray.filter((aspectProps) => aspectClassFullName === aspectProps.classFullName); let currentAspects: ElementMultiAspect[] = this.targetDb.elements.getAspects(elementId, aspectClassFullName); if (filterFunc) { currentAspects = currentAspects.filter((a) => filterFunc(a)); // any aspects added by IModelTransformer must not be considered for update } if (proposedAspects.length >= currentAspects.length) { let index = 0; proposedAspects.forEach((aspectProps: ElementAspectProps) => { if (index < currentAspects.length) { aspectProps.id = currentAspects[index].id; if (hasEntityChanged(currentAspects[index], aspectProps)) { this.onUpdateElementAspect(aspectProps); } } else { this.onInsertElementAspect(aspectProps); } index++; }); } else { let index = 0; currentAspects.forEach((aspect: ElementMultiAspect) => { if (index < proposedAspects.length) { proposedAspects[index].id = aspect.id; if (hasEntityChanged(aspect, proposedAspects[index])) { this.onUpdateElementAspect(proposedAspects[index]); } } else { this.onDeleteElementAspect(aspect); } index++; }); } }); } /** Insert the ElementAspect into the target iModel. * @note A subclass may override this method to customize insert behavior but should call `super.onInsertElementAspect`. 
*/ protected onInsertElementAspect(aspectProps: ElementAspectProps): void { try { this.targetDb.elements.insertAspect(aspectProps); Logger.logInfo(loggerCategory, `Inserted ${this.formatElementAspectForLogger(aspectProps)}`); this.trackProgress(); } catch (error) { if (!this.targetDb.containsClass(aspectProps.classFullName)) { // replace standard insert error with something more helpful const errorMessage = `ElementAspect class "${aspectProps.classFullName}" not found in the target iModel. Was the latest version of the schema imported?`; throw new IModelError(IModelStatus.InvalidName, errorMessage); } throw error; // throw original error } } /** Update the ElementAspect within the target iModel. * @note A subclass may override this method to customize update behavior but should call `super.onUpdateElementAspect`. */ protected onUpdateElementAspect(aspectProps: ElementAspectProps): void { this.targetDb.elements.updateAspect(aspectProps); Logger.logInfo(loggerCategory, `Updated ${this.formatElementAspectForLogger(aspectProps)}`); this.trackProgress(); } /** Delete the specified ElementAspect from the target iModel. * @note A subclass may override this method to customize delete behavior but should call `super.onDeleteElementAspect`. */ protected onDeleteElementAspect(targetElementAspect: ElementAspect): void { this.targetDb.elements.deleteAspect(targetElementAspect.id); Logger.logInfo(loggerCategory, `Deleted ${this.formatElementAspectForLogger(targetElementAspect)}`); this.trackProgress(); } /** Format an ElementAspect for the Logger. */ private formatElementAspectForLogger(elementAspectProps: ElementAspectProps | ElementAspect): string { return `${elementAspectProps.classFullName} elementId=[${elementAspectProps.element.id}]`; } /** Import the specified RelationshipProps (either as an insert or an update) into the target iModel. * @returns The instance Id of the inserted or updated Relationship. 
*/ public importRelationship(relationshipProps: RelationshipProps): Id64String { if ((undefined === relationshipProps.sourceId) || !Id64.isValidId64(relationshipProps.sourceId)) { Logger.logInfo(loggerCategory, `Ignoring ${relationshipProps.classFullName} instance because of invalid RelationshipProps.sourceId`); return Id64.invalid; } if ((undefined === relationshipProps.targetId) || !Id64.isValidId64(relationshipProps.targetId)) { Logger.logInfo(loggerCategory, `Ignoring ${relationshipProps.classFullName} instance because of invalid RelationshipProps.targetId`); return Id64.invalid; } // check for an existing relationship const relSourceAndTarget: SourceAndTarget = { sourceId: relationshipProps.sourceId, targetId: relationshipProps.targetId }; const relationship: Relationship | undefined = this.targetDb.relationships.tryGetInstance(relationshipProps.classFullName, relSourceAndTarget); if (undefined !== relationship) { // if relationship found, update it relationshipProps.id = relationship.id; if (hasEntityChanged(relationship, relationshipProps)) { this.onUpdateRelationship(relationshipProps); } return relationshipProps.id; } else { return this.onInsertRelationship(relationshipProps); } } /** Create a new Relationship from the specified RelationshipProps and insert it into the target iModel. * @returns The instance Id of the newly inserted relationship. * @note A subclass may override this method to customize insert behavior but should call `super.onInsertRelationship`. 
*/
  protected onInsertRelationship(relationshipProps: RelationshipProps): Id64String {
    try {
      const targetRelInstanceId: Id64String = this.targetDb.relationships.insertInstance(relationshipProps);
      Logger.logInfo(loggerCategory, `Inserted ${this.formatRelationshipForLogger(relationshipProps)}`);
      this.trackProgress();
      return targetRelInstanceId;
    } catch (error) {
      if (!this.targetDb.containsClass(relationshipProps.classFullName)) {
        // replace standard insert error with something more helpful
        const errorMessage = `Relationship class "${relationshipProps.classFullName}" not found in the target iModel. Was the latest version of the schema imported?`;
        throw new IModelError(IModelStatus.InvalidName, errorMessage);
      }
      throw error; // throw original error
    }
  }

  /** Update an existing Relationship in the target iModel from the specified RelationshipProps.
   * @note A subclass may override this method to customize update behavior but should call `super.onUpdateRelationship`.
   */
  protected onUpdateRelationship(relationshipProps: RelationshipProps): void {
    if (!relationshipProps.id) {
      throw new IModelError(IModelStatus.InvalidId, "Relationship instance Id not provided");
    }
    this.targetDb.relationships.updateInstance(relationshipProps);
    Logger.logInfo(loggerCategory, `Updated ${this.formatRelationshipForLogger(relationshipProps)}`);
    this.trackProgress();
  }

  /** Delete the specified Relationship from the target iModel. */
  protected onDeleteRelationship(relationshipProps: RelationshipProps): void {
    this.targetDb.relationships.deleteInstance(relationshipProps);
    Logger.logInfo(loggerCategory, `Deleted relationship ${this.formatRelationshipForLogger(relationshipProps)}`);
    this.trackProgress();
  }

  /** Delete the specified Relationship from the target iModel. Delegates to [[onDeleteRelationship]]. */
  public deleteRelationship(relationshipProps: RelationshipProps): void {
    this.onDeleteRelationship(relationshipProps);
  }

  /** Format a Relationship for the Logger. */
  private formatRelationshipForLogger(relProps: RelationshipProps): string {
    return `${relProps.classFullName} sourceId=[${relProps.sourceId}] targetId=[${relProps.targetId}]`;
  }

  /** Tracks incremental progress */
  private trackProgress(): void {
    this._progressCounter++;
    // fire the progress callback once every `progressInterval` tracked operations
    if (0 === (this._progressCounter % this.progressInterval)) {
      this.onProgress();
    }
  }

  /** This method is called when IModelImporter has made incremental progress based on the [[progressInterval]] setting.
   * @note A subclass may override this method to report custom progress but should call `super.onProgress`.
   */
  protected onProgress(): void { }

  /** Optionally compute the projectExtents for the target iModel depending on the options for this IModelImporter.
   * @note This method is automatically called from [IModelTransformer.processChanges]($transformer) and [IModelTransformer.processAll]($transformer).
   * @see [IModelDb.computeProjectExtents]($backend), [[autoExtendProjectExtents]]
   */
  public computeProjectExtents(): void {
    const computedProjectExtents = this.targetDb.computeProjectExtents({ reportExtentsWithOutliers: true, reportOutliers: true });
    Logger.logInfo(loggerCategory, `Current projectExtents=${JSON.stringify(this.targetDb.projectExtents)}`);
    Logger.logInfo(loggerCategory, `Computed projectExtents without outliers=${JSON.stringify(computedProjectExtents.extents)}`);
    Logger.logInfo(loggerCategory, `Computed projectExtents with outliers=${JSON.stringify(computedProjectExtents.extentsWithOutliers)}`);
    if (this.options.autoExtendProjectExtents) {
      // autoExtendProjectExtents may be `true` or an options object carrying `excludeOutliers`
      const excludeOutliers: boolean = typeof this.options.autoExtendProjectExtents === "object" ? this.options.autoExtendProjectExtents.excludeOutliers : false;
      const newProjectExtents: AxisAlignedBox3d = excludeOutliers ? computedProjectExtents.extents : computedProjectExtents.extentsWithOutliers!;
      if (!newProjectExtents.isAlmostEqual(this.targetDb.projectExtents)) {
        this.targetDb.updateProjectExtents(newProjectExtents);
        Logger.logInfo(loggerCategory, `Updated projectExtents=${JSON.stringify(this.targetDb.projectExtents)}`);
      }
      if (!excludeOutliers && computedProjectExtents.outliers && computedProjectExtents.outliers.length > 0) {
        Logger.logInfo(loggerCategory, `${computedProjectExtents.outliers.length} outliers detected within projectExtents`);
      }
    } else {
      // extents are not auto-extended in this mode; only log/warn about potential problems
      if (!this.targetDb.projectExtents.containsRange(computedProjectExtents.extents)) {
        Logger.logWarning(loggerCategory, "Current project extents may be too small");
      }
      if (computedProjectExtents.outliers && computedProjectExtents.outliers.length > 0) {
        Logger.logInfo(loggerCategory, `${computedProjectExtents.outliers.length} outliers detected within projectExtents`);
      }
    }
  }

  /** Examine the geometry streams of every [GeometricElement3d]($backend) in the target iModel and apply the specified optimizations.
   * @note This method is automatically called from [[IModelTransformer.processChanges]] and [[IModelTransformer.processAll]] if
   * [[IModelTransformOptions.optimizeGeometry]] is defined.
   */
  public optimizeGeometry(options: OptimizeGeometryOptions): void {
    if (options.inlineUniqueGeometryParts) {
      const result = this.targetDb.nativeDb.inlineGeometryPartReferences();
      Logger.logInfo(loggerCategory, `Inlined ${result.numRefsInlined} references to ${result.numCandidateParts} geometry parts and deleted ${result.numPartsDeleted} parts.`);
    }
  }

  /**
   * You may override this to store arbitrary json state in an exporter state dump, useful for some resumptions
   * @see [[IModelTransformer.saveStateToFile]]
   */
  protected getAdditionalStateJson(): any {
    return {};
  }

  /**
   * You may override this to load arbitrary json state in a transformer state dump, useful for some resumptions
   * @see [[IModelTransformer.loadStateFromFile]]
   */
  protected loadAdditionalStateJson(_additionalState: any): void {}

  /**
   * Reload our state from a JSON object
   * Intended for [[IModelTransformer.resumeTransformation]]
   * @internal
   * You can load custom json from the importer save state for custom importers by overriding [[IModelImporter.loadAdditionalStateJson]]
   */
  public loadStateFromJson(state: IModelImporterState): void {
    if (state.importerClass !== this.constructor.name)
      throw Error("resuming from a differently named importer class, it is not necessarily valid to resume with a different importer class");
    // ignore readonly since this runs right after construction in [[IModelTransformer.resumeTransformation]]
    // NOTE(review): the cast below uses IModelTransformOptions while [[IModelImporterState.options]] is declared
    // as IModelImportOptions — confirm the two types are compatible/aliased.
    (this.options as IModelTransformOptions) = state.options;
    if (this.targetDb.iModelId !== state.targetDbId)
      throw Error("can only load importer state when the same target is reused");
    // TODO: fix upstream, looks like a bad case for the linter rule when casting away readonly for this generic
    // eslint-disable-next-line @typescript-eslint/no-unnecessary-type-assertion
    (this.doNotUpdateElementIds as Set<Id64String>) = CompressedId64Set.decompressSet(state.doNotUpdateElementIds);
    this.loadAdditionalStateJson(state.additionalState);
  }

  /**
   * Serialize state to a JSON object
   * Intended for [[IModelTransformer.resumeTransformation]]
   * @internal
   * You can add custom json to the importer save state for custom importers by overriding [[IModelImporter.getAdditionalStateJson]]
   */
  public saveStateToJson(): IModelImporterState {
    return {
      importerClass: this.constructor.name,
      options: this.options,
      // falls back to the native file path when the target has no iModelId
      targetDbId: this.targetDb.iModelId || this.targetDb.nativeDb.getFilePath(),
      doNotUpdateElementIds: CompressedId64Set.compressSet(this.doNotUpdateElementIds),
      additionalState: this.getAdditionalStateJson(),
    };
  }
}

/**
 * The JSON format of a serialized IModelImporter instance
 * Used for starting an importer in the middle of an import operation,
 * such as resuming a crashed transformation
 *
 * @note Must be kept synchronized with IModelImporter
 * @internal
 */
export interface IModelImporterState {
  importerClass: string;
  options: IModelImportOptions;
  targetDbId: string;
  doNotUpdateElementIds: CompressedId64Set;
  additionalState?: any;
}

/** Returns true if a change within an Entity is detected.
 * @param entity The current persistent Entity.
 * @param entityProps The new EntityProps to compare against
 * @param namesToIgnore Property names that are skipped during the comparison.
 * @note This method should only be called if changeset information is not available.
 */
function hasEntityChanged(entity: Entity, entityProps: EntityProps, namesToIgnore?: Set<string>): boolean {
  let changed: boolean = false;
  entity.forEachProperty((propertyName: string, propertyMeta: PropertyMetaData) => {
    if (!changed) {
      if (namesToIgnore && namesToIgnore.has(propertyName)) {
        // skip
      } else if (PrimitiveTypeCode.Binary === propertyMeta.primitiveType) {
        changed = hasBinaryValueChanged(entity.asAny[propertyName], (entityProps as any)[propertyName]);
      } else if (propertyMeta.isNavigation) {
        changed = hasNavigationValueChanged(entity.asAny[propertyName], (entityProps as any)[propertyName]);
      } else {
        changed = hasValueChanged(entity.asAny[propertyName], (entityProps as any)[propertyName]);
      }
    }
  });
  return changed;
}

/** Returns true if the specified binary values are different. */
function hasBinaryValueChanged(binaryProperty1: any, binaryProperty2: any): boolean {
  const jsonString1 = JSON.stringify(binaryProperty1, Base64EncodedString.replacer);
  const jsonString2 = JSON.stringify(binaryProperty2, Base64EncodedString.replacer);
  return jsonString1 !== jsonString2;
}

/** Returns true if the specified navigation property values are different. */
function hasNavigationValueChanged(navigationProperty1: any, navigationProperty2: any): boolean {
  const relatedElement1 = RelatedElement.fromJSON(navigationProperty1);
  const relatedElement2 = RelatedElement.fromJSON(navigationProperty2);
  const jsonString1 = JSON.stringify(relatedElement1);
  const jsonString2 = JSON.stringify(relatedElement2);
  return jsonString1 !== jsonString2;
}

/** Returns true if the specified property values are different (compared via their JSON serialization). */
function hasValueChanged(property1: any, property2: any): boolean {
  return JSON.stringify(property1) !== JSON.stringify(property2);
}

/** check if element props are a subcategory */
function isSubCategory(props: ElementProps): props is SubCategoryProps {
  return props.classFullName === SubCategory.classFullName;
}

/** check if subcategory props represent the default subcategory of their parent category, without loading the element */
function isDefaultSubCategory(props: SubCategoryProps): boolean {
  if (props.id === undefined) return false;
  if (!Id64.isId64(props.id)) throw new IModelError(IModelStatus.BadElement, `subcategory had invalid id`);
  if (props.parent?.id === undefined) throw new IModelError(IModelStatus.BadElement, `subcategory with id ${props.id} had no parent`);
  return props.id === IModelDb.getDefaultSubCategoryId(props.parent.id);
}
the_stack
import { Box } from '../objects/box';
import { Brick } from '../objects/brick';
import { Collectible } from '../objects/collectible';
import { Goomba } from '../objects/goomba';
import { Mario } from '../objects/mario';
import { Platform } from '../objects/platform';
import { Portal } from '../objects/portal';

/**
 * Main gameplay scene: builds the tilemap from the level stored in the game
 * registry, spawns game objects from the Tiled 'objects' layer, and wires up
 * all arcade-physics colliders/overlaps and the follow camera.
 */
export class GameScene extends Phaser.Scene {
  // tilemap
  private map: Phaser.Tilemaps.Tilemap;
  private tileset: Phaser.Tilemaps.Tileset;
  private backgroundLayer: Phaser.Tilemaps.TilemapLayer;
  private foregroundLayer: Phaser.Tilemaps.TilemapLayer;

  // game objects
  private boxes: Phaser.GameObjects.Group;
  private bricks: Phaser.GameObjects.Group;
  private collectibles: Phaser.GameObjects.Group;
  private enemies: Phaser.GameObjects.Group;
  private platforms: Phaser.GameObjects.Group;
  private player: Mario;
  private portals: Phaser.GameObjects.Group;

  constructor() {
    super({ key: 'GameScene' });
  }

  init(): void {}

  create(): void {
    // *****************************************************************
    // SETUP TILEMAP
    // *****************************************************************

    // create our tilemap from Tiled JSON
    this.map = this.make.tilemap({ key: this.registry.get('level') });
    // add our tileset and layers to our tilemap
    this.tileset = this.map.addTilesetImage('tiles');
    this.backgroundLayer = this.map.createLayer(
      'backgroundLayer',
      this.tileset,
      0,
      0
    );

    this.foregroundLayer = this.map.createLayer(
      'foregroundLayer',
      this.tileset,
      0,
      0
    );
    this.foregroundLayer.setName('foregroundLayer');

    // set collision for tiles with the property collide set to true
    this.foregroundLayer.setCollisionByProperty({ collide: true });

    // *****************************************************************
    // GAME OBJECTS
    // *****************************************************************
    this.portals = this.add.group({
      /*classType: Portal,*/
      runChildUpdate: true
    });

    this.boxes = this.add.group({
      /*classType: Box,*/
      runChildUpdate: true
    });

    this.bricks = this.add.group({
      /*classType: Brick,*/
      runChildUpdate: true
    });

    this.collectibles = this.add.group({
      /*classType: Collectible,*/
      runChildUpdate: true
    });

    this.enemies = this.add.group({
      runChildUpdate: true
    });

    this.platforms = this.add.group({
      /*classType: Platform,*/
      runChildUpdate: true
    });

    this.loadObjectsFromTilemap();

    // *****************************************************************
    // COLLIDERS
    // *****************************************************************
    this.physics.add.collider(this.player, this.foregroundLayer);
    this.physics.add.collider(this.enemies, this.foregroundLayer);
    this.physics.add.collider(this.enemies, this.boxes);
    this.physics.add.collider(this.enemies, this.bricks);
    this.physics.add.collider(this.player, this.bricks);

    this.physics.add.collider(
      this.player,
      this.boxes,
      this.playerHitBox,
      null,
      this
    );

    this.physics.add.overlap(
      this.player,
      this.enemies,
      this.handlePlayerEnemyOverlap,
      null,
      this
    );

    this.physics.add.overlap(
      this.player,
      this.portals,
      this.handlePlayerPortalOverlap,
      null,
      this
    );

    this.physics.add.collider(
      this.player,
      this.platforms,
      this.handlePlayerOnPlatform,
      null,
      this
    );

    this.physics.add.overlap(
      this.player,
      this.collectibles,
      this.handlePlayerCollectiblesOverlap,
      null,
      this
    );

    // *****************************************************************
    // CAMERA
    // *****************************************************************
    this.cameras.main.startFollow(this.player);
    this.cameras.main.setBounds(
      0,
      0,
      this.map.widthInPixels,
      this.map.heightInPixels
    );
  }

  update(): void {
    this.player.update();
  }

  /** Instantiates the game objects declared in the Tiled 'objects' layer and sorts them into the scene groups. */
  private loadObjectsFromTilemap(): void {
    // get the object layer in the tilemap named 'objects'
    const objects = this.map.getObjectLayer('objects').objects as any[];

    objects.forEach((object) => {
      if (object.type === 'portal') {
        this.portals.add(
          new Portal({
            scene: this,
            x: object.x,
            y: object.y,
            // NOTE(review): height/width are assigned from the swapped Tiled
            // dimensions (height <- object.width, width <- object.height) —
            // confirm this is intentional and matches Portal's body setup.
            height: object.width,
            width: object.height,
            spawn: {
              x: object.properties.marioSpawnX,
              y: object.properties.marioSpawnY,
              dir: object.properties.direction
            }
          }).setName(object.name)
        );
      }

      if (object.type === 'player') {
        // player spawn position comes from the registry, not the Tiled object
        this.player = new Mario({
          scene: this,
          x: this.registry.get('spawn').x,
          y: this.registry.get('spawn').y,
          texture: 'mario'
        });
      }

      if (object.type === 'goomba') {
        this.enemies.add(
          new Goomba({
            scene: this,
            x: object.x,
            y: object.y,
            texture: 'goomba'
          })
        );
      }

      if (object.type === 'brick') {
        this.bricks.add(
          new Brick({
            scene: this,
            x: object.x,
            y: object.y,
            texture: 'brick',
            value: 50
          })
        );
      }

      if (object.type === 'box') {
        this.boxes.add(
          new Box({
            scene: this,
            content: object.properties.content,
            x: object.x,
            y: object.y,
            texture: 'box'
          })
        );
      }

      if (object.type === 'collectible') {
        this.collectibles.add(
          new Collectible({
            scene: this,
            x: object.x,
            y: object.y,
            texture: object.properties.kindOfCollectible,
            points: 100
          })
        );
      }

      if (object.type === 'platformMovingUpAndDown') {
        this.platforms.add(
          new Platform({
            scene: this,
            x: object.x,
            y: object.y,
            texture: 'platform',
            tweenProps: {
              y: {
                value: 50,
                duration: 1500,
                ease: 'Power0'
              }
            }
          })
        );
      }

      if (object.type === 'platformMovingLeftAndRight') {
        this.platforms.add(
          new Platform({
            scene: this,
            x: object.x,
            y: object.y,
            texture: 'platform',
            tweenProps: {
              x: {
                value: object.x + 50,
                duration: 1200,
                ease: 'Power0'
              }
            }
          })
        );
      }
    });
  }

  /**
   * Player <-> Enemy Overlap
   * @param _player [Mario]
   * @param _enemy [Enemy]
   */
  private handlePlayerEnemyOverlap(_player: Mario, _enemy: Goomba): void {
    if (_player.body.touching.down && _enemy.body.touching.up) {
      // player hit enemy on top
      _player.bounceUpAfterHitEnemyOnHead();
      _enemy.gotHitOnHead();

      // fade the squashed enemy out over one second, then notify it via isDead()
      this.add.tween({
        targets: _enemy,
        props: { alpha: 0 },
        duration: 1000,
        ease: 'Power0',
        yoyo: false,
        onComplete: function () {
          _enemy.isDead();
        }
      });
    } else {
      // player got hit from the side or on the head
      if (_player.getVulnerable()) {
        _player.gotHit();
      }
    }
  }

  /**
   * Player <-> Box Collision
   * @param _player [Mario]
   * @param _box [Box]
   */
  private playerHitBox(_player: Mario, _box: Box): void {
    if (_box.body.touching.down && _box.active) {
      // ok, mario has really hit a box on the downside
      _box.yoyoTheBoxUpAndDown();
      this.collectibles.add(_box.spawnBoxContent());

      switch (_box.getBoxContentString()) {
        // have a look what is inside the box! Christmas time!
        case 'coin': {
          _box.tweenBoxContent({ y: _box.y - 40, alpha: 0 }, 700, function () {
            _box.getContent().destroy();
          });

          _box.addCoinAndScore(1, 100);
          break;
        }
        case 'rotatingCoin': {
          _box.tweenBoxContent({ y: _box.y - 40, alpha: 0 }, 700, function () {
            _box.getContent().destroy();
          });

          _box.addCoinAndScore(1, 100);
          break;
        }
        case 'flower': {
          _box.tweenBoxContent({ y: _box.y - 8 }, 200, function () {
            _box.getContent().anims.play('flower');
          });
          break;
        }
        case 'mushroom': {
          _box.popUpCollectible();
          break;
        }
        case 'star': {
          _box.popUpCollectible();
          break;
        }
        default: {
          break;
        }
      }
      _box.startHitTimeline();
    }
  }

  /** Player <-> Portal Overlap: either teleports mario to the portal destination or exits to the menu. */
  private handlePlayerPortalOverlap(_player: Mario, _portal: Portal): void {
    if (
      (_player.getKeys().get('DOWN').isDown &&
        _portal.getPortalDestination().dir === 'down') ||
      (_player.getKeys().get('RIGHT').isDown &&
        _portal.getPortalDestination().dir === 'right')
    ) {
      // set new level and new destination for mario
      this.registry.set('level', _portal.name);
      this.registry.set('spawn', {
        x: _portal.getPortalDestination().x,
        y: _portal.getPortalDestination().y,
        dir: _portal.getPortalDestination().dir
      });

      // restart the game scene
      this.scene.restart();
    } else if (_portal.name === 'exit') {
      this.scene.stop('GameScene');
      this.scene.stop('HUDScene');
      this.scene.start('MenuScene');
    }
  }

  /** Player <-> Collectible Overlap: applies the pick-up effect, then marks the collectible as collected. */
  private handlePlayerCollectiblesOverlap(
    _player: Mario,
    _collectible: Collectible
  ): void {
    switch (_collectible.texture.key) {
      case 'flower': {
        break;
      }
      case 'mushroom': {
        _player.growMario();
        break;
      }
      case 'star': {
        break;
      }
      default: {
        break;
      }
    }
    _collectible.collected();
  }

  // TODO!!!
  private handlePlayerOnPlatform(player: Mario, platform: Platform): void {
    if (
      platform.body.moves &&
      platform.body.touching.up &&
      player.body.touching.down
    ) {
      // intentionally empty for now — see TODO above
    }
  }
}
the_stack
import { AbstractControlOptions, AsyncValidatorFn, FormBuilder, ValidatorFn } from '@angular/forms';
import { ClassConstructor } from 'class-transformer';
import 'reflect-metadata';
import {
  DEFAULT_CLASS_TRANSFORM_OPTIONS,
  DEFAULT_CLASS_TRANSFORM_TO_PLAIN_OPTIONS,
  DEFAULT_CLASS_VALIDATOR_OPTIONS,
} from '../constants/default';
import { DynamicFormBuilderOptions } from '../types/dynamic-form-builder-options';
import {
  DynamicFormGroupConfig,
  isAbstractControlOptions,
  isDynamicFormGroupConfig,
  isLegacyOrOpts,
} from '../types/dynamic-form-group-config';
import { FormModel } from '../types/form-model';
import { DynamicFormGroup, getClassValidators } from './dynamic-form-group';

const cloneDeep = require('lodash.clonedeep');

/**
 * A FormBuilder specialization that produces DynamicFormGroup instances whose
 * validators are derived (via getClassValidators) from class-validator metadata
 * on the model class.
 */
export class DynamicFormBuilder extends FormBuilder {
  // need for createEmptyObject
  protected emptyDynamicFormGroup = this.factoryDynamicFormGroup(Object);

  constructor(protected options?: DynamicFormBuilderOptions) {
    super();
  }

  // ******************
  // Public API

  /**
   * Builds a DynamicFormGroup for the given model class.
   * @param factoryModel the model class whose metadata drives controls/validators
   * @param controlsConfig optional explicit control layout; when omitted, a layout is
   *   derived from an empty instance of the model (see createEmptyObject)
   * @param options Angular AbstractControlOptions and/or DynamicFormGroupConfig extras
   */
  group<TModel>(
    factoryModel: ClassConstructor<TModel>,
    controlsConfig?: FormModel<TModel> | DynamicFormGroupConfig | { [key: string]: any },
    options?: AbstractControlOptions | DynamicFormGroupConfig
  ): DynamicFormGroup<TModel> {
    // console.time(factoryModel.toString());
    if (!controlsConfig && !options) {
      options = {};
    }
    // Supports the overload where the second argument is actually an options/config
    // object: if no separate options were supplied, re-invoke with controlsConfig
    // shifted into the options position.
    if (
      controlsConfig &&
      (isAbstractControlOptions(controlsConfig) ||
        isLegacyOrOpts(controlsConfig) ||
        isDynamicFormGroupConfig(controlsConfig)) &&
      Object.keys(options || {}).length === 0
    ) {
      return this.group(factoryModel, undefined, controlsConfig);
    }
    // deep-clone so the caller's options object is never mutated below
    let extra: DynamicFormGroupConfig = cloneDeep(options) as DynamicFormGroupConfig;

    let validators: ValidatorFn[] | null = null;
    let asyncValidators: AsyncValidatorFn[] | null = null;
    let updateOn: any;

    if (extra != null) {
      if (isAbstractControlOptions(extra)) {
        // `extra` are `AbstractControlOptions`
        validators = extra.validators != null ? extra.validators : null;
        asyncValidators = extra.asyncValidators != null ? extra.asyncValidators : null;
        updateOn = extra.updateOn != null ? extra.updateOn : undefined;
      }
      if (isLegacyOrOpts(extra)) {
        // `extra` are legacy form group options
        validators = validators || [];
        if (extra.validator != null) {
          validators.push(extra.validator);
        }
        asyncValidators = asyncValidators || [];
        if (extra.asyncValidator != null) {
          asyncValidators.push(extra.asyncValidator);
        }
      }
    } else {
      extra = {};
    }
    // Builder-level defaults apply only when the call site did not set them itself
    if (this.options?.classValidatorOptions && !extra.classValidatorOptions) {
      extra.classValidatorOptions = this.options?.classValidatorOptions;
    }
    if (this.options?.classTransformOptions && !extra.classTransformOptions) {
      extra.classTransformOptions = this.options?.classTransformOptions;
    }
    if (this.options?.classTransformToPlainOptions && !extra.classTransformToPlainOptions) {
      extra.classTransformToPlainOptions = this.options?.classTransformToPlainOptions;
    }
    if (this.options?.validateAllFormFields !== undefined && extra.validateAllFormFields === undefined) {
      extra.validateAllFormFields = this.options?.validateAllFormFields;
    }
    // Set default classValidatorOptions
    if (!extra.classValidatorOptions) {
      extra.classValidatorOptions = DEFAULT_CLASS_VALIDATOR_OPTIONS;
    }
    if (!extra.classTransformOptions) {
      extra.classTransformOptions = DEFAULT_CLASS_TRANSFORM_OPTIONS;
    }
    if (!extra.classTransformToPlainOptions) {
      extra.classTransformToPlainOptions = DEFAULT_CLASS_TRANSFORM_TO_PLAIN_OPTIONS;
    }
    if (extra.validateAllFormFields === undefined) {
      extra.validateAllFormFields = false;
    }

    let newControlsConfig: FormModel<TModel> | undefined;

    if (controlsConfig !== undefined) {
      newControlsConfig = controlsConfig as FormModel<TModel>;
    }

    // experimental
    if (controlsConfig === undefined) {
      // derive a control layout from an empty model instance, then recursively
      // convert nested objects/arrays into groups/form arrays
      newControlsConfig = { ...this.createEmptyObject(factoryModel, { __experimental__: true }) };
      if (newControlsConfig !== undefined) {
        Object.keys(newControlsConfig).forEach((key) => {
          if (canCreateGroup() && newControlsConfig) {
            // recursively create a dynamic group for the nested object
            newControlsConfig[key] = this.group(newControlsConfig[key].constructor, undefined, {
              classValidatorOptions: extra.classValidatorOptions,
              classTransformOptions: extra.classTransformOptions,
              classTransformToPlainOptions: extra.classTransformToPlainOptions,
              validateAllFormFields: extra.validateAllFormFields,
              asyncValidators,
              updateOn,
              validators,
            });
          } else {
            if (canCreateArray() && newControlsConfig) {
              if (newControlsConfig[key][0].constructor) {
                // recursively create an array with a group
                newControlsConfig[key] = super.array(
                  newControlsConfig[key].map((newControlsConfigItem) =>
                    this.group(newControlsConfigItem.constructor, undefined, {
                      classValidatorOptions: extra.classValidatorOptions,
                      classTransformOptions: extra.classTransformOptions,
                      classTransformToPlainOptions: extra.classTransformToPlainOptions,
                      validateAllFormFields: extra.validateAllFormFields,
                      asyncValidators,
                      updateOn,
                      validators,
                    })
                  )
                );
              } else {
                // Create an array of form controls
                newControlsConfig[key] = super.array(
                  newControlsConfig[key].map((newControlsConfigItem) => this.control(newControlsConfigItem))
                );
              }
            }
          }

          // True when the value under `key` is a non-array object with a constructor
          // (hoisted function declaration — usable above despite being defined here)
          function canCreateGroup() {
            const candidate = newControlsConfig && newControlsConfig[key];
            return (
              candidate &&
              !Array.isArray(candidate) &&
              candidate.constructor &&
              typeof candidate === 'object' &&
              (candidate.length === undefined ||
                (candidate.length !== undefined && Object.keys(candidate).length === candidate.length))
            );
          }

          // True when the value under `key` is an array whose first item is object-like
          function canCreateArray() {
            if (Array.isArray(newControlsConfig && newControlsConfig[key]) === false) {
              return false;
            }
            const candidate = newControlsConfig && newControlsConfig[key][0];
            return (
              candidate.constructor &&
              typeof candidate === 'object' &&
              (candidate.length === undefined ||
                (candidate.length !== undefined && Object.keys(candidate).length === candidate.length))
            );
          }
        });
      }
    }

    // Remove empty
    validators = validators && validators.filter((validator) => validator);
    asyncValidators = asyncValidators && asyncValidators.filter((validator) => validator);

    // Create an Angular group from the top-level object
    let classValidators: any = getClassValidators<TModel>(
      factoryModel,
      newControlsConfig,
      extra && extra.classValidatorOptions
    );
    let formGroup: any = super.group(classValidators, {
      ...(asyncValidators || {}),
      ...(updateOn || {}),
      ...(validators || {}),
    });

    // Initialize the resulting group
    const dynamicFormGroup = this.factoryDynamicFormGroup<TModel>(
      factoryModel,
      newControlsConfig,
      {
        asyncValidators,
        updateOn,
        validators,
      },
      undefined,
      extra
    );

    // Add all angular controls to the resulting dynamic group
    Object.keys(formGroup.controls).forEach((key) => {
      dynamicFormGroup.addControl(key, formGroup.controls[key]);
    });

    // Add a listener to the dynamic group for value changes; on change, execute validation
    dynamicFormGroup.subscribeToValueChanges(undefined, extra && extra.classValidatorOptions);

    // release the temporaries so they can be garbage-collected
    classValidators = null;
    formGroup = null;

    if (extra.validateAllFormFields) {
      dynamicFormGroup.validate();
    }
    // console.timeEnd(factoryModel.toString());
    return dynamicFormGroup;
  }

  protected factoryFormBuilder() {
    return new FormBuilder();
  }

  /** Creates the DynamicFormGroup (or delegates to a user-supplied factory) and wires it back to this builder. */
  public factoryDynamicFormGroup<TModel>(
    factoryModel: ClassConstructor<TModel>,
    fields?: FormModel<TModel>,
    validatorOrOpts?: ValidatorFn | ValidatorFn[] | AbstractControlOptions | null,
    asyncValidator?: AsyncValidatorFn | AsyncValidatorFn[] | null,
    options?: DynamicFormGroupConfig
  ) {
    const formGroup = this.options?.factoryDynamicFormGroup
      ? this.options.factoryDynamicFormGroup(factoryModel, fields, validatorOrOpts, asyncValidator, options)
      : new DynamicFormGroup<TModel>(factoryModel, fields, validatorOrOpts, asyncValidator, options);
    formGroup.dynamicFormBuilder = this;
    formGroup.originalFormBuilder = this.options?.factoryFormBuilder
      ? this.options?.factoryFormBuilder()
      : this.factoryFormBuilder();
    return formGroup;
  }

  // *******************
  // Helpers

  /**
   * Recursively creates an empty object from the data provided
   */
  protected createEmptyObject<TModel>(factoryModel: ClassConstructor<TModel>, data = {}) {
    let modifed = false;
    let object: any = factoryModel ? this.emptyDynamicFormGroup.plainToClass(factoryModel, data) : data;
    let fields: any = Object.keys(object);
    let objectFieldNameLength: number | undefined;
    let objectFieldName0: any;
    fields.forEach((fieldName: any) => {
      const object1 = object[fieldName];
      objectFieldNameLength = object1 && Array.isArray(object1) ? object1.length : undefined;
      if (objectFieldNameLength !== undefined) {
        objectFieldName0 = object1[0];
        if (objectFieldNameLength === 1 && Object.keys(objectFieldName0).length > 0 && objectFieldName0.constructor) {
          object[fieldName] = [this.createEmptyObject(objectFieldName0.constructor)];
        }
        if (objectFieldNameLength === 0) {
          // seed empty arrays with one placeholder element, then rebuild below
          data[fieldName] = [{}];
          modifed = true;
        }
      } else {
        data[fieldName] = undefined;
      }
    });
    if (modifed) {
      object = null;
      fields = null;
      // a seed was added for an empty array — rebuild from the updated data
      return this.createEmptyObject(factoryModel, data);
    }
    fields = null;
    return object;
  }
}
the_stack
import * as numbers from "./magic_numbers";
import * as _ from "lodash";

/** Default font name */
export const FONT_NAME = "gs-bmp-font";
/** Italic font name */
export const ITALIC_FONT_NAME = "gs-italic-bmp-font";

/** Element keys */
export enum elements {
    VENUS = "venus",
    MERCURY = "mercury",
    MARS = "mars",
    JUPITER = "jupiter",
    NO_ELEMENT = "no_element",
    ALL_ELEMENTS = "all_elements",
}

/** Default elements order */
export const ordered_elements = [elements.VENUS, elements.MERCURY, elements.MARS, elements.JUPITER];

/** Element names */
export const element_names = {
    [elements.VENUS]: "Earth",
    [elements.MERCURY]: "Water",
    [elements.MARS]: "Fire",
    [elements.JUPITER]: "Wind",
    [elements.ALL_ELEMENTS]: "All elements",
};

/** Element colors */
export const element_colors = {
    [elements.VENUS]: 0xfef116,
    [elements.MERCURY]: 0x0ad2ef,
    [elements.MARS]: 0xf87000,
    [elements.JUPITER]: 0xe070b0,
};

/** Element colors in battle */
export const element_colors_in_battle = {
    [elements.VENUS]: 0xf8f848,
    [elements.MERCURY]: 0x80f8f8,
    [elements.MARS]: 0xf87038,
    [elements.JUPITER]: 0xf7adf7,
    [elements.NO_ELEMENT]: 0xc6c6c6,
};

/** 8-Directional direction values */
export enum directions {
    right = 0,
    down_right = 1,
    down = 2,
    down_left = 3,
    left = 4,
    up_left = 5,
    up = 6,
    up_right = 7,
}

/** 8-Directional direction keys */
export const reverse_directions = {
    [directions.right]: "right",
    [directions.up_right]: "up_right",
    [directions.up]: "up",
    [directions.up_left]: "up_left",
    [directions.left]: "left",
    [directions.down_left]: "down_left",
    [directions.down]: "down",
    [directions.down_right]: "down_right",
};

/** The directions angles */
export const directions_angles = {
    [directions.right]: 0,
    [directions.up_right]: numbers.degree360 - numbers.degree45,
    [directions.up]: numbers.degree270,
    [directions.up_left]: numbers.degree270 - numbers.degree45,
    [directions.left]: Math.PI,
    [directions.down_left]: Math.PI - numbers.degree45,
    [directions.down]: numbers.degree90,
    [directions.down_right]: numbers.degree45,
};

/** Base actions of a controllable char */
export enum base_actions {
    IDLE = "idle",
    WALK = "walk",
    DASH = "dash",
    PUSH = "push",
    CLIMB = "climb",
    CAST = "cast",
    JUMP = "jump",
    BATTLE = "battle",
    GRANT = "grant",
    ROPE = "rope",
}

/**
 * Generate a mask for the given direction.
 * Examples:
 * - The given direction is left, which is 4, then this function will return: 00010000
 * - The given direction is right, which is 0, then this function will return: 00000001
 * @param direction The direction to generate the mask.
 * @returns Returns the mask.
 */
export function get_direction_mask(direction: directions) {
    if (direction === null) return 0;
    return direction === 0 ? 1 : 2 << (direction - 1);
}

/**
 * Returns the direction values for diagonal directions
 * Example: Input: 7 (up_right) / Output: [6,0]
 * @param direction Diagonal direction value
 * @returns Array with split direction values
 */
export function split_direction(direction: directions) {
    if (direction % 2 === 0) return [direction];
    const vals: directions[] = new Array(2);
    // NOTE(review): the `directions.right` guard below looks unreachable — `right` is even
    // and even directions returned early above. Kept as in the original.
    vals[0] = direction === directions.right ? directions.up_right : direction - 1;
    vals[1] = direction === directions.up_right ? directions.right : direction + 1;
    return vals;
}

/**
 * Returns the diagonal value for its component directions
 * Example: Input: 6, 0 (up, right) / Output: 7 (up_right)
 *
 * @param {number} dir_1 - Direction values
 * @param {number} dir_2 - Direction values
 * @return {number} Diagonal direction value
 */
export function join_directions(dir_1: directions, dir_2: directions): directions {
    // up+right wraps around the compass, so treat right as 8 before taking the min
    dir_2 = dir_1 === directions.up && dir_2 === directions.right ? 8 : dir_2;
    return Math.min(dir_1, dir_2) + 1;
}

/**
 * Given a direction, returns this direction and its adjacent directions.
 * @param direction the direction value.
 * @returns Returns the range of the given direction.
 */
export function direction_range(direction: directions): directions[] {
    return [(direction + 7) % 8, direction, (direction + 1) % 8];
}

/**
 * Returns a random number from 0 to 4.
 * @returns Random number from 0 to 4
 */
export function variation() {
    return _.random(0, 4);
}

/**
 * Places the angle (radians) in the [0,2*PI[ range.
 * @param angle any angle in radians.
 * @returns Angle in the [0,2*PI[ range.
 */
export function range_360(angle: number) {
    angle = angle % numbers.degree360;
    angle = angle < 0 ? angle + numbers.degree360 : angle;
    return angle;
}

/**
 * Returns the opposite of the given direction.
 * @param direction Direction value
 * @returns Opposite direction value
 */
export function get_opposite_direction(direction: directions): directions {
    return (direction + 4) % 8;
}

/**
 * Transforms a direction into a non diagonal direction.
 * @param direction the input direction.
 * @returns returns the non diagonal direction.
 */
export function get_non_diagonal_direction(direction: directions) {
    return (direction % 2 ? direction + 1 : direction) % 8;
}

/**
 * Gets the directions between the current direction and a desired/target direction.
 * Example of transition between left and right: left -> up left -> up -> up right -> right.
 * @param current_direction Current direction value
 * @param desired_direction Desired/Target direction value
 * @returns The direction value to apply
 */
export function get_transition_directions(current_direction: directions, desired_direction: directions): directions {
    const diff = desired_direction - current_direction;
    if (diff === 0) return current_direction;
    // step one direction at a time, always taking the shorter way around the compass
    const sign = diff === 4 ? -1 : Math.sign(diff);
    return (current_direction + (Math.abs(diff) >= 4 ? -sign : sign) + 8) % 8;
}

/**
 * Gets the direction of a given vector.
 * @param x1 x source pos.
 * @param x2 x dest pos.
 * @param y1 y source pos.
 * @param y2 y dest pos.
 * @returns The direction of the given vector.
 */
export function get_vector_direction(x1: number, x2: number, y1: number, y2: number): directions {
    const angle = range_360(Math.atan2(y2 - y1, x2 - x1));
    return (((8 * (angle + numbers.degree45_half)) / numbers.degree360) | 0) % 8;
}

/**
 * Given a direction and a current position, returns the next front position.
 * @param x_pos x tile position.
 * @param y_pos y tile position.
 * @param direction the direction that's going towards.
 * @param strict_cardinal if true, returns null on diagonal input. Otherwise, returns input as is.
 * @returns the front position.
 */
export function get_front_position(
    x_pos: number,
    y_pos: number,
    direction: directions,
    strict_cardinal: boolean = true
) {
    switch (direction) {
        case directions.up:
            --y_pos;
            break;
        case directions.down:
            ++y_pos;
            break;
        case directions.right:
            ++x_pos;
            break;
        case directions.left:
            --x_pos;
            break;
        default:
            if (strict_cardinal) {
                return null;
            }
    }
    return {
        x: x_pos,
        y: y_pos,
    };
}

/**
 * Obtains the text width in pixels (INEFFICIENT).
 * @param game the Phaser.Game instance
 * @param text the text string to measure width
 * @param italic whether the text is in italic
 * @returns the measured width in pixels.
 */
export function get_text_width(game: Phaser.Game, text: string, italic = false) {
    //get text width in px (dirty way)
    const font_name = italic ? ITALIC_FONT_NAME : FONT_NAME;
    const text_sprite = game.add.bitmapText(0, 0, font_name, text, numbers.FONT_SIZE);
    const text_width = text_sprite.width;
    text_sprite.destroy();
    return text_width;
}

/**
 * Either kills or destroys each sprite in the group
 * @param group The parent group
 * @param destroy If true, child is destroyed instead
 */
export function kill_all_sprites(group, destroy = false) {
    group.children.forEach(child => {
        if (destroy) child.parent.remove(child, true);
        else child.kill();
    });
}

/**
 * Gets the center position of a tile in pixels.
 * @param tile_pos the tile position.
 * @param tile_size the tile size.
 * @returns centered tile position in pixels.
 */
export function get_centered_pos_in_px(tile_pos: number, tile_size: number) {
    return tile_pos * tile_size + (tile_size >> 1);
}

/**
 * Gets the tile position of a given position in px.
 * @param pos the position in px.
 * @param tile_size the tile size.
 * @returns returns the tile position.
 */
export function get_tile_position(pos: number, tile_size: number) {
    return (pos / tile_size) | 0;
}

/**
 * Gets the px position of a given position in tile.
 * @param tile_pos the position in tile.
 * @param tile_size the tile size.
 * @returns returns the px position.
 */
export function get_px_position(tile_pos: number, tile_size: number) {
    return tile_pos * tile_size;
}

/**
 * Advances a step in a given direction.
 * @param current_x the current x pos in px.
 * @param current_y the current y pos in px.
 * @param distance the step distance.
 * @param angle_direction the angle of the new direction.
 * @returns Returns the new x and y position.
 */
export function next_px_step(current_x: number, current_y: number, distance: number, angle_direction: number) {
    return {
        x: (current_x + distance * Math.cos(angle_direction)) | 0,
        y: (current_y + distance * Math.sin(angle_direction)) | 0,
    };
}

/**
 * Calculates the distance between two points.
 * @param x1 source x position.
 * @param x2 dest x position.
 * @param y1 source y position.
 * @param y2 dest y position.
 * @returns return the distance value.
 */
export function get_distance(x1: number, x2: number, y1: number, y2: number) {
    return Math.sqrt((x2 - x1) * (x2 - x1) + (y2 - y1) * (y2 - y1));
}

/**
 * Calculates the squared distance between two points.
 * @param x1 source x position.
 * @param x2 dest x position.
 * @param y1 source y position.
 * @param y2 dest y position.
 * @returns return the squared distance value.
 */
export function get_sqr_distance(x1: number, x2: number, y1: number, y2: number) {
    return (x2 - x1) * (x2 - x1) + (y2 - y1) * (y2 - y1);
}

/**
 * Returns the surrounding positions. Diagonals are optional.
 * @param x the x reference position.
 * @param y the y reference position.
 * @param with_diagonals if true, includes diagonals.
 * @param shift how distant the surrounding positions are from the given position.
 * @returns Returns the surrounding positions.
 */
export function get_surroundings(x: number, y: number, with_diagonals = false, shift = 1) {
    let surroundings = [
        {x: x - shift, y: y, diag: false, direction: directions.left},
        {x: x + shift, y: y, diag: false, direction: directions.right},
        {x: x, y: y - shift, diag: false, direction: directions.up},
        {x: x, y: y + shift, diag: false, direction: directions.down},
    ];
    if (with_diagonals) {
        surroundings = surroundings.concat([
            {x: x - shift, y: y - shift, diag: true, direction: directions.up_left},
            {x: x + shift, y: y - shift, diag: true, direction: directions.up_right},
            {x: x - shift, y: y + shift, diag: true, direction: directions.down_left},
            {x: x + shift, y: y + shift, diag: true, direction: directions.down_right},
        ]);
    }
    return surroundings;
}

/**
 * Lists all directions, diagonals optional.
 * @param with_diagonals If true, includes diagonals.
 * @returns Returns the directions.
 */
export function get_directions(with_diagonals = false) {
    const dirs = [directions.up, directions.down, directions.left, directions.right];
    if (with_diagonals) {
        dirs.push(...[directions.up_left, directions.up_right, directions.down_left, directions.down_right]);
    }
    return dirs;
}

/**
 * Converts a color from hex to rgb.
 * @param hex the color in hex format.
 * @returns the color in rgb format.
 */
export function hex2rgb(hex: string | number) {
    if (typeof hex === "string") {
        hex = hex.replace(/^\s*#|\s*$/g, "");
    } else {
        hex = hex.toString(16);
    }
    if (hex.length == 3) {
        // expand shorthand (e.g. "f0a" -> "ff00aa")
        hex = hex.replace(/(.)/g, "$1$1");
    } else {
        hex = ("000000" + hex).slice(-6);
    }
    const r = parseInt(hex.substr(0, 2), 16);
    const g = parseInt(hex.substr(2, 2), 16);
    const b = parseInt(hex.substr(4, 2), 16);
    return {r: r, g: g, b: b};
}

/**
 * Changes the brightness of a given color code.
 * @param hex the input color in hex.
 * @param percent how much the brightness should change.
 * @returns returns the new color in hex.
 */
export function change_brightness(hex: string | number, percent: number) {
    let {r, g, b} = hex2rgb(hex);
    let h, s, v;
    [h, s, v] = rgb2hsv(r, g, b);
    // scale the value (brightness) channel, then convert back
    v = (v * percent) | 0;
    [r, g, b] = hsv2rgb(h, s, v);
    hex = ((1 << 24) + (r << 16) + (g << 8) + b).toString(16).slice(1);
    return parseInt(hex, 16);
}

/**
 * Transforms RGB color into HSV color.
 * @param r red channel color value [0, 255]
 * @param g green channel color value [0, 255]
 * @param b blue channel color value [0, 255]
 * @returns returns the color in HSV
 */
export function rgb2hsv(r: number, g: number, b: number) {
    const v = Math.max(r, g, b),
        n = v - Math.min(r, g, b);
    const h = n && (v === r ? (g - b) / n : v === g ? 2 + (b - r) / n : 4 + (r - g) / n);
    return [60 * (h < 0 ? h + 6 : h), v && n / v, v];
}

/**
 * Transforms HSV color into RGB color
 * @param h hue channel color value
 * @param s saturation channel color value
 * @param v value channel color value
 * @returns returns the color in RGB
 */
export function hsv2rgb(h: number, s: number, v: number) {
    let f = (n, k = (n + h / 60) % 6) => v - v * s * Math.max(Math.min(k, 4 - k, 1), 0);
    return [f(5), f(3), f(1)];
}

/**
 * Creates a collision polygon.
 * @param width Width of the body
 * @param shift Shift value
 * @param bevel Body's bevel value
 * @returns Returns a list of coordinates of the polygon.
*/
export function mount_collision_polygon(width: number, shift: number, bevel: number = 0) {
    const beveled = bevel !== 0;
    // Walk the square clockwise from the top edge; when a bevel is requested,
    // each corner is cut by inserting an extra vertex.
    const points: number[][] = [];
    points.push([bevel + shift, shift]);
    if (beveled) points.push([width - bevel + shift, shift]);
    points.push([width + shift, bevel + shift]);
    if (beveled) points.push([width + shift, width - bevel + shift]);
    points.push([width - bevel + shift, width + shift]);
    if (beveled) points.push([bevel + shift, width + shift]);
    points.push([shift, width - bevel + shift]);
    if (beveled) points.push([shift, bevel + shift]);
    return points;
}

/**
 * Standard Normal variate using Box-Muller transform. Mean = 0, variance = 1.
 * @returns returns a normal pseudo-random number.
 */
export function random_normal() {
    // Math.random() can return exactly 0, which Math.log cannot take,
    // so redraw each uniform until it is nonzero.
    let u: number;
    let v: number;
    do {
        u = Math.random();
    } while (u === 0);
    do {
        v = Math.random();
    } while (v === 0);
    return Math.sqrt(-2.0 * Math.log(u)) * Math.cos(2.0 * Math.PI * v);
}

/**
 * Convert a string to a `PIXI.blendModes` enum value
 * @param blend_mode Desired blend mode string
 * @returns returns a `PIXI.blendModes` enum value
 */
export function parse_blend_mode(blend_mode: string) {
    const mode = PIXI.blendModes[blend_mode];
    return (mode as unknown) as PIXI.blendModes;
}

/**
 * Promised way to create and wait a Phaser.Timer. Waits for the amount of time given.
 * @param game the Phaser game object.
 * @param time the time in ms.
 * @param callback on timer finish callback.
 */
export async function promised_wait(game: Phaser.Game, time: number, callback?: () => void) {
    await new Promise<void>(resolve => {
        // Auto-destroying timer that fires once after `time` ms.
        const timer = game.time.create(true);
        timer.add(time, () => {
            if (callback) {
                callback();
            }
            resolve();
            timer.destroy();
        });
        timer.start();
    });
}
the_stack
import * as debug_ from "debug";
import { promises as fsp } from "fs";
import * as http from "http";
import * as https from "https";
import { Headers, RequestInit } from "node-fetch";
import { AbortSignal as IAbortSignal } from "node-fetch/externals";
import {
    IHttpGetResult, THttpGetCallback, THttpOptions, THttpResponse,
} from "readium-desktop/common/utils/http";
import { decryptPersist, encryptPersist } from "readium-desktop/main/fs/persistCrypto";
import { IS_DEV } from "readium-desktop/preprocessor-directives";
import { tryCatch, tryCatchSync } from "readium-desktop/utils/tryCatch";
import { resolve } from "url";

import { ConfigRepository } from "../db/repository/config";
import { diMainGet, opdsAuthFilePath } from "../di";
import { fetchWithCookie } from "./fetch";

// Main-process HTTP layer: wraps fetchWithCookie with manual redirect
// handling and an OPDS access-token store persisted (encrypted) on disk.

// Logger
const filename_ = "readium-desktop:main/http";
const debug = debug_(filename_);

// Default request timeout in milliseconds, used when the caller provides none.
const DEFAULT_HTTP_TIMEOUT = 30000;

// https://github.com/node-fetch/node-fetch/blob/master/src/utils/is-redirect.js
const redirectStatus = new Set([301, 302, 303, 307, 308]);

// In-memory cache of OPDS authentication tokens, keyed by
// CONFIGREPOSITORY_OPDS_AUTHENTICATION_TOKEN_fn(host). Loaded lazily by
// authenticationTokenInit() and written back by persistJson().
let authenticationToken: Record<string, IOpdsAuthenticationToken> = {};

/**
 * Redirect code matching
 *
 * @param {number} code - Status code
 * @return {boolean}
 */
const isRedirect = (code: number) => {
    return redirectStatus.has(code);
};

// Maximum number of redirects followed before httpFetchRawResponse throws.
const FOLLOW_REDIRECT_COUNTER = 20;

/**
 * Formats an HTTP Authorization header value, e.g. ("Bearer", "xyz") -> "Bearer xyz".
 */
export const httpSetHeaderAuthorization =
    (type: string, credentials: string) => `${type} ${credentials}`;

export const CONFIGREPOSITORY_OPDS_AUTHENTICATION_TOKEN = "CONFIGREPOSITORY_OPDS_AUTHENTICATION_TOKEN";

// Derives the per-host storage key: the common prefix plus the base64-encoded host.
// tslint:disable-next-line: variable-name
const CONFIGREPOSITORY_OPDS_AUTHENTICATION_TOKEN_fn =
    (host: string) => `${CONFIGREPOSITORY_OPDS_AUTHENTICATION_TOKEN}.${Buffer.from(host).toString("base64")}`;

/**
 * OPDS authentication credentials for one catalog host.
 */
export interface IOpdsAuthenticationToken {
    id?: string;
    opdsAuthenticationUrl?: string; // application/opds-authentication+json
    refreshUrl?: string;
    authenticateUrl?: string;
    accessToken?: string;
    refreshToken?: string;
    tokenType?: string;
}

let authenticationTokenInitialized = false;

// Lazily loads the token store: first from the encrypted JSON file; when that
// is missing or malformed, falls back to migrating entries out of the legacy
// config-repository database. Idempotent after the first successful run.
const authenticationTokenInit = async () => {
    if (authenticationTokenInitialized) {
        return;
    }
    const data = await tryCatch(() => fsp.readFile(opdsAuthFilePath), "");
    let docsFS: string | undefined;
    if (data) {
        try {
            docsFS = decryptPersist(data, CONFIGREPOSITORY_OPDS_AUTHENTICATION_TOKEN, opdsAuthFilePath);
        } catch (_err) {
            docsFS = undefined;
        }
    }
    let docs: Record<string, IOpdsAuthenticationToken>;
    // Comma-operator trick: parse into `docs` and validate in one expression —
    // every key must carry the expected prefix and every value must be an object.
    const isValid = typeof docsFS === "string" && (
        docs = JSON.parse(docsFS),
        typeof docs === "object" && Object.entries(docs)
            .reduce((pv, [k, v]) =>
                pv &&
                k.startsWith(CONFIGREPOSITORY_OPDS_AUTHENTICATION_TOKEN) &&
                typeof v === "object", true));
    if (!isValid) {
        // Fallback/migration path: rebuild the map from the legacy database.
        const configDoc = diMainGet("config-repository") as ConfigRepository<IOpdsAuthenticationToken>;
        docs = await tryCatch(async () =>
            (await configDoc.findAll())
                .filter((v) => v.identifier.startsWith(CONFIGREPOSITORY_OPDS_AUTHENTICATION_TOKEN))
                .map<[string, IOpdsAuthenticationToken]>((v) => [v.identifier, v.value])
                .reduce<Record<string, IOpdsAuthenticationToken>>((pv, [k, v]) => ({
                    ...pv,
                    [k]: v,
                }), {}),
            "");
    }
    if (!docs) {
        docs = {};
    }
    authenticationToken = docs;
    authenticationTokenInitialized = true;
};

/**
 * Stores (and persists) the authentication token for the host of
 * data.opdsAuthenticationUrl. Throws when that URL is missing.
 * @returns the stored token.
 */
export const httpSetAuthenticationToken =
    async (data: IOpdsAuthenticationToken) => {
        // NOTE: a legacy implementation stored the token in the
        // config-repository database; it now lives in the encrypted JSON file.
        if (!data.opdsAuthenticationUrl) {
            throw new Error("no opdsAutenticationUrl !!");
        }
        await authenticationTokenInit();
        const url = new URL(data.opdsAuthenticationUrl);
        const { host } = url;
        // do not risk showing plaintext access/refresh tokens in console / command line shell
        debug("SET opds authentication credentials for", host); // data
        const id = CONFIGREPOSITORY_OPDS_AUTHENTICATION_TOKEN_fn(host);
        const res = authenticationToken[id] = data;
        await persistJson();
        return res;
    };

// Encrypts the in-memory token map and writes it to opdsAuthFilePath.
const persistJson = () => tryCatch(() => {
    if (!authenticationToken) return Promise.resolve();
    const encrypted = encryptPersist(JSON.stringify(authenticationToken), CONFIGREPOSITORY_OPDS_AUTHENTICATION_TOKEN, opdsAuthFilePath);
    return fsp.writeFile(opdsAuthFilePath, encrypted);
}, "");

/**
 * Forces initialization (including the legacy-DB migration path) and
 * immediately persists the result to the JSON file.
 */
export const absorbDBToJson = async () => {
    await authenticationTokenInit();
    await persistJson();
};

/**
 * Returns the stored authentication token for a host, if any.
 */
export const getAuthenticationToken = async (host: string) => {
    // (legacy config-repository lookup removed; tokens come from the JSON store)
    await authenticationTokenInit();
    const id = CONFIGREPOSITORY_OPDS_AUTHENTICATION_TOKEN_fn(host);
    return authenticationToken[id];
};

/**
 * Deletes the stored authentication token for a host and persists the change.
 */
export const deleteAuthenticationToken = async (host: string) => {
    // (legacy config-repository deletion removed; tokens live in the JSON store)
    await authenticationTokenInit();
    const id = CONFIGREPOSITORY_OPDS_AUTHENTICATION_TOKEN_fn(host);
    delete authenticationToken[id];
    const encrypted = encryptPersist(JSON.stringify(authenticationToken), CONFIGREPOSITORY_OPDS_AUTHENTICATION_TOKEN, opdsAuthFilePath);
    return fsp.writeFile(opdsAuthFilePath, encrypted);
};

/**
 * Clears every stored authentication token and persists the empty store.
 */
export const wipeAuthenticationTokenStorage = async () => {
    // authenticationTokenInitialized = false;
    authenticationToken = {};
    const encrypted = encryptPersist(JSON.stringify(authenticationToken), CONFIGREPOSITORY_OPDS_AUTHENTICATION_TOKEN, opdsAuthFilePath);
    return fsp.writeFile(opdsAuthFilePath, encrypted);
};

/**
 * Performs the raw HTTP request through fetchWithCookie, following redirects
 * manually (so cookies survive) up to FOLLOW_REDIRECT_COUNTER hops.
 * @param url target URL.
 * @param options node-fetch options; headers/agent/timeout are filled in here.
 * @param redirectCounter number of redirects already followed (recursion depth).
 * @param locale value for the accept-language header (defaults from the store).
 * @throws Error when the redirect limit is exceeded.
 */
export async function httpFetchRawResponse(
    url: string | URL,
    options: THttpOptions = {},
    redirectCounter = 0,
    locale = tryCatchSync(() => diMainGet("store")?.getState()?.i18n?.locale, filename_) || "en-US",
): Promise<THttpResponse> {

    options.headers = options.headers instanceof Headers
        ? options.headers
        : new Headers(options.headers || {});
    options.headers.set("user-agent", "readium-desktop");
    options.headers.set("accept-language", `${locale},en-US;q=0.7,en;q=0.5`);
    options.redirect = "manual"; // handle cookies

    // https://github.com/node-fetch/node-fetch#custom-agent
    // httpAgent doesn't works
    // err: Protocol "http:" not supported. Expected "https:
    // https://github.com/edrlab/thorium-reader/issues/1323#issuecomment-911772951
    const httpsAgent = new https.Agent({
        timeout: options.timeout || DEFAULT_HTTP_TIMEOUT,
        rejectUnauthorized: IS_DEV ? false : true,
    });
    const httpAgent = new http.Agent({
        timeout: options.timeout || DEFAULT_HTTP_TIMEOUT,
    });
    // Pick the agent per-URL so plain-http redirects from https origins work.
    options.agent = (parsedURL: URL) => {
        if (parsedURL.protocol === "http:") {
            return httpAgent;
        } else {
            return httpsAgent;
        }
    };

    options.timeout = options.timeout || DEFAULT_HTTP_TIMEOUT;

    const response = await fetchWithCookie(url, options);

    debug("fetch URL:", `${url}`);
    debug("Method", options.method);
    debug("Request headers :");
    debug(options.headers);
    debug("###");
    debug("OK: ", response.ok);
    debug("status code :", response.status);
    debug("status text :", response.statusText);

    // manual Redirect to handle cookies
    // https://github.com/node-fetch/node-fetch/blob/0d35ddbf7377a483332892d2b625ec8231fa6181/src/index.js#L129
    if (isRedirect(response.status)) {
        const location = response.headers.get("Location");
        debug("Redirect", response.status, "to: ", location);
        if (location) {
            const locationUrl = resolve(response.url, location);
            if (redirectCounter > FOLLOW_REDIRECT_COUNTER) {
                throw new Error(`maximum redirect reached at: ${url}`);
            }
            // 303 (and 301/302 on POST) demote the follow-up request to a
            // body-less GET, mirroring node-fetch's own redirect logic.
            if (
                response.status === 303 ||
                ((response.status === 301 || response.status === 302) && options.method === "POST")
            ) {
                options.method = "GET";
                options.body = undefined;
                if (options.headers) {
                    if (!(options.headers instanceof Headers)) {
                        options.headers = new Headers(options.headers);
                    }
                    options.headers.delete("content-length");
                }
            }
            return await httpFetchRawResponse(locationUrl, options, redirectCounter + 1, locale);
        } else {
            debug("No location URL to redirect");
        }
    }

    return response;
}

// Applies the caller-supplied callback to a result and strips the body/response
// fields afterwards (they are not serializable across IPC).
const handleCallback =
    async <T = undefined>(res: IHttpGetResult<T>, callback: THttpGetCallback<T>) => {
        if (callback) {
            res = await Promise.resolve(callback(res));

            // remove for IPC sync
            res.body = undefined;
            res.response = undefined;
        }
        return res;
    };

/**
 * Wraps httpFetchRawResponse and converts both successes and failures into an
 * IHttpGetResult value (isAbort / isTimeout / isNetworkError flags) instead of
 * throwing. The optional callback always runs (via finally).
 */
export async function httpFetchFormattedResponse<TData = undefined>(
    url: string | URL,
    options?: THttpOptions,
    callback?: THttpGetCallback<TData>,
    locale?: string,
): Promise<IHttpGetResult<TData>> {

    let result: IHttpGetResult<TData> = {
        isFailure: true,
        isSuccess: false,
        url,
    };

    try {
        const response = await httpFetchRawResponse(url, options, 0, locale);

        debug("Response headers :");
        debug({ ...response.headers.raw() });
        debug("###");

        result = {
            isAbort: false,
            isNetworkError: false,
            isTimeout: false,
            isFailure: !response.ok/*response.status < 200 || response.status >= 300*/,
            isSuccess: response.ok/*response.status >= 200 && response.status < 300*/,
            url,
            responseUrl: response.url,
            statusCode: response.status,
            statusMessage: response.statusText,
            body: response.body,
            response,
            data: undefined,
            contentType: response.headers.get("Content-Type"),
            // cookies: response.headers.get("Set-Cookie"),
        };
    } catch (err) {
        const errStr = err.toString();

        debug("### HTTP FETCH ERROR ###");
        debug(errStr);
        debug("url: ", url);
        debug("options: ", options);

        // Classify the failure: user abort, timeout, or generic network error.
        if (err.name === "AbortError") {
            result = {
                isAbort: true,
                isNetworkError: false,
                isTimeout: false,
                isFailure: true,
                isSuccess: false,
                url,
            };
        } else if (errStr.includes("timeout")) { // err.name === "FetchError"
            result = {
                isAbort: false,
                isNetworkError: true,
                isTimeout: true,
                isFailure: true,
                isSuccess: false,
                url,
                statusMessage: errStr,
            };
        } else { // err.name === "FetchError"
            result = {
                isAbort: false,
                isNetworkError: true,
                isTimeout: false,
                isFailure: true,
                isSuccess: false,
                url,
                statusMessage: errStr,
            };
        }

        debug("HTTP FAIL RESUlT");
        debug(result);
        debug("#################");
    } finally {
        result = await handleCallback(result, callback);
    }

    return result;
}

/**
 * Returns a GET-flavored httpFetchFormattedResponse. When enableAuth is true
 * and a token is stored for the target host, the request is routed through
 * httpGetUnauthorized so the Authorization header is sent proactively.
 */
export const httpGetWithAuth =
    (enableAuth = true): typeof httpFetchFormattedResponse =>
        async (...arg) => {

            const [_url, _options, _callback, ..._arg] = arg;

            const options = _options || {};
            options.method = "get";

            if (enableAuth) {
                // response.statusCode === 401

                // enableAuth always activate on httpGet request
                // means that on each request the acessToken is returned and not only for the 401 http response
                // specific to 'librarySimplified' server implementation

                const url = _url instanceof URL ? _url : new URL(_url);
                const { host } = url;

                const auth = await getAuthenticationToken(host);

                if (
                    typeof auth === "object"
                    && auth.accessToken
                ) {
                    // We have an authentication token for this host.
                    // We should use it by default
                    // Because we won't always get a 401 response that will ask us to use it.
                    return httpGetUnauthorized(auth)(_url, options, _callback, ..._arg);
                }

            }
            return httpFetchFormattedResponse(
                _url,
                options,
                _callback,
                ..._arg,
            );
        };

// Public GET entry point: authentication enabled.
export const httpGet = httpGetWithAuth(true);

// GET with an explicit token attached as the Authorization header. On a 401,
// either refreshes the token (when refresh credentials exist) or deletes the
// stale token and retries once without authentication.
const httpGetUnauthorized =
    (auth: IOpdsAuthenticationToken, enableRefresh = true): typeof httpFetchFormattedResponse =>
        async (...arg) => {

            const [_url, _options, _callback, ..._arg] = arg;

            const url = _url instanceof URL ? _url : new URL(_url);
            const options = _options || {};

            const { accessToken, tokenType } = auth;

            options.headers = options.headers instanceof Headers
                ? options.headers
                : new Headers(options.headers || {});
            options.headers.set("Authorization", httpSetHeaderAuthorization(tokenType || "Bearer", accessToken));

            // When a refresh may happen, the callback is deferred until the
            // final response is known (handleCallback below / inner calls).
            const response = await httpGetWithAuth(false)(
                url,
                options,
                enableRefresh ? undefined : _callback,
                ..._arg,
            );

            if (enableRefresh) {
                if (response.statusCode === 401) {
                    if (auth.refreshUrl && auth.refreshToken) {
                        const responseAfterRefresh = await httpGetUnauthorizedRefresh(
                            auth,
                        )(url, options, _callback, ..._arg);
                        return responseAfterRefresh || response;
                    } else {
                        // Most likely because of a wrong access token.
                        // In some cases the returned content won't launch a new authentication process
                        // It's safer to just delete the access token and start afresh now.
                        await deleteAuthenticationToken(url.host);
                        options.headers.delete("Authorization");
                        const responseWithoutAuth = await httpGetWithAuth(
                            false,
                        )(url, options, _callback, ..._arg);
                        return responseWithoutAuth || response;
                    }
                } else {
                    return await handleCallback(response, _callback);
                }
            }

            return response;
        };

// POSTs the refresh_token grant to auth.refreshUrl; on success updates the
// token pair, retries the original request once (refresh disabled), and
// persists the new credentials when the retry is no longer a 401.
const httpGetUnauthorizedRefresh =
    (auth: IOpdsAuthenticationToken): typeof httpFetchFormattedResponse | undefined =>
        async (...arg) => {

            const { refreshToken, refreshUrl } = auth;
            const options: RequestInit = {};
            options.headers = options.headers instanceof Headers
                ? options.headers
                : new Headers(options.headers || {});
            options.headers.set("Content-Type", "application/json");
            options.body = JSON.stringify({
                refresh_token: refreshToken,
                grant_type: "refresh_token",
            });

            const httpPostResponse = await httpPost(refreshUrl, options);
            if (httpPostResponse.isSuccess) {
                const jsonDataResponse = await httpPostResponse.response.json();

                const newRefreshToken = typeof jsonDataResponse?.refresh_token === "string"
                    ? jsonDataResponse.refresh_token
                    : undefined;
                auth.refreshToken = newRefreshToken || auth.refreshToken;

                const newAccessToken = typeof jsonDataResponse?.access_token === "string"
                    ? jsonDataResponse.access_token
                    : undefined;
                auth.accessToken = newAccessToken || auth.accessToken;

                const httpGetResponse = await httpGetUnauthorized(auth, false)(...arg);

                if (httpGetResponse.statusCode !== 401) {
                    debug("authenticate with the new access_token");
                    debug("saved it into db");
                    await httpSetAuthenticationToken(auth);
                }

                return httpGetResponse;
            }

            return undefined;
        };

/**
 * POST-flavored httpFetchFormattedResponse.
 */
export const httpPost: typeof httpFetchFormattedResponse =
    async (...arg) => {

        let [, options] = arg;
        options = options || {};
        options.method = "post";
        arg[1] = options;

        // do not risk showing plaintext password in console / command line shell
        // debug("Body:");
        // debug(options.body);

        return httpFetchFormattedResponse(...arg);
    };

// Minimal AbortSignal stand-in for node-fetch.
// fetch checks the class name
// https://github.com/node-fetch/node-fetch/blob/b7076bb24f75be688d8fc8b175f41b341e853f2b/src/utils/is.js#L78
export class AbortSignal implements IAbortSignal {

    public aborted: boolean;
    private listenerArray: any[];

    constructor() {
        this.listenerArray = [];
        this.aborted = false;
    }

    public onabort: IAbortSignal["onabort"] = null;

    public addEventListener(_type: "abort", listener: (a: any[]) => any) {
        this.listenerArray.push(listener);
    }

    public removeEventListener(_type: "abort", listener: (a: any[]) => any) {
        const index = this.listenerArray.findIndex((v) => v === listener);
        if (index > -1) {
            // Rebuild the array without the listener (immutably).
            this.listenerArray = [...this.listenerArray.slice(0, index), ...this.listenerArray.slice(index + 1)];
        }
    }

    // Invokes every listener (errors swallowed), marks the signal aborted,
    // and returns true.
    public dispatchEvent() {
        this.listenerArray.forEach((l) => {
            try {
                l();
            } catch (_e) {
                // ignore
            }
        });
        return this.aborted = true;
    }
}
the_stack
import * as React from 'react';
import { useState, useRef, useEffect } from 'react';
import classNames from 'classnames';
import raf from 'rc-util/lib/raf';
import ResizeObserver from 'rc-resize-observer';
import useRaf, { useRafState } from '../hooks/useRaf';
import TabNode from './TabNode';
import type {
  TabSizeMap,
  TabPosition,
  RenderTabBar,
  TabsLocale,
  EditableConfig,
  AnimatedConfig,
  OnTabScroll,
  TabBarExtraPosition,
  TabBarExtraContent,
  TabBarExtraMap,
} from '../interface';
import useOffsets from '../hooks/useOffsets';
import useVisibleRange from '../hooks/useVisibleRange';
import OperationNode from './OperationNode';
import TabContext from '../TabContext';
import useTouchMove from '../hooks/useTouchMove';
import useRefs from '../hooks/useRefs';
import AddButton from './AddButton';
import useSyncState from '../hooks/useSyncState';

export interface TabNavListProps {
  id: string;
  tabPosition: TabPosition;
  activeKey: string;
  rtl: boolean;
  panes: React.ReactNode;
  animated?: AnimatedConfig;
  extra?: TabBarExtraContent;
  editable?: EditableConfig;
  moreIcon?: React.ReactNode;
  moreTransitionName?: string;
  mobile: boolean;
  tabBarGutter?: number;
  renderTabBar?: RenderTabBar;
  className?: string;
  style?: React.CSSProperties;
  locale?: TabsLocale;
  onTabClick: (activeKey: string, e: React.MouseEvent | React.KeyboardEvent) => void;
  onTabScroll?: OnTabScroll;
  children?: (node: React.ReactElement) => React.ReactElement;
}

interface ExtraContentProps {
  position: TabBarExtraPosition;
  prefixCls: string;
  extra?: TabBarExtraContent;
}

// Renders the left/right extra content of the tab bar. `extra` may be either
// a single node (treated as right-side content) or a { left, right } map.
const ExtraContent = ({ position, prefixCls, extra }: ExtraContentProps) => {
  if (!extra) return null;

  let content: React.ReactNode;

  // Parse extra
  let assertExtra: TabBarExtraMap = {};
  if (extra && typeof extra === 'object' && !React.isValidElement(extra)) {
    assertExtra = extra as TabBarExtraMap;
  } else {
    // A plain node (or React element) defaults to the right side.
    assertExtra.right = extra;
  }

  if (position === 'right') {
    content = assertExtra.right;
  }

  if (position === 'left') {
    content = assertExtra.left;
  }

  return content ? <div className={`${prefixCls}-extra-content`}>{content}</div> : null;
};

// Tab bar: renders the tab buttons, keeps the active tab scrolled into view
// (via a CSS translate of the inner list), positions the ink bar, and
// collects off-screen tabs into the OperationNode dropdown.
function TabNavList(props: TabNavListProps, ref: React.Ref<HTMLDivElement>) {
  const { prefixCls, tabs } = React.useContext(TabContext);
  const {
    className,
    style,
    id,
    animated,
    activeKey,
    rtl,
    extra,
    editable,
    locale,
    tabPosition,
    tabBarGutter,
    children,
    onTabClick,
    onTabScroll,
  } = props;
  const tabsWrapperRef = useRef<HTMLDivElement>();
  const tabListRef = useRef<HTMLDivElement>();
  const operationsRef = useRef<HTMLDivElement>();
  const innerAddButtonRef = useRef<HTMLButtonElement>();
  // Per-tab button refs, keyed by tab key.
  const [getBtnRef, removeBtnRef] = useRefs<HTMLDivElement>();

  const tabPositionTopOrBottom = tabPosition === 'top' || tabPosition === 'bottom';

  // Horizontal/vertical translate of the tab list; useSyncState notifies
  // onTabScroll with the movement direction whenever the value changes.
  const [transformLeft, setTransformLeft] = useSyncState(0, (next, prev) => {
    if (tabPositionTopOrBottom && onTabScroll) {
      onTabScroll({ direction: next > prev ? 'left' : 'right' });
    }
  });
  const [transformTop, setTransformTop] = useSyncState(0, (next, prev) => {
    if (!tabPositionTopOrBottom && onTabScroll) {
      onTabScroll({ direction: next > prev ? 'top' : 'bottom' });
    }
  });

  // Measured sizes, refreshed by onListHolderResize.
  const [wrapperScrollWidth, setWrapperScrollWidth] = useState<number>(0);
  const [wrapperScrollHeight, setWrapperScrollHeight] = useState<number>(0);
  const [wrapperContentWidth, setWrapperContentWidth] = useState<number>(0);
  const [wrapperContentHeight, setWrapperContentHeight] = useState<number>(0);
  const [wrapperWidth, setWrapperWidth] = useState<number>(null);
  const [wrapperHeight, setWrapperHeight] = useState<number>(null);
  const [addWidth, setAddWidth] = useState<number>(0);
  const [addHeight, setAddHeight] = useState<number>(0);

  // Per-tab measured size/offset map, batched through requestAnimationFrame.
  const [tabSizes, setTabSizes] = useRafState<TabSizeMap>(new Map());
  const tabOffsets = useOffsets(tabs, tabSizes, wrapperScrollWidth);

  // ========================== Util =========================
  const operationsHiddenClassName = `${prefixCls}-nav-operations-hidden`;

  // Clamp range for the translate value; RTL flips the sign convention.
  let transformMin = 0;
  let transformMax = 0;

  if (!tabPositionTopOrBottom) {
    transformMin = Math.min(0, wrapperHeight - wrapperScrollHeight);
    transformMax = 0;
  } else if (rtl) {
    transformMin = 0;
    transformMax = Math.max(0, wrapperScrollWidth - wrapperWidth);
  } else {
    transformMin = Math.min(0, wrapperWidth - wrapperScrollWidth);
    transformMax = 0;
  }

  function alignInRange(value: number): number {
    if (value < transformMin) {
      return transformMin;
    }
    if (value > transformMax) {
      return transformMax;
    }
    return value;
  }

  // ========================= Mobile ========================
  const touchMovingRef = useRef<number>();
  // Timestamp flag: while set, the list's CSS transition is disabled so
  // touch dragging tracks the finger without animation lag.
  const [lockAnimation, setLockAnimation] = useState<number>();

  function doLockAnimation() {
    setLockAnimation(Date.now());
  }

  function clearTouchMoving() {
    window.clearTimeout(touchMovingRef.current);
  }

  useTouchMove(tabsWrapperRef, (offsetX, offsetY) => {
    function doMove(setState: React.Dispatch<React.SetStateAction<number>>, offset: number) {
      setState(value => {
        const newValue = alignInRange(value + offset);
        return newValue;
      });
    }

    if (tabPositionTopOrBottom) {
      // Skip scroll if place is enough
      if (wrapperWidth >= wrapperScrollWidth) {
        return false;
      }

      doMove(setTransformLeft, offsetX);
    } else {
      if (wrapperHeight >= wrapperScrollHeight) {
        return false;
      }

      doMove(setTransformTop, offsetY);
    }

    clearTouchMoving();
    doLockAnimation();

    return true;
  });

  // Re-enable the transition 100ms after the last touch movement.
  useEffect(() => {
    clearTouchMoving();
    if (lockAnimation) {
      touchMovingRef.current = window.setTimeout(() => {
        setLockAnimation(0);
      }, 100);
    }

    return clearTouchMoving;
  }, [lockAnimation]);

  // ========================= Scroll ========================
  // Adjusts the translate so the given tab (default: the active one) is
  // fully visible inside the wrapper; the cross-axis translate is reset to 0.
  function scrollToTab(key = activeKey) {
    const tabOffset = tabOffsets.get(key) || {
      width: 0,
      height: 0,
      left: 0,
      right: 0,
      top: 0,
    };

    if (tabPositionTopOrBottom) {
      // ============ Align with top & bottom ============
      let newTransform = transformLeft;

      // RTL
      if (rtl) {
        if (tabOffset.right < transformLeft) {
          newTransform = tabOffset.right;
        } else if (tabOffset.right + tabOffset.width > transformLeft + wrapperWidth) {
          newTransform = tabOffset.right + tabOffset.width - wrapperWidth;
        }
      }
      // LTR
      else if (tabOffset.left < -transformLeft) {
        newTransform = -tabOffset.left;
      } else if (tabOffset.left + tabOffset.width > -transformLeft + wrapperWidth) {
        newTransform = -(tabOffset.left + tabOffset.width - wrapperWidth);
      }

      setTransformTop(0);
      setTransformLeft(alignInRange(newTransform));
    } else {
      // ============ Align with left & right ============
      let newTransform = transformTop;

      if (tabOffset.top < -transformTop) {
        newTransform = -tabOffset.top;
      } else if (tabOffset.top + tabOffset.height > -transformTop + wrapperHeight) {
        newTransform = -(tabOffset.top + tabOffset.height - wrapperHeight);
      }

      setTransformLeft(0);
      setTransformTop(alignInRange(newTransform));
    }
  }

  // ========================== Tab ==========================
  // Render tab node & collect tab offset
  const [visibleStart, visibleEnd] = useVisibleRange(
    tabOffsets,
    {
      width: wrapperWidth,
      height: wrapperHeight,
      left: transformLeft,
      top: transformTop,
    },
    {
      width: wrapperContentWidth,
      height: wrapperContentHeight,
    },
    {
      width: addWidth,
      height: addHeight,
    },
    { ...props, tabs },
  );

  // Gutter margin applied to every tab except the first.
  const tabNodeStyle: React.CSSProperties = {};
  if (tabPosition === 'top' || tabPosition === 'bottom') {
    tabNodeStyle[rtl ? 'marginRight' : 'marginLeft'] = tabBarGutter;
  } else {
    tabNodeStyle.marginTop = tabBarGutter;
  }

  const tabNodes: React.ReactElement[] = tabs.map((tab, i) => {
    const { key } = tab;
    return (
      <TabNode
        id={id}
        prefixCls={prefixCls}
        key={key}
        tab={tab}
        /* first node should not have margin left */
        style={i === 0 ? undefined : tabNodeStyle}
        closable={tab.closable}
        editable={editable}
        active={key === activeKey}
        renderWrapper={children}
        removeAriaLabel={locale?.removeAriaLabel}
        ref={getBtnRef(key)}
        onClick={e => {
          onTabClick(key, e);
        }}
        onRemove={() => {
          removeBtnRef(key);
        }}
        onFocus={() => {
          scrollToTab(key);
          doLockAnimation();

          if (!tabsWrapperRef.current) {
            return;
          }
          // Focus element will make scrollLeft change which we should reset back
          if (!rtl) {
            tabsWrapperRef.current.scrollLeft = 0;
          }
          tabsWrapperRef.current.scrollTop = 0;
        }}
      />
    );
  });

  // Measures wrapper, add-button, operations and every tab button; batched
  // through requestAnimationFrame by useRaf.
  const onListHolderResize = useRaf(() => {
    // Update wrapper records
    const offsetWidth = tabsWrapperRef.current?.offsetWidth || 0;
    const offsetHeight = tabsWrapperRef.current?.offsetHeight || 0;
    const newAddWidth = innerAddButtonRef.current?.offsetWidth || 0;
    const newAddHeight = innerAddButtonRef.current?.offsetHeight || 0;
    const newOperationWidth = operationsRef.current?.offsetWidth || 0;
    const newOperationHeight = operationsRef.current?.offsetHeight || 0;

    setWrapperWidth(offsetWidth);
    setWrapperHeight(offsetHeight);
    setAddWidth(newAddWidth);
    setAddHeight(newAddHeight);

    // The add button is part of the list, so subtract it from scroll size.
    const newWrapperScrollWidth = (tabListRef.current?.offsetWidth || 0) - newAddWidth;
    const newWrapperScrollHeight = (tabListRef.current?.offsetHeight || 0) - newAddHeight;

    setWrapperScrollWidth(newWrapperScrollWidth);
    setWrapperScrollHeight(newWrapperScrollHeight);

    const isOperationHidden = operationsRef.current?.className.includes(operationsHiddenClassName);
    setWrapperContentWidth(newWrapperScrollWidth - (isOperationHidden ? 0 : newOperationWidth));
    setWrapperContentHeight(newWrapperScrollHeight - (isOperationHidden ? 0 : newOperationHeight));

    // Update buttons records
    setTabSizes(() => {
      const newSizes: TabSizeMap = new Map();
      tabs.forEach(({ key }) => {
        const btnNode = getBtnRef(key).current;
        if (btnNode) {
          newSizes.set(key, {
            width: btnNode.offsetWidth,
            height: btnNode.offsetHeight,
            left: btnNode.offsetLeft,
            top: btnNode.offsetTop,
          });
        }
      });
      return newSizes;
    });
  });

  // ======================== Dropdown =======================
  // Tabs outside the visible range go into the "more" dropdown.
  const startHiddenTabs = tabs.slice(0, visibleStart);
  const endHiddenTabs = tabs.slice(visibleEnd + 1);
  const hiddenTabs = [...startHiddenTabs, ...endHiddenTabs];

  // =================== Link & Operations ===================
  const [inkStyle, setInkStyle] = useState<React.CSSProperties>();

  const activeTabOffset = tabOffsets.get(activeKey);

  // Delay set ink style to avoid remove tab blink
  const inkBarRafRef = useRef<number>();
  function cleanInkBarRaf() {
    raf.cancel(inkBarRafRef.current);
  }

  useEffect(() => {
    const newInkStyle: React.CSSProperties = {};

    if (activeTabOffset) {
      if (tabPositionTopOrBottom) {
        if (rtl) {
          newInkStyle.right = activeTabOffset.right;
        } else {
          newInkStyle.left = activeTabOffset.left;
        }

        newInkStyle.width = activeTabOffset.width;
      } else {
        newInkStyle.top = activeTabOffset.top;
        newInkStyle.height = activeTabOffset.height;
      }
    }

    cleanInkBarRaf();
    inkBarRafRef.current = raf(() => {
      setInkStyle(newInkStyle);
    });

    return cleanInkBarRaf;
  }, [activeTabOffset, tabPositionTopOrBottom, rtl]);

  // ========================= Effect ========================
  // Keep the active tab visible whenever the active key or layout changes.
  useEffect(() => {
    scrollToTab();
  }, [activeKey, activeTabOffset, tabOffsets, tabPositionTopOrBottom]);

  // Should recalculate when rtl changed
  useEffect(() => {
    onListHolderResize();
  }, [rtl, tabBarGutter, activeKey, tabs.map(tab => tab.key).join('_')]);

  // ========================= Render ========================
  const hasDropdown = !!hiddenTabs.length;
  const wrapPrefix = `${prefixCls}-nav-wrap`;
  // "Ping" flags: whether more content exists beyond each edge (used for
  // the fade/shadow classes on the wrapper).
  let pingLeft: boolean;
  let pingRight: boolean;
  let pingTop: boolean;
  let pingBottom: boolean;

  if (tabPositionTopOrBottom) {
    if (rtl) {
      pingRight = transformLeft > 0;
      pingLeft = transformLeft + wrapperWidth < wrapperScrollWidth;
    } else {
      pingLeft = transformLeft < 0;
      pingRight = -transformLeft + wrapperWidth < wrapperScrollWidth;
    }
  } else {
    pingTop = transformTop < 0;
    pingBottom = -transformTop + wrapperHeight < wrapperScrollHeight;
  }

  return (
    <div
      ref={ref}
      role="tablist"
      className={classNames(`${prefixCls}-nav`, className)}
      style={style}
      onKeyDown={() => {
        // No need animation when use keyboard
        doLockAnimation();
      }}
    >
      <ExtraContent position="left" extra={extra} prefixCls={prefixCls} />

      <ResizeObserver onResize={onListHolderResize}>
        <div
          className={classNames(wrapPrefix, {
            [`${wrapPrefix}-ping-left`]: pingLeft,
            [`${wrapPrefix}-ping-right`]: pingRight,
            [`${wrapPrefix}-ping-top`]: pingTop,
            [`${wrapPrefix}-ping-bottom`]: pingBottom,
          })}
          ref={tabsWrapperRef}
        >
          <ResizeObserver onResize={onListHolderResize}>
            <div
              ref={tabListRef}
              className={`${prefixCls}-nav-list`}
              style={{
                transform: `translate(${transformLeft}px, ${transformTop}px)`,
                transition: lockAnimation ? 'none' : undefined,
              }}
            >
              {tabNodes}
              <AddButton
                ref={innerAddButtonRef}
                prefixCls={prefixCls}
                locale={locale}
                editable={editable}
                style={{
                  ...(tabNodes.length === 0 ? undefined : tabNodeStyle),
                  visibility: hasDropdown ? 'hidden' : null,
                }}
              />

              <div
                className={classNames(`${prefixCls}-ink-bar`, {
                  [`${prefixCls}-ink-bar-animated`]: animated.inkBar,
                })}
                style={inkStyle}
              />
            </div>
          </ResizeObserver>
        </div>
      </ResizeObserver>

      <OperationNode
        {...props}
        removeAriaLabel={locale?.removeAriaLabel}
        ref={operationsRef}
        prefixCls={prefixCls}
        tabs={hiddenTabs}
        className={!hasDropdown && operationsHiddenClassName}
        tabMoving={!!lockAnimation}
      />

      <ExtraContent position="right" extra={extra} prefixCls={prefixCls} />
    </div>
  );
  /* eslint-enable */
}

export default React.forwardRef(TabNavList);
the_stack
import { ParticleEmitter2, ParticleEmitter2FilterMode, ParticleEmitter2Flags, ParticleEmitter2FramesFlags } from '../model'; import {vec3, vec4} from 'gl-matrix'; import {ModelInterp} from './modelInterp'; import {mat4} from 'gl-matrix'; import {degToRad, rand, getShader} from './util'; import {RendererData} from './rendererData'; import {lerp} from './interp'; let gl: WebGLRenderingContext; let shaderProgram: WebGLProgram; const shaderProgramLocations: any = {}; const particleStorage: Particle[] = []; const rotateCenter: vec3 = vec3.fromValues(0, 0, 0); const firstColor = vec4.create(); const secondColor = vec4.create(); const color = vec4.create(); const tailPos = vec3.create(); const tailCross = vec3.create(); const vertexShader = ` attribute vec3 aVertexPosition; attribute vec2 aTextureCoord; attribute vec4 aColor; uniform mat4 uMVMatrix; uniform mat4 uPMatrix; varying vec2 vTextureCoord; varying vec4 vColor; void main(void) { vec4 position = vec4(aVertexPosition, 1.0); gl_Position = uPMatrix * uMVMatrix * position; vTextureCoord = aTextureCoord; vColor = aColor; } `; const fragmentShader = ` precision mediump float; varying vec2 vTextureCoord; varying vec4 vColor; uniform sampler2D uSampler; uniform vec3 uReplaceableColor; uniform float uReplaceableType; uniform float uDiscardAlphaLevel; float hypot (vec2 z) { float t; float x = abs(z.x); float y = abs(z.y); t = min(x, y); x = max(x, y); t = t / x; return (z.x == 0.0 && z.y == 0.0) ? 0.0 : x * sqrt(1.0 + t * t); } void main(void) { vec2 coords = vec2(vTextureCoord.s, vTextureCoord.t); if (uReplaceableType == 0.) { gl_FragColor = texture2D(uSampler, coords); } else if (uReplaceableType == 1.) { gl_FragColor = vec4(uReplaceableColor, 1.0); } else if (uReplaceableType == 2.) { float dist = hypot(coords - vec2(0.5, 0.5)) * 2.; float truncateDist = clamp(1. 
- dist * 1.4, 0., 1.); float alpha = sin(truncateDist); gl_FragColor = vec4(uReplaceableColor * alpha, 1.0); } gl_FragColor *= vColor; if (gl_FragColor[3] < uDiscardAlphaLevel) { discard; } } `; interface Particle { emitter: ParticleEmitterWrapper; // xyz pos: vec3; // xyz speed: vec3; angle: number; gravity: number; lifeSpan: number; } interface ParticleEmitterWrapper { emission: number; squirtFrame: number; particles: Particle[]; props: ParticleEmitter2; capacity: number; baseCapacity: number; // head or tail or both type: number; // xyz tailVertices: Float32Array; tailVertexBuffer: WebGLBuffer; // xyz headVertices: Float32Array; headVertexBuffer: WebGLBuffer; // xy tailTexCoords: Float32Array; tailTexCoordBuffer: WebGLBuffer; // xy headTexCoords: Float32Array; headTexCoordBuffer: WebGLBuffer; // rgba colors: Float32Array; colorBuffer: WebGLBuffer; // 2 * triangles indices: Uint16Array; indexBuffer: WebGLBuffer; } const DISCARD_ALPHA_KEY_LEVEL = 0.83; const DISCARD_MODULATE_LEVEL = 0.01; export class ParticlesController { public static initGL (glContext: WebGLRenderingContext): void { gl = glContext; ParticlesController.initShaders(); } private static initShaders (): void { const vertex = getShader(gl, vertexShader, gl.VERTEX_SHADER); const fragment = getShader(gl, fragmentShader, gl.FRAGMENT_SHADER); shaderProgram = gl.createProgram(); gl.attachShader(shaderProgram, vertex); gl.attachShader(shaderProgram, fragment); gl.linkProgram(shaderProgram); if (!gl.getProgramParameter(shaderProgram, gl.LINK_STATUS)) { alert('Could not initialise shaders'); } gl.useProgram(shaderProgram); shaderProgramLocations.vertexPositionAttribute = gl.getAttribLocation(shaderProgram, 'aVertexPosition'); shaderProgramLocations.textureCoordAttribute = gl.getAttribLocation(shaderProgram, 'aTextureCoord'); shaderProgramLocations.colorAttribute = gl.getAttribLocation(shaderProgram, 'aColor'); shaderProgramLocations.pMatrixUniform = gl.getUniformLocation(shaderProgram, 'uPMatrix'); 
shaderProgramLocations.mvMatrixUniform = gl.getUniformLocation(shaderProgram, 'uMVMatrix'); shaderProgramLocations.samplerUniform = gl.getUniformLocation(shaderProgram, 'uSampler'); shaderProgramLocations.replaceableColorUniform = gl.getUniformLocation(shaderProgram, 'uReplaceableColor'); shaderProgramLocations.replaceableTypeUniform = gl.getUniformLocation(shaderProgram, 'uReplaceableType'); shaderProgramLocations.discardAlphaLevelUniform = gl.getUniformLocation(shaderProgram, 'uDiscardAlphaLevel'); } private static updateParticle (particle: Particle, delta: number): void { delta /= 1000; particle.lifeSpan -= delta; if (particle.lifeSpan <= 0) { return; } particle.speed[2] -= particle.gravity * delta; particle.pos[0] += particle.speed[0] * delta; particle.pos[1] += particle.speed[1] * delta; particle.pos[2] += particle.speed[2] * delta; } private static resizeEmitterBuffers (emitter: ParticleEmitterWrapper, size: number): void { if (size <= emitter.capacity) { return; } size = Math.max(size, emitter.baseCapacity); let tailVertices; let headVertices; let tailTexCoords; let headTexCoords; if (emitter.type & ParticleEmitter2FramesFlags.Tail) { tailVertices = new Float32Array(size * 4 * 3); // 4 vertices * xyz tailTexCoords = new Float32Array(size * 4 * 2); // 4 vertices * xy } if (emitter.type & ParticleEmitter2FramesFlags.Head) { headVertices = new Float32Array(size * 4 * 3); // 4 vertices * xyz headTexCoords = new Float32Array(size * 4 * 2); // 4 vertices * xy } const colors = new Float32Array(size * 4 * 4); // 4 vertices * rgba const indices = new Uint16Array(size * 6); // 4 vertices * 2 triangles if (emitter.capacity) { indices.set(emitter.indices); } for (let i = emitter.capacity; i < size; ++i) { indices[i * 6 ] = i * 4 ; indices[i * 6 + 1] = i * 4 + 1; indices[i * 6 + 2] = i * 4 + 2; indices[i * 6 + 3] = i * 4 + 2; indices[i * 6 + 4] = i * 4 + 1; indices[i * 6 + 5] = i * 4 + 3; } if (tailVertices) { emitter.tailVertices = tailVertices; emitter.tailTexCoords = 
tailTexCoords; } if (headVertices) { emitter.headVertices = headVertices; emitter.headTexCoords = headTexCoords; } emitter.colors = colors; emitter.indices = indices; emitter.capacity = size; if (!emitter.indexBuffer) { if (emitter.type & ParticleEmitter2FramesFlags.Tail) { emitter.tailVertexBuffer = gl.createBuffer(); emitter.tailTexCoordBuffer = gl.createBuffer(); } if (emitter.type & ParticleEmitter2FramesFlags.Head) { emitter.headVertexBuffer = gl.createBuffer(); emitter.headTexCoordBuffer = gl.createBuffer(); } emitter.colorBuffer = gl.createBuffer(); emitter.indexBuffer = gl.createBuffer(); } } private interp: ModelInterp; private rendererData: RendererData; private emitters: ParticleEmitterWrapper[]; private particleBaseVectors: vec3[]; constructor (interp: ModelInterp, rendererData: RendererData) { this.interp = interp; this.rendererData = rendererData; this.emitters = []; if (rendererData.model.ParticleEmitters2.length) { this.particleBaseVectors = [ vec3.create(), vec3.create(), vec3.create(), vec3.create() ]; for (const particleEmitter of rendererData.model.ParticleEmitters2) { const emitter: ParticleEmitterWrapper = { emission: 0, squirtFrame: 0, particles: [], props: particleEmitter, capacity: 0, baseCapacity: 0, type: particleEmitter.FrameFlags, tailVertices: null, tailVertexBuffer: null, headVertices: null, headVertexBuffer: null, tailTexCoords: null, tailTexCoordBuffer: null, headTexCoords: null, headTexCoordBuffer: null, colors: null, colorBuffer: null, indices: null, indexBuffer: null }; emitter.baseCapacity = Math.ceil( ModelInterp.maxAnimVectorVal(emitter.props.EmissionRate) * emitter.props.LifeSpan ); this.emitters.push(emitter); } } } public update (delta: number): void { for (const emitter of this.emitters) { this.updateEmitter(emitter, delta); } } public render (mvMatrix: mat4, pMatrix: mat4): void { gl.enable(gl.CULL_FACE); gl.useProgram(shaderProgram); gl.uniformMatrix4fv(shaderProgramLocations.pMatrixUniform, false, pMatrix); 
gl.uniformMatrix4fv(shaderProgramLocations.mvMatrixUniform, false, mvMatrix); gl.enableVertexAttribArray(shaderProgramLocations.vertexPositionAttribute); gl.enableVertexAttribArray(shaderProgramLocations.textureCoordAttribute); gl.enableVertexAttribArray(shaderProgramLocations.colorAttribute); for (const emitter of this.emitters) { if (!emitter.particles.length) { continue; } this.setLayerProps(emitter); this.setGeneralBuffers(emitter); if (emitter.type & ParticleEmitter2FramesFlags.Tail) { this.renderEmitterType(emitter, ParticleEmitter2FramesFlags.Tail); } if (emitter.type & ParticleEmitter2FramesFlags.Head) { this.renderEmitterType(emitter, ParticleEmitter2FramesFlags.Head); } } gl.disableVertexAttribArray(shaderProgramLocations.vertexPositionAttribute); gl.disableVertexAttribArray(shaderProgramLocations.textureCoordAttribute); gl.disableVertexAttribArray(shaderProgramLocations.colorAttribute); } private updateEmitter (emitter: ParticleEmitterWrapper, delta: number): void { const visibility = this.interp.animVectorVal(emitter.props.Visibility, 1); if (visibility > 0) { if (emitter.props.Squirt && typeof emitter.props.EmissionRate !== 'number') { const interp = this.interp.findKeyframes(emitter.props.EmissionRate); if (interp && interp.left && interp.left.Frame !== emitter.squirtFrame) { emitter.squirtFrame = interp.left.Frame; if (interp.left.Vector[0] > 0) { emitter.emission += interp.left.Vector[0] * 1000; } } } else { const emissionRate = this.interp.animVectorVal(emitter.props.EmissionRate, 0); emitter.emission += emissionRate * delta; } while (emitter.emission >= 1000) { emitter.emission -= 1000; emitter.particles.push( this.createParticle(emitter, this.rendererData.nodes[emitter.props.ObjectId].matrix) ); } } if (emitter.particles.length) { const updatedParticles = []; for (const particle of emitter.particles) { ParticlesController.updateParticle(particle, delta); if (particle.lifeSpan > 0) { updatedParticles.push(particle); } else { 
particleStorage.push(particle); } } emitter.particles = updatedParticles; if (emitter.type & ParticleEmitter2FramesFlags.Head) { if (emitter.props.Flags & ParticleEmitter2Flags.XYQuad) { vec3.set(this.particleBaseVectors[0], -1, 1, 0); vec3.set(this.particleBaseVectors[1], -1, -1, 0); vec3.set(this.particleBaseVectors[2], 1, 1, 0); vec3.set(this.particleBaseVectors[3], 1, -1, 0); } else { vec3.set(this.particleBaseVectors[0], 0, -1, 1); vec3.set(this.particleBaseVectors[1], 0, -1, -1); vec3.set(this.particleBaseVectors[2], 0, 1, 1); vec3.set(this.particleBaseVectors[3], 0, 1, -1); for (let i = 0; i < 4; ++i) { vec3.transformQuat(this.particleBaseVectors[i], this.particleBaseVectors[i], this.rendererData.cameraQuat); } } } ParticlesController.resizeEmitterBuffers(emitter, emitter.particles.length); for (let i = 0; i < emitter.particles.length; ++i) { this.updateParticleBuffers(emitter.particles[i], i, emitter); } } } private createParticle (emitter: ParticleEmitterWrapper, emitterMatrix: mat4) { let particle: Particle; if (particleStorage.length) { particle = particleStorage.pop(); } else { particle = { emitter: null, pos: vec3.create(), angle: 0, speed: vec3.create(), gravity: null, lifeSpan: null }; } const width: number = this.interp.animVectorVal(emitter.props.Width, 0); const length: number = this.interp.animVectorVal(emitter.props.Length, 0); let speedScale: number = this.interp.animVectorVal(emitter.props.Speed, 0); const variation: number = this.interp.animVectorVal(emitter.props.Variation, 0); const latitude: number = degToRad(this.interp.animVectorVal(emitter.props.Latitude, 0)); particle.emitter = emitter; particle.pos[0] = emitter.props.PivotPoint[0] + rand(-width, width); particle.pos[1] = emitter.props.PivotPoint[1] + rand(-length, length); particle.pos[2] = emitter.props.PivotPoint[2]; vec3.transformMat4(particle.pos, particle.pos, emitterMatrix); if (variation > 0) { speedScale *= 1 + rand(-variation, variation); } vec3.set(particle.speed, 0, 0, 
speedScale); particle.angle = rand(0, Math.PI * 2); vec3.rotateY(particle.speed, particle.speed, rotateCenter, rand(0, latitude)); vec3.rotateZ(particle.speed, particle.speed, rotateCenter, particle.angle); if (emitter.props.Flags & ParticleEmitter2Flags.LineEmitter) { particle.speed[0] = 0; } vec3.transformMat4(particle.speed, particle.speed, emitterMatrix); // minus translation of emitterMatrix particle.speed[0] -= emitterMatrix[12]; particle.speed[1] -= emitterMatrix[13]; particle.speed[2] -= emitterMatrix[14]; particle.gravity = this.interp.animVectorVal(emitter.props.Gravity, 0); particle.lifeSpan = emitter.props.LifeSpan; return particle; } private updateParticleBuffers (particle: Particle, index: number, emitter: ParticleEmitterWrapper): void { const globalT: number = 1 - particle.lifeSpan / emitter.props.LifeSpan; const firstHalf: boolean = globalT < emitter.props.Time; let t: number; if (firstHalf) { t = globalT / emitter.props.Time; } else { t = (globalT - emitter.props.Time) / (1 - emitter.props.Time); } this.updateParticleVertices(particle, index, emitter, firstHalf, t); this.updateParticleTexCoords(index, emitter, firstHalf, t); this.updateParticleColor(index, emitter, firstHalf, t); } private updateParticleVertices (particle: Particle, index: number, emitter: ParticleEmitterWrapper, firstHalf: boolean, t: number) { let firstScale; let secondScale; let scale; if (firstHalf) { firstScale = emitter.props.ParticleScaling[0]; secondScale = emitter.props.ParticleScaling[1]; } else { firstScale = emitter.props.ParticleScaling[1]; secondScale = emitter.props.ParticleScaling[2]; } // eslint-disable-next-line prefer-const scale = lerp(firstScale, secondScale, t); if (emitter.type & ParticleEmitter2FramesFlags.Head) { for (let i = 0; i < 4; ++i) { emitter.headVertices[index * 12 + i * 3] = this.particleBaseVectors[i][0] * scale; emitter.headVertices[index * 12 + i * 3 + 1] = this.particleBaseVectors[i][1] * scale; emitter.headVertices[index * 12 + i * 3 + 2] = 
this.particleBaseVectors[i][2] * scale; if (emitter.props.Flags & ParticleEmitter2Flags.XYQuad) { const x = emitter.headVertices[index * 12 + i * 3]; const y = emitter.headVertices[index * 12 + i * 3 + 1]; emitter.headVertices[index * 12 + i * 3] = x * Math.cos(particle.angle) - y * Math.sin(particle.angle); emitter.headVertices[index * 12 + i * 3 + 1] = x * Math.sin(particle.angle) + y * Math.cos(particle.angle); } } } if (emitter.type & ParticleEmitter2FramesFlags.Tail) { tailPos[0] = -particle.speed[0] * emitter.props.TailLength; tailPos[1] = -particle.speed[1] * emitter.props.TailLength; tailPos[2] = -particle.speed[2] * emitter.props.TailLength; vec3.cross(tailCross, particle.speed, this.rendererData.cameraPos); vec3.normalize(tailCross, tailCross); vec3.scale(tailCross, tailCross, scale); emitter.tailVertices[index * 12] = tailCross[0]; emitter.tailVertices[index * 12 + 1] = tailCross[1]; emitter.tailVertices[index * 12 + 2] = tailCross[2]; emitter.tailVertices[index * 12 + 3] = -tailCross[0]; emitter.tailVertices[index * 12 + 3 + 1] = -tailCross[1]; emitter.tailVertices[index * 12 + 3 + 2] = -tailCross[2]; emitter.tailVertices[index * 12 + 2 * 3] = tailCross[0] + tailPos[0]; emitter.tailVertices[index * 12 + 2 * 3 + 1] = tailCross[1] + tailPos[1]; emitter.tailVertices[index * 12 + 2 * 3 + 2] = tailCross[2] + tailPos[2]; emitter.tailVertices[index * 12 + 3 * 3] = -tailCross[0] + tailPos[0]; emitter.tailVertices[index * 12 + 3 * 3 + 1] = -tailCross[1] + tailPos[1]; emitter.tailVertices[index * 12 + 3 * 3 + 2] = -tailCross[2] + tailPos[2]; } for (let i = 0; i < 4; ++i) { if (emitter.headVertices) { emitter.headVertices[index * 12 + i * 3] += particle.pos[0]; emitter.headVertices[index * 12 + i * 3 + 1] += particle.pos[1]; emitter.headVertices[index * 12 + i * 3 + 2] += particle.pos[2]; } if (emitter.tailVertices) { emitter.tailVertices[index * 12 + i * 3] += particle.pos[0]; emitter.tailVertices[index * 12 + i * 3 + 1] += particle.pos[1]; 
emitter.tailVertices[index * 12 + i * 3 + 2] += particle.pos[2]; } } } private updateParticleTexCoords (index: number, emitter: ParticleEmitterWrapper, firstHalf: boolean, t: number) { if (emitter.type & ParticleEmitter2FramesFlags.Head) { this.updateParticleTexCoordsByType(index, emitter, firstHalf, t, ParticleEmitter2FramesFlags.Head); } if (emitter.type & ParticleEmitter2FramesFlags.Tail) { this.updateParticleTexCoordsByType(index, emitter, firstHalf, t, ParticleEmitter2FramesFlags.Tail); } } private updateParticleTexCoordsByType (index: number, emitter: ParticleEmitterWrapper, firstHalf: boolean, t: number, type: ParticleEmitter2FramesFlags) { let uvAnim; let texCoords; if (type === ParticleEmitter2FramesFlags.Tail) { uvAnim = firstHalf ? emitter.props.TailUVAnim : emitter.props.TailDecayUVAnim; texCoords = emitter.tailTexCoords; } else { uvAnim = firstHalf ? emitter.props.LifeSpanUVAnim : emitter.props.DecayUVAnim; texCoords = emitter.headTexCoords; } const firstFrame = uvAnim[0]; const secondFrame = uvAnim[1]; const frame = Math.round(lerp(firstFrame, secondFrame, t)); const texCoordX = frame % emitter.props.Columns; const texCoordY = Math.floor(frame / emitter.props.Rows); const cellWidth = 1 / emitter.props.Columns; const cellHeight = 1 / emitter.props.Rows; texCoords[index * 8] = texCoordX * cellWidth; texCoords[index * 8 + 1] = texCoordY * cellHeight; texCoords[index * 8 + 2] = texCoordX * cellWidth; texCoords[index * 8 + 3] = (1 + texCoordY) * cellHeight; texCoords[index * 8 + 4] = (1 + texCoordX) * cellWidth; texCoords[index * 8 + 5] = texCoordY * cellHeight; texCoords[index * 8 + 6] = (1 + texCoordX) * cellWidth; texCoords[index * 8 + 7] = (1 + texCoordY) * cellHeight; } private updateParticleColor(index: number, emitter: ParticleEmitterWrapper, firstHalf: boolean, t: number) { if (firstHalf) { firstColor[0] = emitter.props.SegmentColor[0][0]; firstColor[1] = emitter.props.SegmentColor[0][1]; firstColor[2] = emitter.props.SegmentColor[0][2]; 
firstColor[3] = emitter.props.Alpha[0] / 255; secondColor[0] = emitter.props.SegmentColor[1][0]; secondColor[1] = emitter.props.SegmentColor[1][1]; secondColor[2] = emitter.props.SegmentColor[1][2]; secondColor[3] = emitter.props.Alpha[1] / 255; } else { firstColor[0] = emitter.props.SegmentColor[1][0]; firstColor[1] = emitter.props.SegmentColor[1][1]; firstColor[2] = emitter.props.SegmentColor[1][2]; firstColor[3] = emitter.props.Alpha[1] / 255; secondColor[0] = emitter.props.SegmentColor[2][0]; secondColor[1] = emitter.props.SegmentColor[2][1]; secondColor[2] = emitter.props.SegmentColor[2][2]; secondColor[3] = emitter.props.Alpha[2] / 255; } vec4.lerp(color, firstColor, secondColor, t); for (let i = 0; i < 4; ++i) { emitter.colors[index * 16 + i * 4] = color[0]; emitter.colors[index * 16 + i * 4 + 1] = color[1]; emitter.colors[index * 16 + i * 4 + 2] = color[2]; emitter.colors[index * 16 + i * 4 + 3] = color[3]; } } private setLayerProps (emitter: ParticleEmitterWrapper): void { if (emitter.props.FilterMode === ParticleEmitter2FilterMode.AlphaKey) { gl.uniform1f(shaderProgramLocations.discardAlphaLevelUniform, DISCARD_ALPHA_KEY_LEVEL); } else if (emitter.props.FilterMode === ParticleEmitter2FilterMode.Modulate || emitter.props.FilterMode === ParticleEmitter2FilterMode.Modulate2x) { gl.uniform1f(shaderProgramLocations.discardAlphaLevelUniform, DISCARD_MODULATE_LEVEL); } else { gl.uniform1f(shaderProgramLocations.discardAlphaLevelUniform, 0.); } if (emitter.props.FilterMode === ParticleEmitter2FilterMode.Blend) { gl.enable(gl.BLEND); gl.enable(gl.DEPTH_TEST); gl.blendFuncSeparate(gl.SRC_ALPHA, gl.ONE_MINUS_SRC_ALPHA, gl.ONE, gl.ONE_MINUS_SRC_ALPHA); gl.depthMask(false); } else if (emitter.props.FilterMode === ParticleEmitter2FilterMode.Additive) { gl.enable(gl.BLEND); gl.enable(gl.DEPTH_TEST); gl.blendFunc(gl.SRC_ALPHA, gl.ONE); gl.depthMask(false); } else if (emitter.props.FilterMode === ParticleEmitter2FilterMode.AlphaKey) { gl.enable(gl.BLEND); 
gl.enable(gl.DEPTH_TEST); gl.blendFunc(gl.SRC_ALPHA, gl.ONE); gl.depthMask(false); } else if (emitter.props.FilterMode === ParticleEmitter2FilterMode.Modulate) { gl.enable(gl.BLEND); gl.enable(gl.DEPTH_TEST); gl.blendFuncSeparate(gl.ZERO, gl.SRC_COLOR, gl.ZERO, gl.ONE); gl.depthMask(false); } else if (emitter.props.FilterMode === ParticleEmitter2FilterMode.Modulate2x) { gl.enable(gl.BLEND); gl.enable(gl.DEPTH_TEST); gl.blendFuncSeparate(gl.DST_COLOR, gl.SRC_COLOR, gl.ZERO, gl.ONE); gl.depthMask(false); } const texture = this.rendererData.model.Textures[emitter.props.TextureID]; if (texture.Image) { gl.activeTexture(gl.TEXTURE0); gl.bindTexture(gl.TEXTURE_2D, this.rendererData.textures[texture.Image]); gl.uniform1i(shaderProgramLocations.samplerUniform, 0); gl.uniform1f(shaderProgramLocations.replaceableTypeUniform, 0); } else if (texture.ReplaceableId === 1 || texture.ReplaceableId === 2) { gl.uniform3fv(shaderProgramLocations.replaceableColorUniform, this.rendererData.teamColor); gl.uniform1f(shaderProgramLocations.replaceableTypeUniform, texture.ReplaceableId); } } private setGeneralBuffers (emitter: ParticleEmitterWrapper): void { gl.bindBuffer(gl.ARRAY_BUFFER, emitter.colorBuffer); gl.bufferData(gl.ARRAY_BUFFER, emitter.colors, gl.DYNAMIC_DRAW); gl.vertexAttribPointer(shaderProgramLocations.colorAttribute, 4, gl.FLOAT, false, 0, 0); gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, emitter.indexBuffer); gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, emitter.indices, gl.DYNAMIC_DRAW); } private renderEmitterType (emitter: ParticleEmitterWrapper, type: ParticleEmitter2FramesFlags): void { if (type === ParticleEmitter2FramesFlags.Tail) { gl.bindBuffer(gl.ARRAY_BUFFER, emitter.tailTexCoordBuffer); gl.bufferData(gl.ARRAY_BUFFER, emitter.tailTexCoords, gl.DYNAMIC_DRAW); } else { gl.bindBuffer(gl.ARRAY_BUFFER, emitter.headTexCoordBuffer); gl.bufferData(gl.ARRAY_BUFFER, emitter.headTexCoords, gl.DYNAMIC_DRAW); } gl.vertexAttribPointer(shaderProgramLocations.textureCoordAttribute, 2, 
gl.FLOAT, false, 0, 0); if (type === ParticleEmitter2FramesFlags.Tail) { gl.bindBuffer(gl.ARRAY_BUFFER, emitter.tailVertexBuffer); gl.bufferData(gl.ARRAY_BUFFER, emitter.tailVertices, gl.DYNAMIC_DRAW); } else { gl.bindBuffer(gl.ARRAY_BUFFER, emitter.headVertexBuffer); gl.bufferData(gl.ARRAY_BUFFER, emitter.headVertices, gl.DYNAMIC_DRAW); } gl.vertexAttribPointer(shaderProgramLocations.vertexPositionAttribute, 3, gl.FLOAT, false, 0, 0); gl.drawElements(gl.TRIANGLES, emitter.particles.length * 6, gl.UNSIGNED_SHORT, 0); } }
the_stack
import { Directive, ElementRef, EventEmitter, Input, OnChanges, OnDestroy, OnInit, Output, SimpleChanges, } from '@angular/core' import { Point } from '@app/type' import { Rectangle } from 'electron' import { DirectiveUtils } from './directive-utils.directive' enum Status { OFF = 0, RESIZE = 1, RESIZING = 2, MOVE = 3, MOVING = 4, } enum ResizeDir { NONE = 0, WIDTH = 1 << 0, HEIGHT = 1 << 1, BOTH = WIDTH | HEIGHT, } interface AppliedBounds { x: boolean y: boolean width: boolean height: boolean } @Directive({ selector: '[appResizeDrag]', }) export class ResizeDragDirective implements OnInit, OnChanges, OnDestroy { private element: HTMLElement private resizeAnchorContainer: HTMLElement private resizeAnchorWidth: HTMLElement private resizeAnchorHeight: HTMLElement private resizeAnchorBoth: HTMLElement private dragAreaExtension: HTMLElement private status: Status = Status.OFF private resizeDir: ResizeDir = ResizeDir.NONE private mouseDownPosition: Point private mouseDownBounds: Rectangle @Input('appResizeDrag') public rootElementSelector: string // tslint:disable-next-line:no-input-rename @Input('ardDisabled') public disabled: boolean // tslint:disable-next-line:no-input-rename @Input('ardInteractionsDisabled') public interactionsDisabled: boolean // tslint:disable-next-line:no-input-rename @Input('ardAllowResize') public allowResize: boolean // tslint:disable-next-line:no-input-rename @Input('ardExtendDragArea') public extendDragArea: boolean // tslint:disable-next-line:no-input-rename @Input('ardBounds') public bounds: Rectangle // tslint:disable-next-line:no-input-rename @Input('ardDragThreshold') public resizeDragThreshold = 5 // tslint:disable-next-line:no-input-rename @Input('ardResizeWidth') public resizeWidth = 8 // tslint:disable-next-line:no-input-rename @Input('ardAppliedBounds') public appliedBounds: AppliedBounds = { x: true, y: true, width: true, height: true } // tslint:disable-next-line:no-input-rename @Input('ardAppliedBounds.x') public set 
appliedBoundsX(val: boolean) { this.appliedBounds.x = val } // tslint:disable-next-line:no-input-rename @Input('ardAppliedBounds.y') public set appliedBoundsY(val: boolean) { this.appliedBounds.y = val } // tslint:disable-next-line:no-input-rename @Input('ardAppliedBounds.width') public set appliedBoundsWidth(val: boolean) { this.appliedBounds.width = val } // tslint:disable-next-line:no-input-rename @Input('ardAppliedBounds.height') public set appliedBoundsHeight(val: boolean) { this.appliedBounds.height = val } // tslint:disable-next-line:no-input-rename @Input('ardReversePosition.x') public reversePositionX: boolean // tslint:disable-next-line:no-input-rename @Input('ardReversePosition.y') public reversePositionY: boolean // tslint:disable-next-line:no-input-rename @Input('ardOffset.x') public offsetPositionX = 0 // tslint:disable-next-line:no-input-rename @Input('ardOffset.y') public offsetPositionY = 0 // tslint:disable-next-line:no-output-rename @Output('ardResizeDrag') public resizeDrag = new EventEmitter<Rectangle>() // tslint:disable-next-line:no-output-rename @Output('ardResizeDragBegin') public resizeDragBegin = new EventEmitter<Rectangle>() // tslint:disable-next-line:no-output-rename @Output('ardResizeDragEnd') public resizeDragEnd = new EventEmitter<Rectangle>() constructor(private readonly elementRef: ElementRef<HTMLElement>) {} public ngOnInit(): void { if (this.rootElementSelector) { this.element = DirectiveUtils.getClosestMatchingAncestor( this.elementRef.nativeElement, this.rootElementSelector ) } this.element = this.element || this.elementRef.nativeElement this.element.addEventListener('mousedown', this.onMousedown, true) this.element.addEventListener('mouseup', this.onMouseup, true) this.element.addEventListener('mousemove', this.onMousemove, true) this.onChanged() } public ngOnChanges(changes: SimpleChanges): void { this.onChanged() } public ngOnDestroy(): void { this.element.removeEventListener('mousedown', this.onMousedown) 
this.element.removeEventListener('mouseup', this.onMouseup) this.element.removeEventListener('mousemove', this.onMousemove) this.resizeAnchorContainer?.remove() this.resizeAnchorWidth = null this.resizeAnchorHeight = null this.resizeAnchorBoth = null } private onChanged(): void { if (!this.element) { return } if (this.extendDragArea && !this.dragAreaExtension) { this.dragAreaExtension = document.createElement('div') this.dragAreaExtension.style.display = 'none' this.dragAreaExtension.style.position = 'fixed' this.dragAreaExtension.style.left = '0px' this.dragAreaExtension.style.top = '0px' this.dragAreaExtension.style.width = '500px' this.dragAreaExtension.style.height = '500px' this.dragAreaExtension.style.transform = 'translate3d(-50%, -50%, 0)' this.dragAreaExtension.classList.add('interactable') this.element.append(this.dragAreaExtension) } if (this.allowResize && !this.resizeAnchorContainer) { this.resizeAnchorContainer = document.createElement('div') this.resizeAnchorContainer.classList.add('interactable') this.resizeAnchorContainer.style.display = 'inline-grid' this.resizeAnchorContainer.style.position = 'absolute' this.resizeAnchorContainer.style.top = '0px' this.resizeAnchorContainer.style.left = '0px' this.resizeAnchorContainer.style['grid-auto-flow'] = 'row' const templateColRows = `auto ${this.resizeWidth}px` this.resizeAnchorContainer.style['grid-template-columns'] = templateColRows this.resizeAnchorContainer.style['grid-template-rows'] = templateColRows const topLeftDragArea = document.createElement('div') topLeftDragArea.style.cursor = 'move' this.resizeAnchorContainer.appendChild(topLeftDragArea) const resizeWidthHeight = `${this.resizeWidth * 2}px` const resizeBGColor = 'rgb(0, 0, 0, 0.01)' this.resizeAnchorWidth = document.createElement('div') this.resizeAnchorWidth.style.width = resizeWidthHeight this.resizeAnchorWidth.style.cursor = 'e-resize' this.resizeAnchorWidth.style['background-color'] = resizeBGColor 
this.resizeAnchorContainer.appendChild(this.resizeAnchorWidth) this.resizeAnchorHeight = document.createElement('div') this.resizeAnchorHeight.style.height = resizeWidthHeight this.resizeAnchorHeight.style.cursor = 's-resize' this.resizeAnchorHeight.style['background-color'] = resizeBGColor this.resizeAnchorContainer.appendChild(this.resizeAnchorHeight) this.resizeAnchorBoth = document.createElement('div') this.resizeAnchorBoth.style.transform = 'rotateZ(45deg)' this.resizeAnchorBoth.style['border-style'] = 'solid' this.resizeAnchorBoth.style['border-width'] = `${this.resizeWidth}px` this.resizeAnchorBoth.style['border-color'] = 'transparent transparent transparent yellow' this.resizeAnchorBoth.style.cursor = 'nwse-resize' this.resizeAnchorContainer.appendChild(this.resizeAnchorBoth) this.element.append(this.resizeAnchorContainer) } this.applyBounds() } private applyBounds(): void { if (this.disabled) { return } if (this.appliedBounds.x) { const posX = this.bounds.x + this.offsetPositionX if (this.reversePositionX) { const right = this.element.offsetParent.scrollWidth - posX this.element.style.right = `${right}px` this.element.style.removeProperty('left') } else { this.element.style.left = `${posX}px` this.element.style.removeProperty('right') } } if (this.appliedBounds.y) { const posY = this.bounds.y + this.offsetPositionY if (this.reversePositionY) { const bottom = this.element.offsetParent.scrollHeight - posY this.element.style.bottom = `${bottom}px` this.element.style.removeProperty('top') } else { this.element.style.top = `${posY}px` this.element.style.removeProperty('bottom') } } const width = `${this.bounds.width}px` if (this.appliedBounds.width) this.element.style.width = width if (this.resizeAnchorContainer) this.resizeAnchorContainer.style.width = width const height = `${this.bounds.height}px` if (this.appliedBounds.height) this.element.style.height = height if (this.resizeAnchorContainer) this.resizeAnchorContainer.style.height = height } private 
onMousedown = (event: MouseEvent) => { if (this.disabled || this.interactionsDisabled || this.status !== Status.OFF) { return } const point: Point = { x: event.clientX, y: event.clientY, } if (!this.overlaps(this.element, point)) { return } this.mouseDownPosition = point this.mouseDownBounds = { ...this.bounds } if (this.overlaps(this.resizeAnchorWidth, point)) { this.status = Status.RESIZE this.resizeDir = ResizeDir.WIDTH } else if (this.overlaps(this.resizeAnchorHeight, point)) { this.status = Status.RESIZE this.resizeDir = ResizeDir.HEIGHT } else if (this.overlaps(this.resizeAnchorBoth, point)) { this.status = Status.RESIZE this.resizeDir = ResizeDir.BOTH } else { this.status = Status.MOVE } this.resizeDragBegin.emit(this.bounds) } private onMouseup = () => { if (this.disabled || this.interactionsDisabled || this.status === Status.OFF) { return } const oldStatus = this.status this.status = Status.OFF this.resizeDir = ResizeDir.NONE if (this.dragAreaExtension) this.dragAreaExtension.style.display = 'none' switch (oldStatus) { case Status.MOVING: case Status.RESIZING: this.resizeDragEnd.emit(this.bounds) } } private onMousemove = (event: MouseEvent) => { if (this.disabled || this.interactionsDisabled || this.status === Status.OFF) { return } event.preventDefault() event.stopImmediatePropagation() const delta = { x: event.clientX - this.mouseDownPosition.x, y: event.clientY - this.mouseDownPosition.y, } if (this.dragAreaExtension) this.dragAreaExtension.style.display = 'block' switch (this.status) { case Status.MOVE: case Status.RESIZE: if (Math.abs(delta.x) + Math.abs(delta.y) >= this.resizeDragThreshold) { this.status++ } else { return } break } switch (this.status) { case Status.MOVING: this.bounds.x = this.mouseDownBounds.x + delta.x this.bounds.y = this.mouseDownBounds.y + delta.y break case Status.RESIZING: if ((this.resizeDir & ResizeDir.WIDTH) !== 0) this.bounds.width = this.mouseDownBounds.width + delta.x if ((this.resizeDir & ResizeDir.HEIGHT) !== 0) 
this.bounds.height = this.mouseDownBounds.height + delta.y break } if (this.dragAreaExtension) { this.dragAreaExtension.style.left = `${event.clientX}px` this.dragAreaExtension.style.top = `${event.clientY}px` } this.applyBounds() this.resizeDrag.emit(this.bounds) } private overlaps(element: HTMLElement, point: Point): boolean { if (!element) { return false } const elementBounds = element.getBoundingClientRect() if ( point.x >= elementBounds.left && point.x <= elementBounds.right && point.y >= elementBounds.top && point.y < elementBounds.bottom ) { return true } return false } }
the_stack
// Exercise script for the terminal-kit API surface (styling, input fields,
// auto-completion, menus, progress bars, screen buffers). The exact call
// forms matter — each statement exercises a specific overload of the typings.
import t, { terminal as term, autoComplete as ac, getDetectedTerminal, ScreenBufferHD, ScreenBuffer, Terminal } from "terminal-kit";
import "node";
import * as fs from "fs";

new t.Rect({width: 4, height: 4});

// The term() function simply output a string to stdout, using current style
// output "Hello world!" in default terminal's colors
t.terminal("Hello world!\n");

// This output 'red' in red
term.red("red");

// This output 'bold' in bold
term.bold("bold");

// output 'mixed' using bold, underlined & red, exposing the style-mixing syntax
term.bold.underline.red("mixed");

// printf() style formatting everywhere:
// this will output 'My name is Jack, I'm 32.' in green
term.green("My name is %s, I'm %d.\n", "Jack", 32);

// Since v0.16.x, style markup are supported as a shorthand.
// Those two lines produce the same result.
term("My name is ")
  .red("Jack")(" and I'm ")
  .green("32\n");
term("My name is ^rJack^ and I'm ^g32\n");

// Width and height of the terminal
term("The terminal size is %dx%d", term.width, term.height);

// Move the cursor at the upper-left corner
term.moveTo(1, 1);

// We can always pass additional arguments that will be displayed...
term.moveTo(1, 1, "Upper-left corner");

// ... and formated
term.moveTo(1, 1, "My name is %s, I'm %d.\n", "Jack", 32);

// ... or even combined with other styles
term.moveTo.cyan(1, 1, "My name is %s, I'm %d.\n", "Jack", 32);

// Get some user input
term.magenta("Enter your name: ");
term.inputField((error: any, input: any) => {
  term.green("\nYour name is '%s'\n", input);
});

// Releases input grabbing; the empty setTimeout gives terminal-kit a tick to
// restore the terminal before the process exits.
function terminate() {
  term.grabInput(false);
  setTimeout(() => {}, 100);
}

term.bold.cyan("Type anything on the keyboard...\n");
term.green("Hit CTRL-C to quit.\n\n");
term.grabInput({ mouse: "button" });

// Raw key / terminal / mouse event listeners exercised below.
term.on("key", (name: string, matches: any[], data: any) => {
  console.log("'key' event:", name);
  if (name === "CTRL_C") {
    terminate();
  }
});

term.on("terminal", (name: string, data: any) => {
  console.log("'terminal' event:", name, data);
});

term.on("mouse", (name: string, data: any) => {
  console.log("'mouse' event:", name, data);
});

// Word-wrap this along the full terminal width
term.wrap.yellow(
  `'Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish'`
);

// Word-wrap this inside a column starting at x=10 with a width of 25 terminal cells
term.wrapColumn({ x: 10, width: 25 });
term.wrap.green(
  `'Permission is hereby granted, free of charge, to any person obtaining a copy of this software an d associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish'`
);

// This reset the offset
term("\n");
// term.wrapColumn() could be used as well, but the next text would overwrite the last line

// Text continuation: the second text start at the end of line of the first text
term.wrap.blue("^GP^re^Yr^um^Mi^bs^bs^ci^ro^mn^ is ");
term.wrap.red("hereby granted");

// Recursive yes/no prompt: re-asks itself until the user answers "yes".
function question() {
  term("Do you like javascript? [Y|n]\n");
  // Exit on y and ENTER key
  // Ask again on n
  term.yesOrNo(
    { yes: ["y", "ENTER"], no: ["n"] },
    (error: any, result: any) => {
      if (result) {
        term.green("'Yes' detected! Good bye!\n");
      } else {
        term.red("'No' detected, are you sure?\n");
        question();
      }
    }
  );
}
question();

const history = ["John", "Jack", "Joey", "Billy", "Bob"];
const autoComplete = [
  "Barack Obama", "George W. Bush", "Bill Clinton", "George Bush",
  "Ronald W. Reagan", "Jimmy Carter", "Gerald Ford", "Richard Nixon",
  "Lyndon Johnson", "John F. Kennedy", "Dwight Eisenhower", "Harry Truman",
  "Franklin Roosevelt"
];

term("Please enter your name: ");
term.inputField(
  { history, autoComplete, autoCompleteMenu: true },
  (error: any, input: string) => {
    term.green("\nYour name is '%s'\n", input);
  }
);

const history1 = ["John", "Jack", "Joey", "Billy", "Bob"];
const autoComplete1 = [
  "Barack Obama", "George W. Bush", "Bill Clinton", "George Bush",
  "Ronald W. Reagan", "Jimmy Carter", "Gerald Ford", "Richard Nixon",
  "Lyndon Johnson", "John F. Kennedy", "Dwight Eisenhower", "Harry Truman",
  "Franklin Roosevelt"
];

term("Please enter your name: ");
// NOTE(review): this arrow function is never invoked, and `.promise` is read
// without `await`, so `input` would be a Promise — it only type-checks the
// promise-based overload of inputField.
() => {
  const input = term.inputField({ history: history1, autoComplete: autoComplete1, autoCompleteMenu: true }).promise;
  term.green("\nYour name is '%s'\n", input);
};

// Custom completer: completes against the files of this script's directory.
const autoCompleter = function autoCompleter(
  inputString: string,
  callback: (err: any, input: string) => void
) {
  fs.readdir(__dirname, (error, files) => {
    callback(undefined, ac(files, inputString, true));
  });
};

term("Choose a file: ");
term.inputField(
  { autoComplete: autoCompleter, autoCompleteMenu: true },
  (error: any, input: any) => {
    if (error) {
      term.red.bold(`'\nAn error occurs: ' + ${error} + '\n'`);
    } else {
      term.green("\nYour file is '%s'\n", input);
    }
  }
);

const autoComplete2 = [
  "dnf install", "dnf install nodejs", "dnf search",
  "sudo", "sudo dnf install", "sudo dnf install nodejs", "sudo dnf search"
];

// tokenHook colors individual input tokens depending on the preceding text.
term.inputField(
  {
    autoComplete: autoComplete2,
    autoCompleteHint: true,
    autoCompleteMenu: true,
    tokenHook: (
      token: any,
      isEndOfInput: any,
      previousTokens: any,
      term: any,
      config: any
    ) => {
      const previousText = previousTokens.join(" ");
      switch (token) {
        case "sudo":
          config.style = term.red;
          return previousTokens.length ? null : term.bold.red;
        case "dnf":
          return previousText === "" || previousText === "sudo" ? term.brightMagenta : null;
        case "install":
          config.style = term.brightBlue;
          config.hintStyle = term.brightBlack.italic;
          return previousText === "dnf" || previousText === "sudo dnf" ? term.brightYellow : null;
        case "search":
          config.style = term.brightBlue;
          return previousText === "dnf" || previousText === "sudo dnf" ? term.brightCyan : null;
        default:
          return;
      }
    }
  },
  (error: any, input: any) => {
    term.green("\nYour command is: '%s'\n", input);
  }
);

term("Choose a file: ");
term.fileInput({ baseDir: "../" }, (error: any, input: any) => {
  if (error) {
    term.red.bold(`'\nAn error occurs: ' + ${error} + '\n'`);
  } else {
    term.green("\nYour file is '%s'\n", input);
  }
});

const items1 = [
  "File", "Edit", "View", "History", "Bookmarks", "Tools", "Help"
];
const options = {
  y: 1, // the menu will be on the top of the terminal
  style: term.inverse,
  selectedStyle: term.dim.blue.bgGreen
};
term.clear();
term.singleLineMenu(items1, options, (error: any, response: any) => {
  term("\n").eraseLineAfter.green(
    "#%s selected: %s (%s,%s)\n",
    response.selectedIndex,
    response.selectedText,
    response.x,
    response.y
  );
});

term.cyan("The hall is spacious. Someone lighted few chandeliers.\n");
term.cyan("There are doorways south and west.\n");
const items2 = ["a. Go south", "b. Go west", "c. Go back to the street"];
term.singleColumnMenu(items2, (error: any, response: any) => {
  term("\n").eraseLineAfter.green(
    "#%s selected: %s (%s,%s)\n",
    response.selectedIndex,
    response.selectedText,
    response.x,
    response.y
  );
});

term.cyan("Choose a file:\n");
const items = fs.readdirSync(process.cwd());
term.gridMenu(items, (error: any, response: any) => {
  term("\n").eraseLineAfter.green(
    "#%s selected: %s (%s,%s)\n",
    response.selectedIndex,
    response.selectedText,
    response.x,
    response.y
  );
});

let progressBar: Terminal.ProgressBarController;
let progress = 0;

// Drives the percentage-style progress bar with random increments until done.
function doProgress() {
  // Add random progress
  progress += Math.random() / 10;
  progressBar.update(progress);
  if (progress >= 1) {
    // Cleanup and exit
    setTimeout(() => {
      term("\n");
    }, 200);
  } else {
    setTimeout(doProgress, 100 + Math.random() * 400);
  }
}

progressBar = term.progressBar({
  width: 80,
  title: "Serious stuff in progress:",
  eta: true,
  percent: true
});

doProgress();

const thingsToDo = [
  "update my lib", "data analyzing", "serious business",
  "decrunching data", "do my laundry", "optimizing"
];

let countDown = thingsToDo.length;

// Item-based progress bar: start() launches tasks, done() completes them.
function start() {
  const task = thingsToDo.shift();
  if (!task) {
    return;
  }
  progressBar.startItem(task);
  // Finish the task in...
  setTimeout(done.bind(null, task), 500 + Math.random() * 1200);
  // Start another parallel task in...
  setTimeout(start, 400 + Math.random() * 400);
}

function done(task: string) {
  progressBar.itemDone(task);
  countDown--;
  // Cleanup and exit
  if (!countDown) {
    setTimeout(() => {
      term("\n");
    }, 200);
  }
}

progressBar = term.progressBar({
  width: 80,
  title: "Daily tasks:",
  eta: true,
  percent: true,
  items: thingsToDo.length
});

start();

term.slowTyping(
  "What a wonderful world!\n",
  { flashStyle: term.brightWhite },
  () => {}
);

// low level
term("My name is ")
  .red("Jack")(" and I'm ")
  .green("32\n");
term("My name is ^rJack^ and I'm ^g32\n");
term.noFormat.red("hello");
term.noFormat("hello");

// color methods with a second argument
term.color(1, "test");
term.darkColor(1, "test");
term.brightColor(1, "test");
term.color256(1, "test");
term.colorRgb(255, 0, 0, "test");
term.colorRgbHex("#ff0000", "test");
term.colorGrayscale(192, "test");

// bgColor methods with a second argument
term.bgColor(1, "test");
term.bgDarkColor(1, "test");
term.bgBrightColor(1, "test");
term.bgColor256(1, "test");
term.bgColorRgb(255, 0, 0, "test");
term.bgColorRgbHex("#ff0000", "test");
term.bgColorGrayscale(192, "test");

// new color & bgColor with color name
term.color("red");
term.color("red", "test");
term.bgColor("red");
term.bgColor("red", "test");

// Detect the real terminal (the callback's `term` shadows the outer one).
getDetectedTerminal((error: any, term: any) => {
  term.cyan("Terminal name: %s\n", term.appName);
  term.cyan("Terminal app: %s\n", term.app);
  term.cyan("Terminal generic: %s\n", term.generic);
  term.cyan("Config file: %s\n", term.termconfigFile);
});

// High-definition (24-bit RGB attribute) screen buffer.
const screen = new ScreenBufferHD({ dst: term, noFill: true });

screen.fill({
  attr: {
    // Both foreground and background must have the same color
    r: 40, g: 20, b: 0,
    bgR: 40, bgG: 20, bgB: 0
  }
});

const path_to_image = "/home/imoti/Downloads/photo_2019-01-24_13-15-50.jpg";

ScreenBufferHD.loadImage(
  path_to_image,
  { shrink: { width: term.width, height: term.height * 2 } },
  (error: any, image: any) => {
    if (error) {
      throw error;
    } // Doh!
    image.draw({ dst: screen, blending: true });
    screen.draw();
  }
);

// Palette-based (non-HD) screen buffer variant of the same flow.
const screen1 = new ScreenBuffer({ dst: term, noFill: true });

screen1.fill({
  attr: {
    // Both foreground and background must have the same color
    color: 0,
    bgColor: 0
  }
});

// NOTE(review): this callback draws into `screen` (the HD buffer), not
// `screen1` which was just filled — possibly intentional for type-checking
// only, but verify.
ScreenBuffer.loadImage(
  path_to_image,
  { terminal: term, shrink: { width: term.width, height: term.height * 2 } },
  (error: any, image: any) => {
    if (error) {
      throw error;
    } // Doh!
    image.draw({ dst: screen, blending: true });
    screen.draw();
  }
);
the_stack
import { localize } from 'vs/nls';
import { IExtensionManagementService, IGlobalExtensionEnablementService, ILocalExtension } from 'vs/platform/extensionManagement/common/extensionManagement';
import { IStorageService, StorageScope, StorageTarget } from 'vs/platform/storage/common/storage';
import { ExtensionType, IExtension, isResolverExtension } from 'vs/platform/extensions/common/extensions';
import { registerSingleton } from 'vs/platform/instantiation/common/extensions';
import { INotificationService, IPromptChoice, Severity } from 'vs/platform/notification/common/notification';
import { IHostService } from 'vs/workbench/services/host/browser/host';
import { createDecorator, ServicesAccessor } from 'vs/platform/instantiation/common/instantiation';
import { Action2, MenuId, registerAction2 } from 'vs/platform/actions/common/actions';
import { ContextKeyExpr, IContextKeyService, RawContextKey } from 'vs/platform/contextkey/common/contextkey';
import { IDialogService } from 'vs/platform/dialogs/common/dialogs';
import { LifecyclePhase } from 'vs/workbench/services/lifecycle/common/lifecycle';
import { Registry } from 'vs/platform/registry/common/platform';
import { Extensions, IWorkbenchContributionsRegistry } from 'vs/workbench/common/contributions';
import { ICommandService } from 'vs/platform/commands/common/commands';
import { ILogService } from 'vs/platform/log/common/log';
import { IProductService } from 'vs/platform/product/common/productService';
import { IWorkbenchIssueService } from 'vs/workbench/services/issue/common/issue';
import { IWorkbenchEnvironmentService } from 'vs/workbench/services/environment/common/environmentService';
import { areSameExtensions } from 'vs/platform/extensionManagement/common/extensionManagementUtil';
import { CATEGORIES } from 'vs/workbench/common/actions';

// --- bisect service

export const IExtensionBisectService = createDecorator<IExtensionBisectService>('IExtensionBisectService');

/**
 * Service that binary-searches the set of installed extensions for the one
 * causing a problem. The current search interval is persisted in global
 * storage so it survives the window reloads that drive each bisect step.
 */
export interface IExtensionBisectService {

	readonly _serviceBrand: undefined;

	// True while `extension` falls inside the currently-disabled half.
	isDisabledByBisect(extension: IExtension): boolean;
	// True while a bisect session is persisted.
	isActive: boolean;
	// Number of extensions disabled by the current step, or -1 when inactive.
	disabledCount: number;
	// Begins a session over `extensions`; rejects when one is already active.
	start(extensions: ILocalExtension[]): Promise<void>;
	// Advances one step; resolves with a verdict when the search is finished.
	next(seeingBad: boolean): Promise<{ id: string; bad: boolean } | undefined>;
	// Clears the persisted session.
	reset(): Promise<void>;
}

/**
 * Immutable snapshot of one bisect step: the ordered extension ids plus the
 * [low, high) search interval. Extensions in [mid, high) are the currently
 * disabled half; `mid` defaults to the interval midpoint.
 */
class BisectState {

	// Revives a state from its JSON string form; undefined on missing/corrupt input.
	static fromJSON(raw: string | undefined): BisectState | undefined {
		if (!raw) {
			return undefined;
		}
		try {
			interface Raw extends BisectState { }
			const data: Raw = JSON.parse(raw);
			return new BisectState(data.extensions, data.low, data.high, data.mid);
		} catch {
			return undefined;
		}
	}

	constructor(
		readonly extensions: string[],
		readonly low: number,
		readonly high: number,
		// `| 0` truncates the midpoint to an integer index.
		readonly mid: number = ((low + high) / 2) | 0
	) { }
}

class ExtensionBisectService implements IExtensionBisectService {

	declare readonly _serviceBrand: undefined;

	private static readonly _storageKey = 'extensionBisectState';

	private readonly _state: BisectState | undefined;
	// extension id -> disabled-by-bisect flag, derived once from _state.
	private readonly _disabled = new Map<string, boolean>();

	constructor(
		@ILogService logService: ILogService,
		@IStorageService private readonly _storageService: IStorageService,
		@IWorkbenchEnvironmentService private readonly _envService: IWorkbenchEnvironmentService
	) {
		// Restore a session persisted before the last reload, if any.
		const raw = _storageService.get(ExtensionBisectService._storageKey, StorageScope.GLOBAL);
		this._state = BisectState.fromJSON(raw);

		if (this._state) {
			const { mid, high } = this._state;
			for (let i = 0; i < this._state.extensions.length; i++) {
				// Extensions in [mid, high) are the disabled half of this step.
				const isDisabled = i >= mid && i < high;
				this._disabled.set(this._state.extensions[i], isDisabled);
			}
			logService.warn('extension BISECT active', [...this._disabled]);
		}
	}

	get isActive() {
		return !!this._state;
	}

	get disabledCount() {
		return this._state ? this._state.high - this._state.mid : -1;
	}

	isDisabledByBisect(extension: IExtension): boolean {
		if (!this._state) {
			// bisect isn't active
			return false;
		}
		if (isResolverExtension(extension.manifest, this._envService.remoteAuthority)) {
			// the current remote resolver extension cannot be disabled
			return false;
		}
		if (this._isEnabledInEnv(extension)) {
			// Extension enabled in env cannot be disabled
			return false;
		}
		const disabled = this._disabled.get(extension.identifier.id);
		return disabled ?? false;
	}

	// True when the extension was force-enabled via the environment
	// (e.g. a command-line --enable-extension flag).
	private _isEnabledInEnv(extension: IExtension): boolean {
		return Array.isArray(this._envService.enableExtensions)
			&& this._envService.enableExtensions.some(id => areSameExtensions({ id }, extension.identifier));
	}

	async start(extensions: ILocalExtension[]): Promise<void> {
		if (this._state) {
			throw new Error('invalid state');
		}
		const extensionIds = extensions.map(ext => ext.identifier.id);
		// mid=0: the first step disables ALL extensions to confirm an
		// extension is the culprit at all.
		const newState = new BisectState(extensionIds, 0, extensionIds.length, 0);
		this._storageService.store(ExtensionBisectService._storageKey, JSON.stringify(newState), StorageScope.GLOBAL, StorageTarget.MACHINE);
		// Flush before the caller reloads the window so the state survives.
		await this._storageService.flush();
	}

	async next(seeingBad: boolean): Promise<{ id: string; bad: boolean } | undefined> {
		if (!this._state) {
			throw new Error('invalid state');
		}
		// check if bad when all extensions are disabled
		if (seeingBad && this._state.mid === 0 && this._state.high === this._state.extensions.length) {
			return { bad: true, id: '' };
		}
		// check if there is only one left
		if (this._state.low === this._state.high - 1) {
			await this.reset();
			return { id: this._state.extensions[this._state.low], bad: seeingBad };
		}
		// the second half is disabled so if there is still bad it must be
		// in the first half
		const nextState = new BisectState(
			this._state.extensions,
			seeingBad ? this._state.low : this._state.mid,
			seeingBad ? this._state.mid : this._state.high,
		);
		this._storageService.store(ExtensionBisectService._storageKey, JSON.stringify(nextState), StorageScope.GLOBAL, StorageTarget.MACHINE);
		await this._storageService.flush();
		return undefined;
	}

	async reset(): Promise<void> {
		this._storageService.remove(ExtensionBisectService._storageKey, StorageScope.GLOBAL);
		await this._storageService.flush();
	}
}

registerSingleton(IExtensionBisectService, ExtensionBisectService, true);

// --- bisect UI

/**
 * Workbench contribution: while a bisect session is active, sets the
 * `isExtensionBisectActive` context key and shows the good/bad/stop prompt
 * after each reload.
 */
class ExtensionBisectUi {

	static ctxIsBisectActive = new RawContextKey('isExtensionBisectActive', false);

	constructor(
		@IContextKeyService contextKeyService: IContextKeyService,
		@IExtensionBisectService private readonly _extensionBisectService: IExtensionBisectService,
		@INotificationService private readonly _notificationService: INotificationService,
		@ICommandService private readonly _commandService: ICommandService,
	) {
		if (_extensionBisectService.isActive) {
			ExtensionBisectUi.ctxIsBisectActive.bindTo(contextKeyService).set(true);
			this._showBisectPrompt();
		}
	}

	// Sticky notification that forwards the user's verdict to the
	// extension.bisect.next / extension.bisect.stop commands.
	// NOTE(review): the choice labels here are hard-coded English while the
	// message is localized — consider whether they should use localize() too.
	private _showBisectPrompt(): void {

		const goodPrompt: IPromptChoice = {
			label: 'Good now',
			run: () => this._commandService.executeCommand('extension.bisect.next', false)
		};
		const badPrompt: IPromptChoice = {
			label: 'This is bad',
			run: () => this._commandService.executeCommand('extension.bisect.next', true)
		};
		const stop: IPromptChoice = {
			label: 'Stop Bisect',
			run: () => this._commandService.executeCommand('extension.bisect.stop')
		};

		const message = this._extensionBisectService.disabledCount === 1
			? localize('bisect.singular', "Extension Bisect is active and has disabled 1 extension. Check if you can still reproduce the problem and proceed by selecting from these options.")
			: localize('bisect.plural', "Extension Bisect is active and has disabled {0} extensions. Check if you can still reproduce the problem and proceed by selecting from these options.", this._extensionBisectService.disabledCount);

		this._notificationService.prompt(
			Severity.Info,
			message,
			[goodPrompt, badPrompt, stop],
			{ sticky: true }
		);
	}
}

Registry.as<IWorkbenchContributionsRegistry>(Extensions.Workbench).registerWorkbenchContribution(
	ExtensionBisectUi,
	LifecyclePhase.Restored
);

// Command: start a bisect over all enabled user extensions, then reload.
registerAction2(class extends Action2 {
	constructor() {
		super({
			id: 'extension.bisect.start',
			title: { value: localize('title.start', "Start Extension Bisect"), original: 'Start Extension Bisect' },
			category: CATEGORIES.Help,
			f1: true,
			precondition: ExtensionBisectUi.ctxIsBisectActive.negate(),
			menu: {
				id: MenuId.ViewContainerTitle,
				when: ContextKeyExpr.equals('viewContainer', 'workbench.view.extensions'),
				group: '2_enablement',
				order: 4
			}
		});
	}

	async run(accessor: ServicesAccessor): Promise<void> {
		const dialogService = accessor.get(IDialogService);
		const hostService = accessor.get(IHostService);
		const extensionManagement = accessor.get(IExtensionManagementService);
		const extensionEnablementService = accessor.get(IGlobalExtensionEnablementService);
		const extensionsBisect = accessor.get(IExtensionBisectService);

		// Only bisect extensions the user hasn't already disabled.
		const disabled = new Set(extensionEnablementService.getDisabledExtensions().map(id => id.id));

		const extensions = (await extensionManagement.getInstalled(ExtensionType.User)).filter(ext => !disabled.has(ext.identifier.id));

		const res = await dialogService.confirm({
			message: localize('msg.start', "Extension Bisect"),
			// ~log2(n) reloads expected for a binary search (+2 for start/confirm steps).
			detail: localize('detail.start', "Extension Bisect will use binary search to find an extension that causes a problem. During the process the window reloads repeatedly (~{0} times). Each time you must confirm if you are still seeing problems.", 2 + Math.log2(extensions.length) | 0),
			primaryButton: localize('msg2', "Start Extension Bisect")
		});

		if (res.confirmed) {
			await extensionsBisect.start(extensions);
			hostService.reload();
		}
	}
});

// Command: record the user's good/bad verdict and advance the bisect.
// When invoked without an argument (e.g. from the command palette) it asks
// via a dialog first.
registerAction2(class extends Action2 {
	constructor() {
		super({
			id: 'extension.bisect.next',
			title: { value: localize('title.isBad', "Continue Extension Bisect"), original: 'Continue Extension Bisect' },
			// NOTE(review): uses localize('help', "Help") while the start action
			// uses CATEGORIES.Help — confirm whether these should be unified.
			category: localize('help', "Help"),
			f1: true,
			precondition: ExtensionBisectUi.ctxIsBisectActive
		});
	}

	async run(accessor: ServicesAccessor, seeingBad: boolean | undefined): Promise<void> {
		const dialogService = accessor.get(IDialogService);
		const hostService = accessor.get(IHostService);
		const bisectService = accessor.get(IExtensionBisectService);
		const productService = accessor.get(IProductService);
		const extensionEnablementService = accessor.get(IGlobalExtensionEnablementService);
		const issueService = accessor.get(IWorkbenchIssueService);

		if (!bisectService.isActive) {
			return;
		}

		// No verdict passed in: ask the user (null = cancelled, undefined = stop).
		if (seeingBad === undefined) {
			const goodBadStopCancel = await this._checkForBad(dialogService, bisectService);
			if (goodBadStopCancel === null) {
				return;
			}
			seeingBad = goodBadStopCancel;
		}

		// Still undefined after the dialog means the user chose "Stop Bisect".
		if (seeingBad === undefined) {
			await bisectService.reset();
			hostService.reload();
			return;
		}

		const done = await bisectService.next(seeingBad);
		if (!done) {
			// Search narrowed but not finished — reload into the next step.
			hostService.reload();
			return;
		}

		if (done.bad) {
			// DONE but nothing found
			await dialogService.show(Severity.Info, localize('done.msg', "Extension Bisect"), undefined, {
				detail: localize('done.detail2', "Extension Bisect is done but no extension has been identified. This might be a problem with {0}.", productService.nameShort)
			});

		} else {
			// DONE and identified extension
			const res = await dialogService.show(Severity.Info, localize('done.msg', "Extension Bisect"),
				[localize('report', "Report Issue & Continue"), localize('done', "Continue")],
				{
					detail: localize('done.detail', "Extension Bisect is done and has identified {0} as the extension causing the problem.", done.id),
					// NOTE(review): 'done.disbale' is a misspelled localization key;
					// renaming it would orphan existing translations — leave as-is
					// unless the translation pipeline is migrated too.
					checkbox: { label: localize('done.disbale', "Keep this extension disabled"), checked: true },
					cancelId: 1
				}
			);
			if (res.checkboxChecked) {
				await extensionEnablementService.disableExtension({ id: done.id }, undefined);
			}
			if (res.choice === 0) {
				await issueService.openReporter({ extensionId: done.id });
			}
		}
		await bisectService.reset();
		hostService.reload();
	}

	// Returns false = good, true = bad, undefined = stop bisect, null = cancel.
	private async _checkForBad(dialogService: IDialogService, bisectService: IExtensionBisectService): Promise<boolean | undefined | null> {
		const options = {
			cancelId: 3,
			detail: localize('bisect', "Extension Bisect is active and has disabled {0} extensions. Check if you can still reproduce the problem and proceed by selecting from these options.", bisectService.disabledCount),
		};
		const res = await dialogService.show(
			Severity.Info,
			localize('msg.next', "Extension Bisect"),
			[localize('next.good', "Good now"), localize('next.bad', "This is bad"), localize('next.stop', "Stop Bisect"), localize('next.cancel', "Cancel")],
			options
		);
		switch (res.choice) {
			case 0: return false; //good now
			case 1: return true; //bad
			case 2: return undefined; //stop
		}
		return null; //cancel
	}
});

// Command: abort the bisect session and reload without a verdict.
registerAction2(class extends Action2 {
	constructor() {
		super({
			id: 'extension.bisect.stop',
			title: { value: localize('title.stop', "Stop Extension Bisect"), original: 'Stop Extension Bisect' },
			category: localize('help', "Help"),
			f1: true,
			precondition: ExtensionBisectUi.ctxIsBisectActive
		});
	}

	async run(accessor: ServicesAccessor): Promise<void> {
		const extensionsBisect = accessor.get(IExtensionBisectService);
		const hostService = accessor.get(IHostService);
		await extensionsBisect.reset();
		hostService.reload();
	}
});
the_stack
import { HttpResponse, HttpEvent } from '@angular/common/http'; import { Observable } from 'rxjs';import { HttpOptions } from '../../types'; import * as models from '../../models'; export interface OrgsAPIClientInterface { /** * Arguments object for method `getOrgsOrg`. */ getOrgsOrgParams?: { /** Name of organisation. */ org: string, /** * You can check the current version of media type in responses. * */ xGitHubMediaType?: string, /** Is used to set specified media type. */ accept?: string, xRateLimit?: number, xRateLimitRemaining?: number, xRateLimitReset?: number, xGitHubRequestId?: number, }; /** * Get an Organization. * Response generated for [ 200 ] HTTP response code. */ getOrgsOrg( args: Exclude<OrgsAPIClientInterface['getOrgsOrgParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Organization>; getOrgsOrg( args: Exclude<OrgsAPIClientInterface['getOrgsOrgParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Organization>>; getOrgsOrg( args: Exclude<OrgsAPIClientInterface['getOrgsOrgParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Organization>>; /** * Arguments object for method `patchOrgsOrg`. */ patchOrgsOrgParams?: { /** Name of organisation. */ org: string, /** * You can check the current version of media type in responses. * */ xGitHubMediaType?: string, /** Is used to set specified media type. */ accept?: string, xRateLimit?: number, xRateLimitRemaining?: number, xRateLimitReset?: number, xGitHubRequestId?: number, body: models.PatchOrg, }; /** * Edit an Organization. * Response generated for [ 200 ] HTTP response code. 
*/ patchOrgsOrg( args: Exclude<OrgsAPIClientInterface['patchOrgsOrgParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Organization>; patchOrgsOrg( args: Exclude<OrgsAPIClientInterface['patchOrgsOrgParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Organization>>; patchOrgsOrg( args: Exclude<OrgsAPIClientInterface['patchOrgsOrgParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Organization>>; /** * Arguments object for method `getOrgsOrgEvents`. */ getOrgsOrgEventsParams?: { /** Name of organisation. */ org: string, /** * You can check the current version of media type in responses. * */ xGitHubMediaType?: string, /** Is used to set specified media type. */ accept?: string, xRateLimit?: number, xRateLimitRemaining?: number, xRateLimitReset?: number, xGitHubRequestId?: number, }; /** * List public events for an organization. * Response generated for [ 200 ] HTTP response code. */ getOrgsOrgEvents( args: Exclude<OrgsAPIClientInterface['getOrgsOrgEventsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Events>; getOrgsOrgEvents( args: Exclude<OrgsAPIClientInterface['getOrgsOrgEventsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Events>>; getOrgsOrgEvents( args: Exclude<OrgsAPIClientInterface['getOrgsOrgEventsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Events>>; /** * Arguments object for method `getOrgsOrgIssues`. */ getOrgsOrgIssuesParams?: { /** Name of organisation. 
*/ org: string, /** * Issues assigned to you / created by you / mentioning you / you're * subscribed to updates for / All issues the authenticated user can see * * If not set, server will use the default value: all */ filter: ('assigned' | 'created' | 'mentioned' | 'subscribed' | 'all'), /** If not set, server will use the default value: open */ state: ('open' | 'closed'), /** String list of comma separated Label names. Example - bug,ui,@high. */ labels: string, /** If not set, server will use the default value: created */ sort: ('created' | 'updated' | 'comments'), /** If not set, server will use the default value: desc */ direction: ('asc' | 'desc'), /** * Optional string of a timestamp in ISO 8601 format: YYYY-MM-DDTHH:MM:SSZ. * Only issues updated at or after this time are returned. * */ since?: string, /** * You can check the current version of media type in responses. * */ xGitHubMediaType?: string, /** Is used to set specified media type. */ accept?: string, xRateLimit?: number, xRateLimitRemaining?: number, xRateLimitReset?: number, xGitHubRequestId?: number, }; /** * List issues. * List all issues for a given organization for the authenticated user. * * Response generated for [ 200 ] HTTP response code. */ getOrgsOrgIssues( args: Exclude<OrgsAPIClientInterface['getOrgsOrgIssuesParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Issues>; getOrgsOrgIssues( args: Exclude<OrgsAPIClientInterface['getOrgsOrgIssuesParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Issues>>; getOrgsOrgIssues( args: Exclude<OrgsAPIClientInterface['getOrgsOrgIssuesParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Issues>>; /** * Arguments object for method `getOrgsOrgMembers`. */ getOrgsOrgMembersParams?: { /** Name of organisation. */ org: string, /** * You can check the current version of media type in responses. 
* */ xGitHubMediaType?: string, /** Is used to set specified media type. */ accept?: string, xRateLimit?: number, xRateLimitRemaining?: number, xRateLimitReset?: number, xGitHubRequestId?: number, }; /** * Members list. * List all users who are members of an organization. A member is a user tha * belongs to at least 1 team in the organization. If the authenticated user * is also an owner of this organization then both concealed and public members * will be returned. If the requester is not an owner of the organization the * query will be redirected to the public members list. * * Response generated for [ 200 ] HTTP response code. */ getOrgsOrgMembers( args: Exclude<OrgsAPIClientInterface['getOrgsOrgMembersParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Users>; getOrgsOrgMembers( args: Exclude<OrgsAPIClientInterface['getOrgsOrgMembersParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Users>>; getOrgsOrgMembers( args: Exclude<OrgsAPIClientInterface['getOrgsOrgMembersParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Users>>; /** * Arguments object for method `deleteOrgsOrgMembersUsername`. */ deleteOrgsOrgMembersUsernameParams?: { /** Name of organisation. */ org: string, /** Name of the user. */ username: string, /** * You can check the current version of media type in responses. * */ xGitHubMediaType?: string, /** Is used to set specified media type. */ accept?: string, xRateLimit?: number, xRateLimitRemaining?: number, xRateLimitReset?: number, xGitHubRequestId?: number, }; /** * Remove a member. * Removing a user from this list will remove them from all teams and they * will no longer have any access to the organization's repositories. * * Response generated for [ 204 ] HTTP response code. 
*/ deleteOrgsOrgMembersUsername( args: Exclude<OrgsAPIClientInterface['deleteOrgsOrgMembersUsernameParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<void>; deleteOrgsOrgMembersUsername( args: Exclude<OrgsAPIClientInterface['deleteOrgsOrgMembersUsernameParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<void>>; deleteOrgsOrgMembersUsername( args: Exclude<OrgsAPIClientInterface['deleteOrgsOrgMembersUsernameParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<void>>; /** * Arguments object for method `getOrgsOrgMembersUsername`. */ getOrgsOrgMembersUsernameParams?: { /** Name of organisation. */ org: string, /** Name of the user. */ username: string, /** * You can check the current version of media type in responses. * */ xGitHubMediaType?: string, /** Is used to set specified media type. */ accept?: string, xRateLimit?: number, xRateLimitRemaining?: number, xRateLimitReset?: number, xGitHubRequestId?: number, }; /** * Check if a user is, publicly or privately, a member of the organization. * Response generated for [ 204 ] HTTP response code. */ getOrgsOrgMembersUsername( args: Exclude<OrgsAPIClientInterface['getOrgsOrgMembersUsernameParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<void>; getOrgsOrgMembersUsername( args: Exclude<OrgsAPIClientInterface['getOrgsOrgMembersUsernameParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<void>>; getOrgsOrgMembersUsername( args: Exclude<OrgsAPIClientInterface['getOrgsOrgMembersUsernameParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<void>>; /** * Arguments object for method `getOrgsOrgPublicMembers`. */ getOrgsOrgPublicMembersParams?: { /** Name of organisation. */ org: string, /** * You can check the current version of media type in responses. 
* */ xGitHubMediaType?: string, /** Is used to set specified media type. */ accept?: string, xRateLimit?: number, xRateLimitRemaining?: number, xRateLimitReset?: number, xGitHubRequestId?: number, }; /** * Public members list. * Members of an organization can choose to have their membership publicized * or not. * * Response generated for [ 200 ] HTTP response code. */ getOrgsOrgPublicMembers( args: Exclude<OrgsAPIClientInterface['getOrgsOrgPublicMembersParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Users>; getOrgsOrgPublicMembers( args: Exclude<OrgsAPIClientInterface['getOrgsOrgPublicMembersParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Users>>; getOrgsOrgPublicMembers( args: Exclude<OrgsAPIClientInterface['getOrgsOrgPublicMembersParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Users>>; /** * Arguments object for method `deleteOrgsOrgPublicMembersUsername`. */ deleteOrgsOrgPublicMembersUsernameParams?: { /** Name of organisation. */ org: string, /** Name of the user. */ username: string, /** * You can check the current version of media type in responses. * */ xGitHubMediaType?: string, /** Is used to set specified media type. */ accept?: string, xRateLimit?: number, xRateLimitRemaining?: number, xRateLimitReset?: number, xGitHubRequestId?: number, }; /** * Conceal a user's membership. * Response generated for [ 204 ] HTTP response code. 
*/ deleteOrgsOrgPublicMembersUsername( args: Exclude<OrgsAPIClientInterface['deleteOrgsOrgPublicMembersUsernameParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<void>; deleteOrgsOrgPublicMembersUsername( args: Exclude<OrgsAPIClientInterface['deleteOrgsOrgPublicMembersUsernameParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<void>>; deleteOrgsOrgPublicMembersUsername( args: Exclude<OrgsAPIClientInterface['deleteOrgsOrgPublicMembersUsernameParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<void>>; /** * Arguments object for method `getOrgsOrgPublicMembersUsername`. */ getOrgsOrgPublicMembersUsernameParams?: { /** Name of organisation. */ org: string, /** Name of the user. */ username: string, /** * You can check the current version of media type in responses. * */ xGitHubMediaType?: string, /** Is used to set specified media type. */ accept?: string, xRateLimit?: number, xRateLimitRemaining?: number, xRateLimitReset?: number, xGitHubRequestId?: number, }; /** * Check public membership. * Response generated for [ 204 ] HTTP response code. */ getOrgsOrgPublicMembersUsername( args: Exclude<OrgsAPIClientInterface['getOrgsOrgPublicMembersUsernameParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<void>; getOrgsOrgPublicMembersUsername( args: Exclude<OrgsAPIClientInterface['getOrgsOrgPublicMembersUsernameParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<void>>; getOrgsOrgPublicMembersUsername( args: Exclude<OrgsAPIClientInterface['getOrgsOrgPublicMembersUsernameParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<void>>; /** * Arguments object for method `putOrgsOrgPublicMembersUsername`. */ putOrgsOrgPublicMembersUsernameParams?: { /** Name of organisation. */ org: string, /** Name of the user. 
*/ username: string, /** * You can check the current version of media type in responses. * */ xGitHubMediaType?: string, /** Is used to set specified media type. */ accept?: string, xRateLimit?: number, xRateLimitRemaining?: number, xRateLimitReset?: number, xGitHubRequestId?: number, }; /** * Publicize a user's membership. * Response generated for [ 204 ] HTTP response code. */ putOrgsOrgPublicMembersUsername( args: Exclude<OrgsAPIClientInterface['putOrgsOrgPublicMembersUsernameParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<void>; putOrgsOrgPublicMembersUsername( args: Exclude<OrgsAPIClientInterface['putOrgsOrgPublicMembersUsernameParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<void>>; putOrgsOrgPublicMembersUsername( args: Exclude<OrgsAPIClientInterface['putOrgsOrgPublicMembersUsernameParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<void>>; /** * Arguments object for method `getOrgsOrgRepos`. */ getOrgsOrgReposParams?: { /** Name of organisation. */ org: string, /** If not set, server will use the default value: all */ type?: ('all' | 'public' | 'private' | 'forks' | 'sources' | 'member'), /** * You can check the current version of media type in responses. * */ xGitHubMediaType?: string, /** Is used to set specified media type. */ accept?: string, xRateLimit?: number, xRateLimitRemaining?: number, xRateLimitReset?: number, xGitHubRequestId?: number, }; /** * List repositories for the specified org. * Response generated for [ 200 ] HTTP response code. 
*/ getOrgsOrgRepos( args: Exclude<OrgsAPIClientInterface['getOrgsOrgReposParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Repos>; getOrgsOrgRepos( args: Exclude<OrgsAPIClientInterface['getOrgsOrgReposParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Repos>>; getOrgsOrgRepos( args: Exclude<OrgsAPIClientInterface['getOrgsOrgReposParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Repos>>; /** * Arguments object for method `postOrgsOrgRepos`. */ postOrgsOrgReposParams?: { /** Name of organisation. */ org: string, /** * You can check the current version of media type in responses. * */ xGitHubMediaType?: string, /** Is used to set specified media type. */ accept?: string, xRateLimit?: number, xRateLimitRemaining?: number, xRateLimitReset?: number, xGitHubRequestId?: number, body: models.PostRepo, }; /** * Create a new repository for the authenticated user. OAuth users must supply * repo scope. * * Response generated for [ 201 ] HTTP response code. */ postOrgsOrgRepos( args: Exclude<OrgsAPIClientInterface['postOrgsOrgReposParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Repos>; postOrgsOrgRepos( args: Exclude<OrgsAPIClientInterface['postOrgsOrgReposParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Repos>>; postOrgsOrgRepos( args: Exclude<OrgsAPIClientInterface['postOrgsOrgReposParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Repos>>; /** * Arguments object for method `getOrgsOrgTeams`. */ getOrgsOrgTeamsParams?: { /** Name of organisation. */ org: string, /** * You can check the current version of media type in responses. * */ xGitHubMediaType?: string, /** Is used to set specified media type. 
*/ accept?: string, xRateLimit?: number, xRateLimitRemaining?: number, xRateLimitReset?: number, xGitHubRequestId?: number, }; /** * List teams. * Response generated for [ 200 ] HTTP response code. */ getOrgsOrgTeams( args: Exclude<OrgsAPIClientInterface['getOrgsOrgTeamsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Teams>; getOrgsOrgTeams( args: Exclude<OrgsAPIClientInterface['getOrgsOrgTeamsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Teams>>; getOrgsOrgTeams( args: Exclude<OrgsAPIClientInterface['getOrgsOrgTeamsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Teams>>; /** * Arguments object for method `postOrgsOrgTeams`. */ postOrgsOrgTeamsParams?: { /** Name of organisation. */ org: string, /** * You can check the current version of media type in responses. * */ xGitHubMediaType?: string, /** Is used to set specified media type. */ accept?: string, xRateLimit?: number, xRateLimitRemaining?: number, xRateLimitReset?: number, xGitHubRequestId?: number, body: models.OrgTeamsPost, }; /** * Create team. * In order to create a team, the authenticated user must be an owner of organization. * * Response generated for [ 201 ] HTTP response code. */ postOrgsOrgTeams( args: Exclude<OrgsAPIClientInterface['postOrgsOrgTeamsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'body', ): Observable<models.Team>; postOrgsOrgTeams( args: Exclude<OrgsAPIClientInterface['postOrgsOrgTeamsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'response', ): Observable<HttpResponse<models.Team>>; postOrgsOrgTeams( args: Exclude<OrgsAPIClientInterface['postOrgsOrgTeamsParams'], undefined>, requestHttpOptions?: HttpOptions, observe?: 'events', ): Observable<HttpEvent<models.Team>>; }
the_stack
// WYSIWYG editing mode for Roam blocks: converts a textarea's markdown-ish
// value (only **bold** and __italic__ are handled — see parseValue/blockToString)
// into a draft-js Editor, and converts the edited state back to markdown on exit,
// preserving the user's cursor/selection offsets across both conversions.
import React, {
  useCallback,
  useEffect,
  useMemo,
  useRef,
  useState,
} from "react";
import { Editor } from "@dvargas92495/react-draft-wysiwyg";
import {
  EditorState,
  ContentState,
  ContentBlock,
  CharacterMetadata,
  genKey,
} from "draft-js";
import { List } from "immutable";
import "@dvargas92495/react-draft-wysiwyg/dist/react-draft-wysiwyg.css";
import ReactDOM from "react-dom";
import { TextArea } from "@blueprintjs/core";
import { useDocumentKeyDown } from "./hooks";
import { getUids } from "roam-client";
import { fixCursorById } from "../entry-helpers";

// Rendered inside the editor toolbar to warn that clicking toolbar buttons
// blurs the editor, which triggers onBlur -> outputOnUnmount and closes the mode.
const ToolbarWarning = () => (
  <span style={{ margin: "0 8px" }}>
    Warning: Clicking Buttons will close WYSIWIG. Use Hot Keys Instead
  </span>
);

// The wysiwyg Editor instance exposes getEditorState at runtime but the
// published typings don't declare it, so it is intersected on here.
type EditorType = {
  getEditorState: () => EditorState;
} & Editor;

/**
 * Parses a markdown string into draft-js editor state.
 *
 * Walks the raw text character-by-character, marks the spans inside
 * paired `**`/`__` delimiters with BOLD/ITALIC styles, drops the delimiter
 * characters themselves, and shifts the initial selection offsets left by
 * one for every delimiter character removed before them.
 *
 * Returns the default editor state (a single "paragraph" ContentBlock with
 * the styled character list) plus the adjusted selection start/end.
 */
const parseValue = ({
  initialValue,
  initialStart,
  initialEnd,
}: {
  initialValue: string;
  initialStart: number;
  initialEnd: number;
}) => {
  // One entry per character; `keep` is flipped to false for delimiter chars.
  const textData = Array.from(initialValue).map((c) => ({
    c,
    styles: [] as string[],
    keep: true,
  }));
  const selection = {
    defaultSelectionStart: initialStart,
    defaultSelectionEnd: initialEnd,
  };
  // Marks character n for removal and shifts any selection endpoint that
  // sits after it one position left so the cursor stays on the same text.
  const deleteIndex = (n: number) => {
    textData[n].keep = false;
    if (initialStart > n) {
      selection.defaultSelectionStart--;
    }
    if (initialEnd > n) {
      selection.defaultSelectionEnd--;
    }
  };
  // Finds every occurrence of the two-character `matcher` delimiter, pairs
  // them up (an unmatched trailing delimiter is ignored), styles the text
  // between each pair, and deletes the four delimiter characters.
  const applyStyle = (matcher: string, style: string) => {
    const regex = new RegExp(matcher, "g");
    let match;
    const indices = [];
    while ((match = regex.exec(initialValue))) {
      indices.push(match.index);
    }
    // Truncate to an even count so delimiters pair cleanly.
    const groupedIndices = indices.slice(0, Math.floor(indices.length / 2) * 2);
    for (let pointer = 0; pointer < groupedIndices.length; pointer += 2) {
      const start = groupedIndices[pointer];
      const end = groupedIndices[pointer + 1];
      // start + 2 skips the opening two-char delimiter itself.
      for (let td = start + 2; td < end; td++) {
        textData[td].styles.push(style);
      }
      // Remove both characters of the opening and closing delimiters.
      [start, start + 1, end, end + 1].forEach(deleteIndex);
    }
  };
  applyStyle("\\*\\*", "BOLD");
  applyStyle("__", "ITALIC");
  const filteredTextData = textData.filter((td) => td.keep);
  const text = filteredTextData.map((t) => t.c).join("");
  // draft-js wants an immutable List of CharacterMetadata, one per character.
  const characterList = List(
    filteredTextData.map((t) =>
      t.styles.reduce(
        (c, s) => CharacterMetadata.applyStyle(c, s),
        CharacterMetadata.create()
      )
    )
  );
  return {
    ...selection,
    defaultEditorState: EditorState.createWithContent(
      ContentState.createFromBlockArray([
        new ContentBlock({
          characterList,
          type: "paragraph",
          text,
          key: genKey(),
        }),
      ])
    ),
  };
};

/**
 * Serializes one draft-js ContentBlock back to markdown.
 *
 * Inverse of parseValue for a single block: emits `**`/`__` delimiters at
 * style boundaries while scanning the character list. The shared, MUTATED
 * `selection` accumulator tracks how many delimiter characters were inserted
 * before the original start/end offsets (`start`/`end` grow accordingly) and
 * `pointer` carries the running character offset across successive blocks —
 * callers must pass the same object for every block, in document order.
 */
const blockToString = (
  contentBlock: ContentBlock,
  selection: {
    start: number;
    end: number;
    initialStart: number;
    initialEnd: number;
    pointer: number;
  }
) => {
  const text = contentBlock.getText();
  const characterList = contentBlock.getCharacterList();
  const length = contentBlock.getLength();
  let markdown = "";
  // Emits delimiter `text` at block-local index i and, if the original
  // selection endpoints lie past this absolute position, pushes them right
  // by the delimiter's length.
  const addMarkdown = (text: string, i: number) => {
    if (selection.initialStart > i + selection.pointer) {
      selection.start += text.length;
    }
    if (selection.initialEnd > i + selection.pointer) {
      selection.end += text.length;
    }
    return text;
  };
  let bolded = false;
  let italicized = false;
  for (let index = 0; index < length; index++) {
    // unfortunately order matters. __**okay**__ does not apply both styles
    const isBold = characterList.get(index).hasStyle("BOLD");
    const isItalic = characterList.get(index).hasStyle("ITALIC");
    if (!bolded && isBold) {
      markdown = `${markdown}${addMarkdown("**", index)}`;
      bolded = true;
    }
    if (!italicized && isItalic) {
      markdown = `${markdown}${addMarkdown("__", index)}`;
      italicized = true;
    }
    if (italicized && !isItalic) {
      markdown = `${markdown}${addMarkdown("__", index)}`;
      italicized = false;
    }
    if (bolded && !isBold) {
      markdown = `${markdown}${addMarkdown("**", index)}`;
      bolded = false;
    }
    markdown = `${markdown}${text.charAt(index)}`;
  }
  // Close any style still open at the end of the block.
  if (italicized) {
    markdown = `${markdown}${addMarkdown("__", length)}`;
  }
  if (bolded) {
    markdown = `${markdown}${addMarkdown("**", length)}`;
  }
  selection.pointer += length;
  return markdown;
};

/**
 * The WYSIWYG editor component.
 *
 * Mounts a draft-js Editor seeded from `initialValue` (markdown) with the
 * caret restored at `initialStart`/`initialEnd`. Exits — and calls
 * `onUnmount` with the serialized markdown `output` and the caret's
 * markdown-space `start`/`end` — on either alt+w or editor blur.
 */
const WYSIWYGMode = ({
  initialValue,
  initialStart,
  initialEnd,
  onUnmount,
}: {
  initialValue: string;
  initialStart: number;
  initialEnd: number;
  onUnmount: ({
    output,
    start,
    end,
  }: {
    output: string;
    start: number;
    end: number;
  }) => void;
}): JSX.Element => {
  const editorRef = useRef<EditorType>(null);
  // Serializes the current editor state to markdown and reports it upward.
  const outputOnUnmount = useCallback(() => {
    const editorState = editorRef.current.getEditorState();
    const editorSelection = editorState.getSelection();
    const editorBlocks = editorState.getCurrentContent().getBlocksAsArray();
    // Converts a (block key, in-block offset) pair to an absolute offset by
    // summing the lengths of all blocks before the keyed one.
    // NOTE(review): newline separators between blocks are not counted here —
    // presumably compensated elsewhere, or a plain-text offset is intended.
    const getOffset = (offset: number, key: string) => {
      let total = offset;
      for (let b = 0; b < editorBlocks.length; b++) {
        const thisBlock = editorBlocks[b];
        if (thisBlock.getKey() !== key) {
          total += thisBlock.getLength();
        } else {
          return total;
        }
      }
      return total;
    };
    const initialStart = getOffset(
      editorSelection.getStartOffset(),
      editorSelection.getStartKey()
    );
    const initialEnd = getOffset(
      editorSelection.getEndOffset(),
      editorSelection.getEndKey()
    );
    // Shared accumulator threaded through every blockToString call; `start`
    // and `end` come back adjusted to markdown-space offsets.
    const selection = {
      start: initialStart,
      end: initialEnd,
      pointer: 0,
      initialStart,
      initialEnd,
    };
    const output = editorBlocks
      .map((b) => blockToString(b, selection))
      .join("\n");
    onUnmount({
      output,
      start: selection.start,
      end: selection.end,
    });
  }, [onUnmount, editorRef]);
  // alt+w exits WYSIWYG mode (mirrors the alt+w entry hotkey in DemoTextArea).
  const eventListener = useCallback(
    (e: KeyboardEvent) => {
      if (e.code === "KeyW" && e.altKey) {
        outputOnUnmount();
        e.stopImmediatePropagation();
        e.preventDefault();
      }
    },
    [outputOnUnmount]
  );
  useDocumentKeyDown(eventListener);
  // Focus the editor as soon as it mounts.
  useEffect(() => {
    if (editorRef.current) {
      editorRef.current.focusEditor();
    }
  }, [editorRef]);
  const {
    defaultEditorState,
    defaultSelectionStart,
    defaultSelectionEnd,
  } = useMemo(() => parseValue({ initialValue, initialStart, initialEnd }), [
    initialValue,
    initialStart,
    initialEnd,
  ]);
  return (
    <>
      {/* Neutralize Roam/draft default styling and make toolbar buttons look
          disabled (clicking them blurs the editor and closes the mode). */}
      <style>
        {`.public-DraftStyleDefault-block {
  margin: 0;
}

.rdw-option-wrapper {
  cursor: not-allowed;
}

.rdw-option-wrapper.rdw-option-active:hover {
  box-shadow: 1px 1px 0px #BFBDBD inset;
}

.rdw-option-wrapper:hover {
  box-shadow: none;
}
`}
      </style>
      <Editor
        toolbar={{
          options: [
            "inline",
            /* "blockType", "textAlign", "link", "image"*/
          ],
          inline: {
            options: [
              "bold",
              "italic",
              // "monospace"
            ],
          },
          blockType: {
            inDropdown: false,
            options: ["Normal", "H1", "H2", "H3", "Code"],
          },
          textAlign: {
            options: ["left", "center", "right"],
          },
        }}
        toolbarCustomButtons={[<ToolbarWarning />]}
        editorClassName={
          "roam-block dont-unfocus-block hoverparent rm-block-text"
        }
        wrapperStyle={{
          display: "flex",
          // Toolbar is rendered below the editor.
          flexDirection: "column-reverse",
        }}
        ref={(ref) => (editorRef.current = ref as EditorType)}
        defaultEditorState={defaultEditorState}
        defaultSelectionStart={defaultSelectionStart}
        defaultSelectionEnd={defaultSelectionEnd}
        onBlur={outputOnUnmount}
      />
    </>
  );
};

/**
 * Mounts WYSIWYGMode into container `b`, replacing the Roam textarea's
 * editing experience. When the mode exits it tears the container down,
 * writes the produced markdown back to the Roam block via
 * window.roamAlphaAPI.updateBlock, and restores the cursor in the textarea.
 */
export const renderWYSIWYGMode = (
  b: HTMLElement,
  textarea: HTMLTextAreaElement,
  onUnmount: () => void
): void =>
  ReactDOM.render(
    <WYSIWYGMode
      initialValue={textarea.value}
      initialStart={textarea.selectionStart}
      initialEnd={textarea.selectionEnd}
      onUnmount={async ({ output, start, end }) => {
        ReactDOM.unmountComponentAtNode(b);
        b.parentElement.removeChild(b);
        onUnmount();
        const { blockUid } = getUids(textarea);
        window.roamAlphaAPI.updateBlock({
          block: { string: output, uid: blockUid },
        });
        fixCursorById({ id: textarea.id, start, end, focus: true });
      }}
    />,
    b
  );

// Plain textarea used by the demo page; alt+w hands control to WYSIWYG mode.
const DemoTextArea = React.forwardRef<
  HTMLTextAreaElement,
  {
    value: string;
    setValue: (v: string) => void;
    wysiwyg: () => void;
  }
>(({ wysiwyg, value, setValue }, ref) => {
  const eventListener = useCallback(
    (e: KeyboardEvent) => {
      if (e.code === "KeyW" && e.altKey) {
        wysiwyg();
        e.stopImmediatePropagation();
        e.preventDefault();
      }
    },
    [wysiwyg]
  );
  useDocumentKeyDown(eventListener);
  return (
    <TextArea
      growVertically={true}
      value={value}
      onChange={(e) => setValue(e.target.value)}
      id={"blockId"}
      style={{ width: "100%", resize: "none" }}
      inputRef={ref}
    />
  );
});

/**
 * Standalone demo harness that toggles between the textarea and the
 * WYSIWYG editor. The two-phase isOutputting -> isSelecting effect dance
 * restores focus and selection to the textarea across the re-render that
 * happens when WYSIWYG mode exits (selection is applied twice because the
 * first setSelectionRange may run before the new value has committed).
 */
export const DemoWYSIWYGMode = (): JSX.Element => {
  const [isBlock, setIsBlock] = useState(true);
  const [isOutputting, setIsOutputting] = useState(false);
  const [isSelecting, setIsSelecting] = useState(false);
  const [selection, setSelection] = useState({ start: 0, end: 0 });
  const [value, setValue] = useState("");
  const [initialValue, setInitialValue] = useState("");
  // Enter WYSIWYG mode with a snapshot of the current textarea value.
  const wysiwyg = useCallback(() => {
    setInitialValue(value);
    setIsBlock(false);
  }, [setIsBlock, setInitialValue, value]);
  const textareaRef = useRef<HTMLTextAreaElement>(null);
  useEffect(() => {
    if (isOutputting) {
      textareaRef.current.focus();
      setIsOutputting(false);
      setIsSelecting(true);
      textareaRef.current.setSelectionRange(selection.start, selection.end);
    }
  }, [setIsOutputting, textareaRef, isOutputting, setIsSelecting]);
  useEffect(() => {
    if (isSelecting) {
      setIsSelecting(false);
      textareaRef.current.setSelectionRange(selection.start, selection.end);
    }
  }, [isSelecting, textareaRef, selection, setIsSelecting]);
  // Receives the serialized markdown when WYSIWYG mode exits and kicks off
  // the focus/selection restoration sequence above.
  const onUnmount = useCallback(
    ({ output, start, end }) => {
      setIsBlock(true);
      setValue(output);
      setIsOutputting(true);
      setSelection({ start, end });
    },
    [setIsOutputting, setValue, setIsBlock]
  );
  return (
    <div style={{ border: "1px solid black" }}>
      {isBlock ? (
        <DemoTextArea
          wysiwyg={wysiwyg}
          ref={textareaRef}
          value={value}
          setValue={setValue}
        />
      ) : (
        <WYSIWYGMode
          initialValue={initialValue}
          initialStart={textareaRef.current.selectionStart}
          initialEnd={textareaRef.current.selectionEnd}
          onUnmount={onUnmount}
        />
      )}
    </div>
  );
};

export default WYSIWYGMode;
the_stack
// classy-ui Babel macro/plugin entry point.
//
// At module load it evaluates the user config and (outside tests) writes
// generated TypeScript declarations for the configured classes. The default
// export is a Babel plugin that finds `import ... from 'classy-ui'`
// statements, rewrites every reference to the imported helpers (compose,
// tokens, group, themes, screen helpers) into plain class-name string
// expressions, and removes the import. In production it additionally
// accumulates the used classes globally and writes a minified CSS file on
// process exit.
import { addNamed } from '@babel/helper-module-imports';
import autoprefixer from 'autoprefixer';
import CleanCSS from 'clean-css';
import { writeFileSync } from 'fs';
import { join } from 'path';
import postcss from 'postcss';

import { transform as transformClassesToTypes } from '../config/transform-classes-to-types';
import { transform as transformConfigToClasses } from '../config/transform-config-to-classes';
import { IClassesByType, IExtractedClass, IExtractedClasses } from '../types';
import {
  createClassObjects,
  createProductionClassObjects,
  createProductionCss,
  evaluateConfig,
  getUserConfig,
  injectDevelopment,
  injectProduction,
} from '../utils';

// Evaluated once per process; every transformed file shares these.
const config = evaluateConfig(getUserConfig());
const classes = transformConfigToClasses(config);

// Emit .d.ts files describing the configured classes so editors get
// completion for `classy-ui` and `classy-ui/macro`. Skipped in tests.
if (process.env.NODE_ENV !== 'test') {
  try {
    const rootPath = join(process.cwd(), 'classy-ui.d.ts');
    const libPath = join(__dirname, '..', '..', 'lib', 'classy-ui.d.ts');
    const types = transformClassesToTypes(config);

    writeFileSync(
      rootPath,
      `
declare module 'classy-ui' {\n${types}\n}

declare module 'classy-ui/macro' {
  export * from 'classy-ui'
}
`,
    );
    writeFileSync(libPath, types);
  } catch {
    // Codesandbox or some other unwritable environment
  }
}

// Babel plugin factory.
export default (babel: any) => {
  const { types: t } = babel;

  return {
    name: 'classy-ui/plugin',
    visitor: {
      Program(programmPath: any, state: any) {
        programmPath.traverse({
          ImportDeclaration(path: any) {
            if (path?.node?.source?.value === 'classy-ui') {
              // Only named import specifiers are supported (no default or
              // namespace imports).
              const imports = path
                .get('specifiers')
                .filter((s: any) => {
                  if (!t.isImportSpecifier(s.node)) {
                    throw s.buildCodeFrameError(`This style of importing isn't allowed.`);
                  }
                  return true;
                })
                .map((s: any) => ({ local: s.node.local.name, name: s.node.imported.name }));
              // Map each imported (canonical) name to every place the local
              // binding is referenced in this file.
              const referencePaths = imports.reduce((aggr: any, { local, name }: { local: string; name: string }) => {
                const binding = path.scope.getBinding(local);
                if (binding && Boolean(binding.referencePaths.length)) {
                  aggr[name] = binding.referencePaths;
                }
                return aggr;
              }, {});
              processReferences(babel, state, referencePaths);
              // The import is fully consumed by the transform.
              path.remove();
            }
          },
        });
      },
    },
  };
};

// Guards the process-exit CSS writer so it is registered at most once even
// though processReferences runs for every transformed file.
let hasRegisteredExitHook = false;
// Accumulates every class used across all files in a production build;
// flushed to classy-ui.css by the exit hook.
export const productionClassesByType: IClassesByType = {
  screens: {},
  common: {},
  themeTokens: {},
  rootTokens: {},
};

/*
  This can be improved by rather structuring classnames as unique
  with tokens and decorators underneath. So that A__A (color__RED) and B__A (background-color__RED)
  can live together
*/
export const evaluatedProductionShortnames = {
  classnames: [] as string[],
  tokens: [] as string[],
  decorators: [] as string[],
};

/**
 * Rewrites all classy-ui references in one file.
 *
 * `refs` maps imported helper names (tokens/group/themes/compose plus one
 * key per configured screen) to their Babel reference paths. Each helper
 * family is replaced with string literals (or a runtime `fixSpecificity`
 * call for dynamic values); in development a `addClasses(...)` runtime call
 * is appended to the program body, in production the classes are collected
 * into the module-level accumulators instead.
 */
export function processReferences(babel: any, state: any, refs: any) {
  const { types: t } = babel;
  const filePath = state.file.opts.parserOpts.sourceFileName;
  const isProduction = babel.getEnv() === 'production';
  // Classes extracted from THIS file, keyed by composition then class id.
  const classCollection: IExtractedClasses = {};

  if (refs.tokens) {
    processTokens(refs.tokens, isProduction);
  }
  if (refs.group) {
    processGroup(refs.group);
  }
  if (refs.themes) {
    processThemes(refs.themes);
  }
  Object.keys(config.screens).forEach(screenCompose => {
    if (refs[screenCompose]) {
      // Process as compose but don't allow variables here
      processCompose(refs[screenCompose], false);
    }
  });
  if (refs.compose) {
    processCompose(refs.compose);
  }

  // We require access to the babel options, so have to do it here
  if (isProduction && !hasRegisteredExitHook) {
    hasRegisteredExitHook = true;
    // On exit, autoprefix + minify the accumulated CSS and write it out.
    process.on('exit', () => {
      writeFileSync(
        join(process.cwd(), state.opts.output || 'build', 'classy-ui.css'),
        new CleanCSS().minify(postcss([autoprefixer]).process(createProductionCss(productionClassesByType, config)).css)
          .styles,
      );
    });
  }

  if (isProduction && filePath) {
    // Production: merge this file's classes into the global accumulators.
    injectProduction(productionClassesByType, classCollection, classes, config);
  } else {
    // Development: append `addClasses([...])` so styles are injected at runtime.
    const runtimeCall = t.expressionStatement(
      t.callExpression(addNamed(state.file.path, 'addClasses', 'classy-ui/runtime'), [
        t.arrayExpression(
          injectDevelopment(classCollection, classes, config).map(value =>
            typeof value === 'string' ? t.stringLiteral(value) : t.numericLiteral(value),
          ),
        ),
      ]),
    );
    state.file.ast.program.body.push(runtimeCall);
  }

  // Replaces each `compose(...)`/screen call with an equivalent string
  // expression. `allowDynamicValuesInExpression` is false for screen
  // helpers, which only accept string literals.
  function processCompose(cRefs: any[], allowDynamicValuesInExpression = true) {
    cRefs.forEach((path: any) => {
      // Reject compose() nested directly inside another classy-ui compose().
      if (t.isCallExpression(path.parentPath.parent)) {
        const b = path.scope.getBinding(path.parent.callee.name);
        if (
          t.isImportSpecifier(b.path.node) &&
          b.path.parent.source.value.startsWith('classy-ui') &&
          b.identifier.name === 'compose'
        ) {
          throw path.buildCodeFrameError(`CLASSY-UI: don't nest c/compose calls`);
        }
      }
      const statementPath = path.parentPath;
      const args = statementPath.get('arguments');
      statementPath.replaceWith(convertToExpression(args, allowDynamicValuesInExpression));
    });
  }

  // Replaces `tokens.baseClass.token[.decorators...]` member chains with the
  // generated class-name string literal(s) and records the classes globally.
  function processTokens(tRefs: any[], isProductionProcess: boolean) {
    tRefs.forEach((tRef: any) => {
      if (!t.isMemberExpression(tRef.parent)) {
        throw tRef.buildCodeFrameError(`CLASSY-UI: t/tokens can't be used without a base class`);
      }
      const callExpr = tRef.findParent((p: any) => t.isCallExpression(p));
      if (!callExpr) {
        throw tRef.buildCodeFrameError(`CLASSY-UI: t/tokens must be used inside a compose/screen function`);
      }
      // The enclosing call's name (compose or a screen name) scopes the class.
      const composition = callExpr.node.callee.name as string;
      const memExpr = extractMemberExpression(tRef);
      if (memExpr.arr.length >= 2) {
        try {
          const [baseClass, token, ...decorators] = memExpr.arr;
          // Production uses shortname-generating variant; dev keeps readable names.
          const classObjects = isProductionProcess
            ? createProductionClassObjects(
                { composition, baseClass, token, decorators },
                classes,
                evaluatedProductionShortnames,
              )
            : createClassObjects({ composition, baseClass, token, decorators }, classes);
          classObjects.forEach(collectGlobally);
          // Trailing space lets adjacent class strings concatenate safely.
          memExpr.root.replaceWith(t.stringLiteral(`${classObjects.map(classObject => classObject.name).join(' ')} `));
        } catch (e) {
          throw memExpr.root.buildCodeFrameError(`CLASSY-UI: ${e.message}`);
        }
      } else {
        throw tRef.buildCodeFrameError(`CLASSY-UI: t/tokens must reference a base class and a token`);
      }
      return tRef;
    });
  }

  // Replaces bare `group` references (used as an argument, never invoked)
  // with their identifier name as a string literal.
  function processGroup(processRefs: any[]) {
    processRefs.forEach((ref: any) => {
      if (!t.isCallExpression(ref.parent)) {
        throw ref.buildCodeFrameError(`CLASSY-UI: group must be used inside c/compose`);
      }
      if (ref.parent.callee === ref.node) {
        throw ref.buildCodeFrameError(`CLASSY-UI: group should not be invoked`);
      }
      ref.replaceWith(t.stringLiteral(`${ref.node.name} `));
    });
  }

  // Replaces `themes.dark`-style member chains with "themes-dark " literals.
  function processThemes(processRefs: any[]) {
    processRefs.forEach((ref: any) => {
      if (t.isMemberExpression(ref.parent)) {
        const memberExpr = extractMemberExpression(ref);
        memberExpr.root.replaceWith(t.stringLiteral(`${ref.node.name}-${memberExpr.arr.join('-')} `));
      } else {
        throw ref.buildCodeFrameError(`CLASSY-UI: add the theme name here like themes.dark`);
      }
    });
  }

  // Folds a compose call's argument list into a single expression:
  // all-literal args collapse to one string literal; dynamic args either go
  // through the `fixSpecificity` runtime helper or are chained with `+`.
  function convertToExpression(classAttribs: any[], allowDynamicValuesInExpression = true) {
    if (classAttribs.length === 0) {
      return t.stringLiteral(' ');
    }
    let needsRuntime = false;
    const strings: string[] = [];
    const others: any[] = [];
    for (const itemPath of classAttribs) {
      if (t.isStringLiteral(itemPath.node)) {
        strings.push(itemPath.node.value);
      } else if (allowDynamicValuesInExpression) {
        needsRuntime = true;
        // Normalize `cond && "cls"` to `cond ? "cls" : " "` so falsy branches
        // still contribute a string.
        if (t.isLogicalExpression(itemPath.node) && itemPath.node.operator === '&&') {
          others.push(t.conditionalExpression(itemPath.node.left, itemPath.node.right, t.stringLiteral(' ')));
        } else {
          others.push(itemPath.node);
        }
      } else {
        throw itemPath.buildCodeFrameError(`CLASSY-UI: using dynamic values isn't allowed here`);
      }
    }

    // if there are only string literals just return them. This is a _short path_
    if (strings.length > 0 && others.length === 0) {
      return t.stringLiteral(strings.join(''));
    }
    let max: number;
    let start: number;
    if (strings.length === 0) {
      if (needsRuntime) {
        return t.callExpression(addNamed(state.file.path, 'fixSpecificity', 'classy-ui/runtime'), others);
      } else {
        // Right-fold the dynamic nodes into a `a + b + c` binary chain.
        max = others.length - 1;
        start = others[max];
        for (let i = max - 1; i >= 0; i--) {
          start = t.binaryExpression('+', others[i], start);
        }
        return start;
      }
    }
    if (needsRuntime) {
      return t.callExpression(addNamed(state.file.path, 'fixSpecificity', 'classy-ui/runtime'), [
        ...others,
        t.stringLiteral(strings.join('').trim()),
      ]);
    }
    // Mixed literals + non-runtime dynamics: chain dynamics, append literals.
    max = others.length - 1;
    start = others[max];
    for (let i = max - 1; i >= 0; i--) {
      start = t.binaryExpression('+', others[i], start);
    }
    return t.binaryExpression('+', start, t.stringLiteral(strings.join('')));
  }

  // Records an extracted class in this file's collection, keyed by
  // composition then class id.
  function collectGlobally(classObj: IExtractedClass) {
    if (!classCollection[classObj.composition]) {
      classCollection[classObj.composition] = {};
    }
    classCollection[classObj.composition][classObj.id] = classObj;
  }

  // Walks up from a reference through consecutive MemberExpressions,
  // returning the outermost path (`root`, the node to replace) and the
  // property names encountered, innermost first.
  function extractMemberExpression(tRefPath: any) {
    let prev = tRefPath;
    let path = prev.parentPath;
    const arr = [];
    while (path.node.type === 'MemberExpression') {
      if (path.node.property) {
        arr.push(path.node.property.name);
      }
      prev = path;
      path = path.parentPath;
    }
    return {
      root: prev,
      arr,
    };
  }
}
the_stack
import {Request} from '../lib/request'; import {Response} from '../lib/response'; import {AWSError} from '../lib/error'; import {Service} from '../lib/service'; import {ServiceConfigurationOptions} from '../lib/service'; import {ConfigBase as Config} from '../lib/config-base'; interface Blob {} declare class Athena extends Service { /** * Constructs a service object. This object has one method for each API operation. */ constructor(options?: Athena.Types.ClientConfiguration) config: Config & Athena.Types.ClientConfiguration; /** * Returns the details of a single named query or a list of up to 50 queries, which you provide as an array of query ID strings. Requires you to have access to the workgroup in which the queries were saved. Use ListNamedQueriesInput to get the list of named query IDs in the specified workgroup. If information could not be retrieved for a submitted query ID, information about the query ID submitted is listed under UnprocessedNamedQueryId. Named queries differ from executed queries. Use BatchGetQueryExecutionInput to get details about each unique query execution, and ListQueryExecutionsInput to get a list of query execution IDs. */ batchGetNamedQuery(params: Athena.Types.BatchGetNamedQueryInput, callback?: (err: AWSError, data: Athena.Types.BatchGetNamedQueryOutput) => void): Request<Athena.Types.BatchGetNamedQueryOutput, AWSError>; /** * Returns the details of a single named query or a list of up to 50 queries, which you provide as an array of query ID strings. Requires you to have access to the workgroup in which the queries were saved. Use ListNamedQueriesInput to get the list of named query IDs in the specified workgroup. If information could not be retrieved for a submitted query ID, information about the query ID submitted is listed under UnprocessedNamedQueryId. Named queries differ from executed queries. 
Use BatchGetQueryExecutionInput to get details about each unique query execution, and ListQueryExecutionsInput to get a list of query execution IDs. */ batchGetNamedQuery(callback?: (err: AWSError, data: Athena.Types.BatchGetNamedQueryOutput) => void): Request<Athena.Types.BatchGetNamedQueryOutput, AWSError>; /** * Returns the details of a single query execution or a list of up to 50 query executions, which you provide as an array of query execution ID strings. Requires you to have access to the workgroup in which the queries ran. To get a list of query execution IDs, use ListQueryExecutionsInput$WorkGroup. Query executions differ from named (saved) queries. Use BatchGetNamedQueryInput to get details about named queries. */ batchGetQueryExecution(params: Athena.Types.BatchGetQueryExecutionInput, callback?: (err: AWSError, data: Athena.Types.BatchGetQueryExecutionOutput) => void): Request<Athena.Types.BatchGetQueryExecutionOutput, AWSError>; /** * Returns the details of a single query execution or a list of up to 50 query executions, which you provide as an array of query execution ID strings. Requires you to have access to the workgroup in which the queries ran. To get a list of query execution IDs, use ListQueryExecutionsInput$WorkGroup. Query executions differ from named (saved) queries. Use BatchGetNamedQueryInput to get details about named queries. */ batchGetQueryExecution(callback?: (err: AWSError, data: Athena.Types.BatchGetQueryExecutionOutput) => void): Request<Athena.Types.BatchGetQueryExecutionOutput, AWSError>; /** * Creates (registers) a data catalog with the specified name and properties. Catalogs created are visible to all users of the same AWS account. */ createDataCatalog(params: Athena.Types.CreateDataCatalogInput, callback?: (err: AWSError, data: Athena.Types.CreateDataCatalogOutput) => void): Request<Athena.Types.CreateDataCatalogOutput, AWSError>; /** * Creates (registers) a data catalog with the specified name and properties. 
Catalogs created are visible to all users of the same AWS account. */ createDataCatalog(callback?: (err: AWSError, data: Athena.Types.CreateDataCatalogOutput) => void): Request<Athena.Types.CreateDataCatalogOutput, AWSError>; /** * Creates a named query in the specified workgroup. Requires that you have access to the workgroup. For code samples using the AWS SDK for Java, see Examples and Code Samples in the Amazon Athena User Guide. */ createNamedQuery(params: Athena.Types.CreateNamedQueryInput, callback?: (err: AWSError, data: Athena.Types.CreateNamedQueryOutput) => void): Request<Athena.Types.CreateNamedQueryOutput, AWSError>; /** * Creates a named query in the specified workgroup. Requires that you have access to the workgroup. For code samples using the AWS SDK for Java, see Examples and Code Samples in the Amazon Athena User Guide. */ createNamedQuery(callback?: (err: AWSError, data: Athena.Types.CreateNamedQueryOutput) => void): Request<Athena.Types.CreateNamedQueryOutput, AWSError>; /** * Creates a workgroup with the specified name. */ createWorkGroup(params: Athena.Types.CreateWorkGroupInput, callback?: (err: AWSError, data: Athena.Types.CreateWorkGroupOutput) => void): Request<Athena.Types.CreateWorkGroupOutput, AWSError>; /** * Creates a workgroup with the specified name. */ createWorkGroup(callback?: (err: AWSError, data: Athena.Types.CreateWorkGroupOutput) => void): Request<Athena.Types.CreateWorkGroupOutput, AWSError>; /** * Deletes a data catalog. */ deleteDataCatalog(params: Athena.Types.DeleteDataCatalogInput, callback?: (err: AWSError, data: Athena.Types.DeleteDataCatalogOutput) => void): Request<Athena.Types.DeleteDataCatalogOutput, AWSError>; /** * Deletes a data catalog. */ deleteDataCatalog(callback?: (err: AWSError, data: Athena.Types.DeleteDataCatalogOutput) => void): Request<Athena.Types.DeleteDataCatalogOutput, AWSError>; /** * Deletes the named query if you have access to the workgroup in which the query was saved. 
For code samples using the AWS SDK for Java, see Examples and Code Samples in the Amazon Athena User Guide. */ deleteNamedQuery(params: Athena.Types.DeleteNamedQueryInput, callback?: (err: AWSError, data: Athena.Types.DeleteNamedQueryOutput) => void): Request<Athena.Types.DeleteNamedQueryOutput, AWSError>; /** * Deletes the named query if you have access to the workgroup in which the query was saved. For code samples using the AWS SDK for Java, see Examples and Code Samples in the Amazon Athena User Guide. */ deleteNamedQuery(callback?: (err: AWSError, data: Athena.Types.DeleteNamedQueryOutput) => void): Request<Athena.Types.DeleteNamedQueryOutput, AWSError>; /** * Deletes the workgroup with the specified name. The primary workgroup cannot be deleted. */ deleteWorkGroup(params: Athena.Types.DeleteWorkGroupInput, callback?: (err: AWSError, data: Athena.Types.DeleteWorkGroupOutput) => void): Request<Athena.Types.DeleteWorkGroupOutput, AWSError>; /** * Deletes the workgroup with the specified name. The primary workgroup cannot be deleted. */ deleteWorkGroup(callback?: (err: AWSError, data: Athena.Types.DeleteWorkGroupOutput) => void): Request<Athena.Types.DeleteWorkGroupOutput, AWSError>; /** * Returns the specified data catalog. */ getDataCatalog(params: Athena.Types.GetDataCatalogInput, callback?: (err: AWSError, data: Athena.Types.GetDataCatalogOutput) => void): Request<Athena.Types.GetDataCatalogOutput, AWSError>; /** * Returns the specified data catalog. */ getDataCatalog(callback?: (err: AWSError, data: Athena.Types.GetDataCatalogOutput) => void): Request<Athena.Types.GetDataCatalogOutput, AWSError>; /** * Returns a database object for the specified database and data catalog. */ getDatabase(params: Athena.Types.GetDatabaseInput, callback?: (err: AWSError, data: Athena.Types.GetDatabaseOutput) => void): Request<Athena.Types.GetDatabaseOutput, AWSError>; /** * Returns a database object for the specified database and data catalog. 
*/ getDatabase(callback?: (err: AWSError, data: Athena.Types.GetDatabaseOutput) => void): Request<Athena.Types.GetDatabaseOutput, AWSError>; /** * Returns information about a single query. Requires that you have access to the workgroup in which the query was saved. */ getNamedQuery(params: Athena.Types.GetNamedQueryInput, callback?: (err: AWSError, data: Athena.Types.GetNamedQueryOutput) => void): Request<Athena.Types.GetNamedQueryOutput, AWSError>; /** * Returns information about a single query. Requires that you have access to the workgroup in which the query was saved. */ getNamedQuery(callback?: (err: AWSError, data: Athena.Types.GetNamedQueryOutput) => void): Request<Athena.Types.GetNamedQueryOutput, AWSError>; /** * Returns information about a single execution of a query if you have access to the workgroup in which the query ran. Each time a query executes, information about the query execution is saved with a unique ID. */ getQueryExecution(params: Athena.Types.GetQueryExecutionInput, callback?: (err: AWSError, data: Athena.Types.GetQueryExecutionOutput) => void): Request<Athena.Types.GetQueryExecutionOutput, AWSError>; /** * Returns information about a single execution of a query if you have access to the workgroup in which the query ran. Each time a query executes, information about the query execution is saved with a unique ID. */ getQueryExecution(callback?: (err: AWSError, data: Athena.Types.GetQueryExecutionOutput) => void): Request<Athena.Types.GetQueryExecutionOutput, AWSError>; /** * Streams the results of a single query execution specified by QueryExecutionId from the Athena query results location in Amazon S3. For more information, see Query Results in the Amazon Athena User Guide. This request does not execute the query but returns results. Use StartQueryExecution to run a query. 
To stream query results successfully, the IAM principal with permission to call GetQueryResults also must have permissions to the Amazon S3 GetObject action for the Athena query results location. IAM principals with permission to the Amazon S3 GetObject action for the query results location are able to retrieve query results from Amazon S3 even if permission to the GetQueryResults action is denied. To restrict user or role access, ensure that Amazon S3 permissions to the Athena query location are denied. */ getQueryResults(params: Athena.Types.GetQueryResultsInput, callback?: (err: AWSError, data: Athena.Types.GetQueryResultsOutput) => void): Request<Athena.Types.GetQueryResultsOutput, AWSError>; /** * Streams the results of a single query execution specified by QueryExecutionId from the Athena query results location in Amazon S3. For more information, see Query Results in the Amazon Athena User Guide. This request does not execute the query but returns results. Use StartQueryExecution to run a query. To stream query results successfully, the IAM principal with permission to call GetQueryResults also must have permissions to the Amazon S3 GetObject action for the Athena query results location. IAM principals with permission to the Amazon S3 GetObject action for the query results location are able to retrieve query results from Amazon S3 even if permission to the GetQueryResults action is denied. To restrict user or role access, ensure that Amazon S3 permissions to the Athena query location are denied. */ getQueryResults(callback?: (err: AWSError, data: Athena.Types.GetQueryResultsOutput) => void): Request<Athena.Types.GetQueryResultsOutput, AWSError>; /** * Returns table metadata for the specified catalog, database, and table. 
*/ getTableMetadata(params: Athena.Types.GetTableMetadataInput, callback?: (err: AWSError, data: Athena.Types.GetTableMetadataOutput) => void): Request<Athena.Types.GetTableMetadataOutput, AWSError>; /** * Returns table metadata for the specified catalog, database, and table. */ getTableMetadata(callback?: (err: AWSError, data: Athena.Types.GetTableMetadataOutput) => void): Request<Athena.Types.GetTableMetadataOutput, AWSError>; /** * Returns information about the workgroup with the specified name. */ getWorkGroup(params: Athena.Types.GetWorkGroupInput, callback?: (err: AWSError, data: Athena.Types.GetWorkGroupOutput) => void): Request<Athena.Types.GetWorkGroupOutput, AWSError>; /** * Returns information about the workgroup with the specified name. */ getWorkGroup(callback?: (err: AWSError, data: Athena.Types.GetWorkGroupOutput) => void): Request<Athena.Types.GetWorkGroupOutput, AWSError>; /** * Lists the data catalogs in the current AWS account. */ listDataCatalogs(params: Athena.Types.ListDataCatalogsInput, callback?: (err: AWSError, data: Athena.Types.ListDataCatalogsOutput) => void): Request<Athena.Types.ListDataCatalogsOutput, AWSError>; /** * Lists the data catalogs in the current AWS account. */ listDataCatalogs(callback?: (err: AWSError, data: Athena.Types.ListDataCatalogsOutput) => void): Request<Athena.Types.ListDataCatalogsOutput, AWSError>; /** * Lists the databases in the specified data catalog. */ listDatabases(params: Athena.Types.ListDatabasesInput, callback?: (err: AWSError, data: Athena.Types.ListDatabasesOutput) => void): Request<Athena.Types.ListDatabasesOutput, AWSError>; /** * Lists the databases in the specified data catalog. */ listDatabases(callback?: (err: AWSError, data: Athena.Types.ListDatabasesOutput) => void): Request<Athena.Types.ListDatabasesOutput, AWSError>; /** * Provides a list of available query IDs only for queries saved in the specified workgroup. Requires that you have access to the specified workgroup. 
If a workgroup is not specified, lists the saved queries for the primary workgroup. For code samples using the AWS SDK for Java, see Examples and Code Samples in the Amazon Athena User Guide. */ listNamedQueries(params: Athena.Types.ListNamedQueriesInput, callback?: (err: AWSError, data: Athena.Types.ListNamedQueriesOutput) => void): Request<Athena.Types.ListNamedQueriesOutput, AWSError>; /** * Provides a list of available query IDs only for queries saved in the specified workgroup. Requires that you have access to the specified workgroup. If a workgroup is not specified, lists the saved queries for the primary workgroup. For code samples using the AWS SDK for Java, see Examples and Code Samples in the Amazon Athena User Guide. */ listNamedQueries(callback?: (err: AWSError, data: Athena.Types.ListNamedQueriesOutput) => void): Request<Athena.Types.ListNamedQueriesOutput, AWSError>; /** * Provides a list of available query execution IDs for the queries in the specified workgroup. If a workgroup is not specified, returns a list of query execution IDs for the primary workgroup. Requires you to have access to the workgroup in which the queries ran. For code samples using the AWS SDK for Java, see Examples and Code Samples in the Amazon Athena User Guide. */ listQueryExecutions(params: Athena.Types.ListQueryExecutionsInput, callback?: (err: AWSError, data: Athena.Types.ListQueryExecutionsOutput) => void): Request<Athena.Types.ListQueryExecutionsOutput, AWSError>; /** * Provides a list of available query execution IDs for the queries in the specified workgroup. If a workgroup is not specified, returns a list of query execution IDs for the primary workgroup. Requires you to have access to the workgroup in which the queries ran. For code samples using the AWS SDK for Java, see Examples and Code Samples in the Amazon Athena User Guide. 
*/ listQueryExecutions(callback?: (err: AWSError, data: Athena.Types.ListQueryExecutionsOutput) => void): Request<Athena.Types.ListQueryExecutionsOutput, AWSError>; /** * Lists the metadata for the tables in the specified data catalog database. */ listTableMetadata(params: Athena.Types.ListTableMetadataInput, callback?: (err: AWSError, data: Athena.Types.ListTableMetadataOutput) => void): Request<Athena.Types.ListTableMetadataOutput, AWSError>; /** * Lists the metadata for the tables in the specified data catalog database. */ listTableMetadata(callback?: (err: AWSError, data: Athena.Types.ListTableMetadataOutput) => void): Request<Athena.Types.ListTableMetadataOutput, AWSError>; /** * Lists the tags associated with an Athena workgroup or data catalog resource. */ listTagsForResource(params: Athena.Types.ListTagsForResourceInput, callback?: (err: AWSError, data: Athena.Types.ListTagsForResourceOutput) => void): Request<Athena.Types.ListTagsForResourceOutput, AWSError>; /** * Lists the tags associated with an Athena workgroup or data catalog resource. */ listTagsForResource(callback?: (err: AWSError, data: Athena.Types.ListTagsForResourceOutput) => void): Request<Athena.Types.ListTagsForResourceOutput, AWSError>; /** * Lists available workgroups for the account. */ listWorkGroups(params: Athena.Types.ListWorkGroupsInput, callback?: (err: AWSError, data: Athena.Types.ListWorkGroupsOutput) => void): Request<Athena.Types.ListWorkGroupsOutput, AWSError>; /** * Lists available workgroups for the account. */ listWorkGroups(callback?: (err: AWSError, data: Athena.Types.ListWorkGroupsOutput) => void): Request<Athena.Types.ListWorkGroupsOutput, AWSError>; /** * Runs the SQL query statements contained in the Query. Requires you to have access to the workgroup in which the query ran. Running queries against an external catalog requires GetDataCatalog permission to the catalog. 
For code samples using the AWS SDK for Java, see Examples and Code Samples in the Amazon Athena User Guide. */ startQueryExecution(params: Athena.Types.StartQueryExecutionInput, callback?: (err: AWSError, data: Athena.Types.StartQueryExecutionOutput) => void): Request<Athena.Types.StartQueryExecutionOutput, AWSError>; /** * Runs the SQL query statements contained in the Query. Requires you to have access to the workgroup in which the query ran. Running queries against an external catalog requires GetDataCatalog permission to the catalog. For code samples using the AWS SDK for Java, see Examples and Code Samples in the Amazon Athena User Guide. */ startQueryExecution(callback?: (err: AWSError, data: Athena.Types.StartQueryExecutionOutput) => void): Request<Athena.Types.StartQueryExecutionOutput, AWSError>; /** * Stops a query execution. Requires you to have access to the workgroup in which the query ran. For code samples using the AWS SDK for Java, see Examples and Code Samples in the Amazon Athena User Guide. */ stopQueryExecution(params: Athena.Types.StopQueryExecutionInput, callback?: (err: AWSError, data: Athena.Types.StopQueryExecutionOutput) => void): Request<Athena.Types.StopQueryExecutionOutput, AWSError>; /** * Stops a query execution. Requires you to have access to the workgroup in which the query ran. For code samples using the AWS SDK for Java, see Examples and Code Samples in the Amazon Athena User Guide. */ stopQueryExecution(callback?: (err: AWSError, data: Athena.Types.StopQueryExecutionOutput) => void): Request<Athena.Types.StopQueryExecutionOutput, AWSError>; /** * Adds one or more tags to an Athena resource. A tag is a label that you assign to a resource. In Athena, a resource can be a workgroup or data catalog. Each tag consists of a key and an optional value, both of which you define. For example, you can use tags to categorize Athena workgroups or data catalogs by purpose, owner, or environment. 
Use a consistent set of tag keys to make it easier to search and filter workgroups or data catalogs in your account. For best practices, see Tagging Best Practices. Tag keys can be from 1 to 128 UTF-8 Unicode characters, and tag values can be from 0 to 256 UTF-8 Unicode characters. Tags can use letters and numbers representable in UTF-8, and the following characters: + - = . _ : / @. Tag keys and values are case-sensitive. Tag keys must be unique per resource. If you specify more than one tag, separate them by commas. */ tagResource(params: Athena.Types.TagResourceInput, callback?: (err: AWSError, data: Athena.Types.TagResourceOutput) => void): Request<Athena.Types.TagResourceOutput, AWSError>; /** * Adds one or more tags to an Athena resource. A tag is a label that you assign to a resource. In Athena, a resource can be a workgroup or data catalog. Each tag consists of a key and an optional value, both of which you define. For example, you can use tags to categorize Athena workgroups or data catalogs by purpose, owner, or environment. Use a consistent set of tag keys to make it easier to search and filter workgroups or data catalogs in your account. For best practices, see Tagging Best Practices. Tag keys can be from 1 to 128 UTF-8 Unicode characters, and tag values can be from 0 to 256 UTF-8 Unicode characters. Tags can use letters and numbers representable in UTF-8, and the following characters: + - = . _ : / @. Tag keys and values are case-sensitive. Tag keys must be unique per resource. If you specify more than one tag, separate them by commas. */ tagResource(callback?: (err: AWSError, data: Athena.Types.TagResourceOutput) => void): Request<Athena.Types.TagResourceOutput, AWSError>; /** * Removes one or more tags from a data catalog or workgroup resource. 
*/ untagResource(params: Athena.Types.UntagResourceInput, callback?: (err: AWSError, data: Athena.Types.UntagResourceOutput) => void): Request<Athena.Types.UntagResourceOutput, AWSError>; /** * Removes one or more tags from a data catalog or workgroup resource. */ untagResource(callback?: (err: AWSError, data: Athena.Types.UntagResourceOutput) => void): Request<Athena.Types.UntagResourceOutput, AWSError>; /** * Updates the data catalog that has the specified name. */ updateDataCatalog(params: Athena.Types.UpdateDataCatalogInput, callback?: (err: AWSError, data: Athena.Types.UpdateDataCatalogOutput) => void): Request<Athena.Types.UpdateDataCatalogOutput, AWSError>; /** * Updates the data catalog that has the specified name. */ updateDataCatalog(callback?: (err: AWSError, data: Athena.Types.UpdateDataCatalogOutput) => void): Request<Athena.Types.UpdateDataCatalogOutput, AWSError>; /** * Updates the workgroup with the specified name. The workgroup's name cannot be changed. */ updateWorkGroup(params: Athena.Types.UpdateWorkGroupInput, callback?: (err: AWSError, data: Athena.Types.UpdateWorkGroupOutput) => void): Request<Athena.Types.UpdateWorkGroupOutput, AWSError>; /** * Updates the workgroup with the specified name. The workgroup's name cannot be changed. */ updateWorkGroup(callback?: (err: AWSError, data: Athena.Types.UpdateWorkGroupOutput) => void): Request<Athena.Types.UpdateWorkGroupOutput, AWSError>; } declare namespace Athena { export type AmazonResourceName = string; export interface BatchGetNamedQueryInput { /** * An array of query IDs. */ NamedQueryIds: NamedQueryIdList; } export interface BatchGetNamedQueryOutput { /** * Information about the named query IDs submitted. */ NamedQueries?: NamedQueryList; /** * Information about provided query IDs. */ UnprocessedNamedQueryIds?: UnprocessedNamedQueryIdList; } export interface BatchGetQueryExecutionInput { /** * An array of query execution IDs. 
*/ QueryExecutionIds: QueryExecutionIdList; } export interface BatchGetQueryExecutionOutput { /** * Information about a query execution. */ QueryExecutions?: QueryExecutionList; /** * Information about the query executions that failed to run. */ UnprocessedQueryExecutionIds?: UnprocessedQueryExecutionIdList; } export type Boolean = boolean; export type BoxedBoolean = boolean; export type BytesScannedCutoffValue = number; export type CatalogNameString = string; export interface Column { /** * The name of the column. */ Name: NameString; /** * The data type of the column. */ Type?: TypeString; /** * Optional information about the column. */ Comment?: CommentString; } export interface ColumnInfo { /** * The catalog to which the query results belong. */ CatalogName?: String; /** * The schema name (database name) to which the query results belong. */ SchemaName?: String; /** * The table name for the query results. */ TableName?: String; /** * The name of the column. */ Name: String; /** * A column label. */ Label?: String; /** * The data type of the column. */ Type: String; /** * For DECIMAL data types, specifies the total number of digits, up to 38. For performance reasons, we recommend up to 18 digits. */ Precision?: Integer; /** * For DECIMAL data types, specifies the total number of digits in the fractional part of the value. Defaults to 0. */ Scale?: Integer; /** * Indicates the column's nullable status. */ Nullable?: ColumnNullable; /** * Indicates whether values in the column are case-sensitive. */ CaseSensitive?: Boolean; } export type ColumnInfoList = ColumnInfo[]; export type ColumnList = Column[]; export type ColumnNullable = "NOT_NULL"|"NULLABLE"|"UNKNOWN"|string; export type CommentString = string; export interface CreateDataCatalogInput { /** * The name of the data catalog to create. The catalog name must be unique for the AWS account and can use a maximum of 128 alphanumeric, underscore, at sign, or hyphen characters. 
*/ Name: CatalogNameString; /** * The type of data catalog to create: LAMBDA for a federated catalog, GLUE for AWS Glue Catalog, or HIVE for an external hive metastore. */ Type: DataCatalogType; /** * A description of the data catalog to be created. */ Description?: DescriptionString; /** * Specifies the Lambda function or functions to use for creating the data catalog. This is a mapping whose values depend on the catalog type. For the HIVE data catalog type, use the following syntax. The metadata-function parameter is required. The sdk-version parameter is optional and defaults to the currently supported version. metadata-function=lambda_arn, sdk-version=version_number For the LAMBDA data catalog type, use one of the following sets of required parameters, but not both. If you have one Lambda function that processes metadata and another for reading the actual data, use the following syntax. Both parameters are required. metadata-function=lambda_arn, record-function=lambda_arn If you have a composite Lambda function that processes both metadata and data, use the following syntax to specify your Lambda function. function=lambda_arn The GLUE type has no parameters. */ Parameters?: ParametersMap; /** * A list of comma separated tags to add to the data catalog that is created. */ Tags?: TagList; } export interface CreateDataCatalogOutput { } export interface CreateNamedQueryInput { /** * The query name. */ Name: NameString; /** * The query description. */ Description?: DescriptionString; /** * The database to which the query belongs. */ Database: DatabaseString; /** * The contents of the query with all query statements. */ QueryString: QueryString; /** * A unique case-sensitive string used to ensure the request to create the query is idempotent (executes only once). If another CreateNamedQuery request is received, the same response is returned and another query is not created. If a parameter has changed, for example, the QueryString, an error is returned. 
This token is listed as not required because AWS SDKs (for example the AWS SDK for Java) auto-generate the token for users. If you are not using the AWS SDK or the AWS CLI, you must provide this token or the action will fail. */ ClientRequestToken?: IdempotencyToken; /** * The name of the workgroup in which the named query is being created. */ WorkGroup?: WorkGroupName; } export interface CreateNamedQueryOutput { /** * The unique ID of the query. */ NamedQueryId?: NamedQueryId; } export interface CreateWorkGroupInput { /** * The workgroup name. */ Name: WorkGroupName; /** * The configuration for the workgroup, which includes the location in Amazon S3 where query results are stored, the encryption configuration, if any, used for encrypting query results, whether the Amazon CloudWatch Metrics are enabled for the workgroup, the limit for the amount of bytes scanned (cutoff) per query, if it is specified, and whether workgroup's settings (specified with EnforceWorkGroupConfiguration) in the WorkGroupConfiguration override client-side settings. See WorkGroupConfiguration$EnforceWorkGroupConfiguration. */ Configuration?: WorkGroupConfiguration; /** * The workgroup description. */ Description?: WorkGroupDescriptionString; /** * A list of comma separated tags to add to the workgroup that is created. */ Tags?: TagList; } export interface CreateWorkGroupOutput { } export interface DataCatalog { /** * The name of the data catalog. The catalog name must be unique for the AWS account and can use a maximum of 128 alphanumeric, underscore, at sign, or hyphen characters. */ Name: CatalogNameString; /** * An optional description of the data catalog. */ Description?: DescriptionString; /** * The type of data catalog: LAMBDA for a federated catalog, GLUE for AWS Glue Catalog, or HIVE for an external hive metastore. */ Type: DataCatalogType; /** * Specifies the Lambda function or functions to use for the data catalog. This is a mapping whose values depend on the catalog type. 
For the HIVE data catalog type, use the following syntax. The metadata-function parameter is required. The sdk-version parameter is optional and defaults to the currently supported version. metadata-function=lambda_arn, sdk-version=version_number For the LAMBDA data catalog type, use one of the following sets of required parameters, but not both. If you have one Lambda function that processes metadata and another for reading the actual data, use the following syntax. Both parameters are required. metadata-function=lambda_arn, record-function=lambda_arn If you have a composite Lambda function that processes both metadata and data, use the following syntax to specify your Lambda function. function=lambda_arn The GLUE type has no parameters. */ Parameters?: ParametersMap; } export interface DataCatalogSummary { /** * The name of the data catalog. */ CatalogName?: CatalogNameString; /** * The data catalog type. */ Type?: DataCatalogType; } export type DataCatalogSummaryList = DataCatalogSummary[]; export type DataCatalogType = "LAMBDA"|"GLUE"|"HIVE"|string; export interface Database { /** * The name of the database. */ Name: NameString; /** * An optional description of the database. */ Description?: DescriptionString; /** * A set of custom key/value pairs. */ Parameters?: ParametersMap; } export type DatabaseList = Database[]; export type DatabaseString = string; export type _Date = Date; export interface Datum { /** * The value of the datum. */ VarCharValue?: datumString; } export interface DeleteDataCatalogInput { /** * The name of the data catalog to delete. */ Name: CatalogNameString; } export interface DeleteDataCatalogOutput { } export interface DeleteNamedQueryInput { /** * The unique ID of the query to delete. */ NamedQueryId: NamedQueryId; } export interface DeleteNamedQueryOutput { } export interface DeleteWorkGroupInput { /** * The unique name of the workgroup to delete. 
*/ WorkGroup: WorkGroupName; /** * The option to delete the workgroup and its contents even if the workgroup contains any named queries. */ RecursiveDeleteOption?: BoxedBoolean; } export interface DeleteWorkGroupOutput { } export type DescriptionString = string; export interface EncryptionConfiguration { /** * Indicates whether Amazon S3 server-side encryption with Amazon S3-managed keys (SSE-S3), server-side encryption with KMS-managed keys (SSE-KMS), or client-side encryption with KMS-managed keys (CSE-KMS) is used. If a query runs in a workgroup and the workgroup overrides client-side settings, then the workgroup's setting for encryption is used. It specifies whether query results must be encrypted, for all queries that run in this workgroup. */ EncryptionOption: EncryptionOption; /** * For SSE-KMS and CSE-KMS, this is the KMS key ARN or ID. */ KmsKey?: String; } export type EncryptionOption = "SSE_S3"|"SSE_KMS"|"CSE_KMS"|string; export type ErrorCode = string; export type ErrorMessage = string; export type ExpressionString = string; export interface GetDataCatalogInput { /** * The name of the data catalog to return. */ Name: CatalogNameString; } export interface GetDataCatalogOutput { /** * The data catalog returned. */ DataCatalog?: DataCatalog; } export interface GetDatabaseInput { /** * The name of the data catalog that contains the database to return. */ CatalogName: CatalogNameString; /** * The name of the database to return. */ DatabaseName: NameString; } export interface GetDatabaseOutput { /** * The database returned. */ Database?: Database; } export interface GetNamedQueryInput { /** * The unique ID of the query. Use ListNamedQueries to get query IDs. */ NamedQueryId: NamedQueryId; } export interface GetNamedQueryOutput { /** * Information about the query. */ NamedQuery?: NamedQuery; } export interface GetQueryExecutionInput { /** * The unique ID of the query execution. 
*/ QueryExecutionId: QueryExecutionId; } export interface GetQueryExecutionOutput { /** * Information about the query execution. */ QueryExecution?: QueryExecution; } export interface GetQueryResultsInput { /** * The unique ID of the query execution. */ QueryExecutionId: QueryExecutionId; /** * A token generated by the Athena service that specifies where to continue pagination if a previous request was truncated. To obtain the next set of pages, pass in the NextToken from the response object of the previous page call. */ NextToken?: Token; /** * The maximum number of results (rows) to return in this request. */ MaxResults?: MaxQueryResults; } export interface GetQueryResultsOutput { /** * The number of rows inserted with a CREATE TABLE AS SELECT statement. */ UpdateCount?: Long; /** * The results of the query execution. */ ResultSet?: ResultSet; /** * A token generated by the Athena service that specifies where to continue pagination if a previous request was truncated. To obtain the next set of pages, pass in the NextToken from the response object of the previous page call. */ NextToken?: Token; } export interface GetTableMetadataInput { /** * The name of the data catalog that contains the database and table metadata to return. */ CatalogName: CatalogNameString; /** * The name of the database that contains the table metadata to return. */ DatabaseName: NameString; /** * The name of the table for which metadata is returned. */ TableName: NameString; } export interface GetTableMetadataOutput { /** * An object that contains table metadata. */ TableMetadata?: TableMetadata; } export interface GetWorkGroupInput { /** * The name of the workgroup. */ WorkGroup: WorkGroupName; } export interface GetWorkGroupOutput { /** * Information about the workgroup. 
*/ WorkGroup?: WorkGroup; } export type IdempotencyToken = string; export type Integer = number; export type KeyString = string; export interface ListDataCatalogsInput { /** * A token generated by the Athena service that specifies where to continue pagination if a previous request was truncated. To obtain the next set of pages, pass in the NextToken from the response object of the previous page call. */ NextToken?: Token; /** * Specifies the maximum number of data catalogs to return. */ MaxResults?: MaxDataCatalogsCount; } export interface ListDataCatalogsOutput { /** * A summary list of data catalogs. */ DataCatalogsSummary?: DataCatalogSummaryList; /** * A token generated by the Athena service that specifies where to continue pagination if a previous request was truncated. To obtain the next set of pages, pass in the NextToken from the response object of the previous page call. */ NextToken?: Token; } export interface ListDatabasesInput { /** * The name of the data catalog that contains the databases to return. */ CatalogName: CatalogNameString; /** * A token generated by the Athena service that specifies where to continue pagination if a previous request was truncated. To obtain the next set of pages, pass in the NextToken from the response object of the previous page call. */ NextToken?: Token; /** * Specifies the maximum number of results to return. */ MaxResults?: MaxDatabasesCount; } export interface ListDatabasesOutput { /** * A list of databases from a data catalog. */ DatabaseList?: DatabaseList; /** * A token generated by the Athena service that specifies where to continue pagination if a previous request was truncated. To obtain the next set of pages, pass in the NextToken from the response object of the previous page call. */ NextToken?: Token; } export interface ListNamedQueriesInput { /** * A token generated by the Athena service that specifies where to continue pagination if a previous request was truncated. 
To obtain the next set of pages, pass in the NextToken from the response object of the previous page call. */ NextToken?: Token; /** * The maximum number of queries to return in this request. */ MaxResults?: MaxNamedQueriesCount; /** * The name of the workgroup from which the named queries are being returned. If a workgroup is not specified, the saved queries for the primary workgroup are returned. */ WorkGroup?: WorkGroupName; } export interface ListNamedQueriesOutput { /** * The list of unique query IDs. */ NamedQueryIds?: NamedQueryIdList; /** * A token generated by the Athena service that specifies where to continue pagination if a previous request was truncated. To obtain the next set of pages, pass in the NextToken from the response object of the previous page call. */ NextToken?: Token; } export interface ListQueryExecutionsInput { /** * A token generated by the Athena service that specifies where to continue pagination if a previous request was truncated. To obtain the next set of pages, pass in the NextToken from the response object of the previous page call. */ NextToken?: Token; /** * The maximum number of query executions to return in this request. */ MaxResults?: MaxQueryExecutionsCount; /** * The name of the workgroup from which queries are being returned. If a workgroup is not specified, a list of available query execution IDs for the queries in the primary workgroup is returned. */ WorkGroup?: WorkGroupName; } export interface ListQueryExecutionsOutput { /** * The unique IDs of each query execution as an array of strings. */ QueryExecutionIds?: QueryExecutionIdList; /** * A token to be used by the next request if this request is truncated. */ NextToken?: Token; } export interface ListTableMetadataInput { /** * The name of the data catalog for which table metadata should be returned. */ CatalogName: CatalogNameString; /** * The name of the database for which table metadata should be returned. 
*/ DatabaseName: NameString; /** * A regex filter that pattern-matches table names. If no expression is supplied, metadata for all tables are listed. */ Expression?: ExpressionString; /** * A token generated by the Athena service that specifies where to continue pagination if a previous request was truncated. To obtain the next set of pages, pass in the NextToken from the response object of the previous page call. */ NextToken?: Token; /** * Specifies the maximum number of results to return. */ MaxResults?: MaxTableMetadataCount; } export interface ListTableMetadataOutput { /** * A list of table metadata. */ TableMetadataList?: TableMetadataList; /** * A token generated by the Athena service that specifies where to continue pagination if a previous request was truncated. To obtain the next set of pages, pass in the NextToken from the response object of the previous page call. */ NextToken?: Token; } export interface ListTagsForResourceInput { /** * Lists the tags for the resource with the specified ARN. */ ResourceARN: AmazonResourceName; /** * The token for the next set of results, or null if there are no additional results for this request, where the request lists the tags for the resource with the specified ARN. */ NextToken?: Token; /** * The maximum number of results to be returned per request that lists the tags for the resource. */ MaxResults?: MaxTagsCount; } export interface ListTagsForResourceOutput { /** * The list of tags associated with the specified resource. */ Tags?: TagList; /** * A token to be used by the next request if this request is truncated. */ NextToken?: Token; } export interface ListWorkGroupsInput { /** * A token generated by the Athena service that specifies where to continue pagination if a previous request was truncated. To obtain the next set of pages, pass in the NextToken from the response object of the previous page call. */ NextToken?: Token; /** * The maximum number of workgroups to return in this request. 
*/ MaxResults?: MaxWorkGroupsCount; } export interface ListWorkGroupsOutput { /** * The list of workgroups, including their names, descriptions, creation times, and states. */ WorkGroups?: WorkGroupsList; /** * A token generated by the Athena service that specifies where to continue pagination if a previous request was truncated. To obtain the next set of pages, pass in the NextToken from the response object of the previous page call. */ NextToken?: Token; } export type Long = number; export type MaxDataCatalogsCount = number; export type MaxDatabasesCount = number; export type MaxNamedQueriesCount = number; export type MaxQueryExecutionsCount = number; export type MaxQueryResults = number; export type MaxTableMetadataCount = number; export type MaxTagsCount = number; export type MaxWorkGroupsCount = number; export type NameString = string; export interface NamedQuery { /** * The query name. */ Name: NameString; /** * The query description. */ Description?: DescriptionString; /** * The database to which the query belongs. */ Database: DatabaseString; /** * The SQL query statements that comprise the query. */ QueryString: QueryString; /** * The unique identifier of the query. */ NamedQueryId?: NamedQueryId; /** * The name of the workgroup that contains the named query. */ WorkGroup?: WorkGroupName; } export type NamedQueryId = string; export type NamedQueryIdList = NamedQueryId[]; export type NamedQueryList = NamedQuery[]; export type ParametersMap = {[key: string]: ParametersMapValue}; export type ParametersMapValue = string; export interface QueryExecution { /** * The unique identifier for each query execution. */ QueryExecutionId?: QueryExecutionId; /** * The SQL query statements which the query execution ran. */ Query?: QueryString; /** * The type of query statement that was run. DDL indicates DDL query statements. DML indicates DML (Data Manipulation Language) query statements, such as CREATE TABLE AS SELECT. 
UTILITY indicates query statements other than DDL and DML, such as SHOW CREATE TABLE, or DESCRIBE &lt;table&gt;. */ StatementType?: StatementType; /** * The location in Amazon S3 where query results were stored and the encryption option, if any, used for query results. These are known as "client-side settings". If workgroup settings override client-side settings, then the query uses the location for the query results and the encryption configuration that are specified for the workgroup. */ ResultConfiguration?: ResultConfiguration; /** * The database in which the query execution occurred. */ QueryExecutionContext?: QueryExecutionContext; /** * The completion date, current state, submission time, and state change reason (if applicable) for the query execution. */ Status?: QueryExecutionStatus; /** * Query execution statistics, such as the amount of data scanned, the amount of time that the query took to process, and the type of statement that was run. */ Statistics?: QueryExecutionStatistics; /** * The name of the workgroup in which the query ran. */ WorkGroup?: WorkGroupName; } export interface QueryExecutionContext { /** * The name of the database used in the query execution. */ Database?: DatabaseString; /** * The name of the data catalog used in the query execution. */ Catalog?: CatalogNameString; } export type QueryExecutionId = string; export type QueryExecutionIdList = QueryExecutionId[]; export type QueryExecutionList = QueryExecution[]; export type QueryExecutionState = "QUEUED"|"RUNNING"|"SUCCEEDED"|"FAILED"|"CANCELLED"|string; export interface QueryExecutionStatistics { /** * The number of milliseconds that the query took to execute. */ EngineExecutionTimeInMillis?: Long; /** * The number of bytes in the data that was queried. */ DataScannedInBytes?: Long; /** * The location and file name of a data manifest file. The manifest file is saved to the Athena query results location in Amazon S3. The manifest file tracks files that the query wrote to Amazon S3. 
If the query fails, the manifest file also tracks files that the query intended to write. The manifest is useful for identifying orphaned files resulting from a failed query. For more information, see Working with Query Results, Output Files, and Query History in the Amazon Athena User Guide. */ DataManifestLocation?: String; /** * The number of milliseconds that Athena took to run the query. */ TotalExecutionTimeInMillis?: Long; /** * The number of milliseconds that the query was in your query queue waiting for resources. Note that if transient errors occur, Athena might automatically add the query back to the queue. */ QueryQueueTimeInMillis?: Long; /** * The number of milliseconds that Athena took to plan the query processing flow. This includes the time spent retrieving table partitions from the data source. Note that because the query engine performs the query planning, query planning time is a subset of engine processing time. */ QueryPlanningTimeInMillis?: Long; /** * The number of milliseconds that Athena took to finalize and publish the query results after the query engine finished running the query. */ ServiceProcessingTimeInMillis?: Long; } export interface QueryExecutionStatus { /** * The state of query execution. QUEUED indicates that the query has been submitted to the service, and Athena will execute the query as soon as resources are available. RUNNING indicates that the query is in execution phase. SUCCEEDED indicates that the query completed without errors. FAILED indicates that the query experienced an error and did not complete processing. CANCELLED indicates that a user input interrupted query execution. Athena automatically retries your queries in cases of certain transient errors. As a result, you may see the query state transition from RUNNING or FAILED to QUEUED. */ State?: QueryExecutionState; /** * Further detail about the status of the query. */ StateChangeReason?: String; /** * The date and time that the query was submitted. 
*/ SubmissionDateTime?: _Date; /** * The date and time that the query completed. */ CompletionDateTime?: _Date; } export type QueryString = string; export interface ResultConfiguration { /** * The location in Amazon S3 where your query results are stored, such as s3://path/to/query/bucket/. To run the query, you must specify the query results location using one of the ways: either for individual queries using either this setting (client-side), or in the workgroup, using WorkGroupConfiguration. If none of them is set, Athena issues an error that no output location is provided. For more information, see Query Results. If workgroup settings override client-side settings, then the query uses the settings specified for the workgroup. See WorkGroupConfiguration$EnforceWorkGroupConfiguration. */ OutputLocation?: String; /** * If query results are encrypted in Amazon S3, indicates the encryption option used (for example, SSE-KMS or CSE-KMS) and key information. This is a client-side setting. If workgroup settings override client-side settings, then the query uses the encryption configuration that is specified for the workgroup, and also uses the location for storing query results specified in the workgroup. See WorkGroupConfiguration$EnforceWorkGroupConfiguration and Workgroup Settings Override Client-Side Settings. */ EncryptionConfiguration?: EncryptionConfiguration; } export interface ResultConfigurationUpdates { /** * The location in Amazon S3 where your query results are stored, such as s3://path/to/query/bucket/. For more information, see Query Results If workgroup settings override client-side settings, then the query uses the location for the query results and the encryption configuration that are specified for the workgroup. The "workgroup settings override" is specified in EnforceWorkGroupConfiguration (true/false) in the WorkGroupConfiguration. See WorkGroupConfiguration$EnforceWorkGroupConfiguration. 
*/ OutputLocation?: String; /** * If set to "true", indicates that the previously-specified query results location (also known as a client-side setting) for queries in this workgroup should be ignored and set to null. If set to "false" or not set, and a value is present in the OutputLocation in ResultConfigurationUpdates (the client-side setting), the OutputLocation in the workgroup's ResultConfiguration will be updated with the new value. For more information, see Workgroup Settings Override Client-Side Settings. */ RemoveOutputLocation?: BoxedBoolean; /** * The encryption configuration for the query results. */ EncryptionConfiguration?: EncryptionConfiguration; /** * If set to "true", indicates that the previously-specified encryption configuration (also known as the client-side setting) for queries in this workgroup should be ignored and set to null. If set to "false" or not set, and a value is present in the EncryptionConfiguration in ResultConfigurationUpdates (the client-side setting), the EncryptionConfiguration in the workgroup's ResultConfiguration will be updated with the new value. For more information, see Workgroup Settings Override Client-Side Settings. */ RemoveEncryptionConfiguration?: BoxedBoolean; } export interface ResultSet { /** * The rows in the table. */ Rows?: RowList; /** * The metadata that describes the column structure and data types of a table of query results. */ ResultSetMetadata?: ResultSetMetadata; } export interface ResultSetMetadata { /** * Information about the columns returned in a query result metadata. */ ColumnInfo?: ColumnInfoList; } export interface Row { /** * The data that populates a row in a query result table. */ Data?: datumList; } export type RowList = Row[]; export interface StartQueryExecutionInput { /** * The SQL query statements to be executed. */ QueryString: QueryString; /** * A unique case-sensitive string used to ensure the request to create the query is idempotent (executes only once). 
If another StartQueryExecution request is received, the same response is returned and another query is not created. If a parameter has changed, for example, the QueryString, an error is returned. This token is listed as not required because AWS SDKs (for example the AWS SDK for Java) auto-generate the token for users. If you are not using the AWS SDK or the AWS CLI, you must provide this token or the action will fail. */ ClientRequestToken?: IdempotencyToken; /** * The database within which the query executes. */ QueryExecutionContext?: QueryExecutionContext; /** * Specifies information about where and how to save the results of the query execution. If the query runs in a workgroup, then workgroup's settings may override query settings. This affects the query results location. The workgroup settings override is specified in EnforceWorkGroupConfiguration (true/false) in the WorkGroupConfiguration. See WorkGroupConfiguration$EnforceWorkGroupConfiguration. */ ResultConfiguration?: ResultConfiguration; /** * The name of the workgroup in which the query is being started. */ WorkGroup?: WorkGroupName; } export interface StartQueryExecutionOutput { /** * The unique ID of the query that ran as a result of this request. */ QueryExecutionId?: QueryExecutionId; } export type StatementType = "DDL"|"DML"|"UTILITY"|string; export interface StopQueryExecutionInput { /** * The unique ID of the query execution to stop. */ QueryExecutionId: QueryExecutionId; } export interface StopQueryExecutionOutput { } export type String = string; export interface TableMetadata { /** * The name of the table. */ Name: NameString; /** * The time that the table was created. */ CreateTime?: Timestamp; /** * The last time the table was accessed. */ LastAccessTime?: Timestamp; /** * The type of table. In Athena, only EXTERNAL_TABLE is supported. */ TableType?: TableTypeString; /** * A list of the columns in the table. */ Columns?: ColumnList; /** * A list of the partition keys in the table. 
*/ PartitionKeys?: ColumnList; /** * A set of custom key/value pairs for table properties. */ Parameters?: ParametersMap; } export type TableMetadataList = TableMetadata[]; export type TableTypeString = string; export interface Tag { /** * A tag key. The tag key length is from 1 to 128 Unicode characters in UTF-8. You can use letters and numbers representable in UTF-8, and the following characters: + - = . _ : / @. Tag keys are case-sensitive and must be unique per resource. */ Key?: TagKey; /** * A tag value. The tag value length is from 0 to 256 Unicode characters in UTF-8. You can use letters and numbers representable in UTF-8, and the following characters: + - = . _ : / @. Tag values are case-sensitive. */ Value?: TagValue; } export type TagKey = string; export type TagKeyList = TagKey[]; export type TagList = Tag[]; export interface TagResourceInput { /** * Specifies the ARN of the Athena resource (workgroup or data catalog) to which tags are to be added. */ ResourceARN: AmazonResourceName; /** * A collection of one or more tags, separated by commas, to be added to an Athena workgroup or data catalog resource. */ Tags: TagList; } export interface TagResourceOutput { } export type TagValue = string; export type Timestamp = Date; export type Token = string; export type TypeString = string; export interface UnprocessedNamedQueryId { /** * The unique identifier of the named query. */ NamedQueryId?: NamedQueryId; /** * The error code returned when the processing request for the named query failed, if applicable. */ ErrorCode?: ErrorCode; /** * The error message returned when the processing request for the named query failed, if applicable. */ ErrorMessage?: ErrorMessage; } export type UnprocessedNamedQueryIdList = UnprocessedNamedQueryId[]; export interface UnprocessedQueryExecutionId { /** * The unique identifier of the query execution. */ QueryExecutionId?: QueryExecutionId; /** * The error code returned when the query execution failed to process, if applicable. 
*/ ErrorCode?: ErrorCode; /** * The error message returned when the query execution failed to process, if applicable. */ ErrorMessage?: ErrorMessage; } export type UnprocessedQueryExecutionIdList = UnprocessedQueryExecutionId[]; export interface UntagResourceInput { /** * Specifies the ARN of the resource from which tags are to be removed. */ ResourceARN: AmazonResourceName; /** * A comma-separated list of one or more tag keys whose tags are to be removed from the specified resource. */ TagKeys: TagKeyList; } export interface UntagResourceOutput { } export interface UpdateDataCatalogInput { /** * The name of the data catalog to update. The catalog name must be unique for the AWS account and can use a maximum of 128 alphanumeric, underscore, at sign, or hyphen characters. */ Name: CatalogNameString; /** * Specifies the type of data catalog to update. Specify LAMBDA for a federated catalog, GLUE for AWS Glue Catalog, or HIVE for an external hive metastore. */ Type: DataCatalogType; /** * New or modified text that describes the data catalog. */ Description?: DescriptionString; /** * Specifies the Lambda function or functions to use for updating the data catalog. This is a mapping whose values depend on the catalog type. For the HIVE data catalog type, use the following syntax. The metadata-function parameter is required. The sdk-version parameter is optional and defaults to the currently supported version. metadata-function=lambda_arn, sdk-version=version_number For the LAMBDA data catalog type, use one of the following sets of required parameters, but not both. If you have one Lambda function that processes metadata and another for reading the actual data, use the following syntax. Both parameters are required. metadata-function=lambda_arn, record-function=lambda_arn If you have a composite Lambda function that processes both metadata and data, use the following syntax to specify your Lambda function. function=lambda_arn The GLUE type has no parameters. 
*/ Parameters?: ParametersMap; } export interface UpdateDataCatalogOutput { } export interface UpdateWorkGroupInput { /** * The specified workgroup that will be updated. */ WorkGroup: WorkGroupName; /** * The workgroup description. */ Description?: WorkGroupDescriptionString; /** * The workgroup configuration that will be updated for the given workgroup. */ ConfigurationUpdates?: WorkGroupConfigurationUpdates; /** * The workgroup state that will be updated for the given workgroup. */ State?: WorkGroupState; } export interface UpdateWorkGroupOutput { } export interface WorkGroup { /** * The workgroup name. */ Name: WorkGroupName; /** * The state of the workgroup: ENABLED or DISABLED. */ State?: WorkGroupState; /** * The configuration of the workgroup, which includes the location in Amazon S3 where query results are stored, the encryption configuration, if any, used for query results; whether the Amazon CloudWatch Metrics are enabled for the workgroup; whether workgroup settings override client-side settings; and the data usage limits for the amount of data scanned per query or per workgroup. The workgroup settings override is specified in EnforceWorkGroupConfiguration (true/false) in the WorkGroupConfiguration. See WorkGroupConfiguration$EnforceWorkGroupConfiguration. */ Configuration?: WorkGroupConfiguration; /** * The workgroup description. */ Description?: WorkGroupDescriptionString; /** * The date and time the workgroup was created. */ CreationTime?: _Date; } export interface WorkGroupConfiguration { /** * The configuration for the workgroup, which includes the location in Amazon S3 where query results are stored and the encryption option, if any, used for query results. To run the query, you must specify the query results location using one of the ways: either in the workgroup using this setting, or for individual queries (client-side), using ResultConfiguration$OutputLocation. If none of them is set, Athena issues an error that no output location is provided. 
For more information, see Query Results. */ ResultConfiguration?: ResultConfiguration; /** * If set to "true", the settings for the workgroup override client-side settings. If set to "false", client-side settings are used. For more information, see Workgroup Settings Override Client-Side Settings. */ EnforceWorkGroupConfiguration?: BoxedBoolean; /** * Indicates that the Amazon CloudWatch metrics are enabled for the workgroup. */ PublishCloudWatchMetricsEnabled?: BoxedBoolean; /** * The upper data usage limit (cutoff) for the amount of bytes a single query in a workgroup is allowed to scan. */ BytesScannedCutoffPerQuery?: BytesScannedCutoffValue; /** * If set to true, allows members assigned to a workgroup to reference Amazon S3 Requester Pays buckets in queries. If set to false, workgroup members cannot query data from Requester Pays buckets, and queries that retrieve data from Requester Pays buckets cause an error. The default is false. For more information about Requester Pays buckets, see Requester Pays Buckets in the Amazon Simple Storage Service Developer Guide. */ RequesterPaysEnabled?: BoxedBoolean; } export interface WorkGroupConfigurationUpdates { /** * If set to "true", the settings for the workgroup override client-side settings. If set to "false" client-side settings are used. For more information, see Workgroup Settings Override Client-Side Settings. */ EnforceWorkGroupConfiguration?: BoxedBoolean; /** * The result configuration information about the queries in this workgroup that will be updated. Includes the updated results location and an updated option for encrypting query results. */ ResultConfigurationUpdates?: ResultConfigurationUpdates; /** * Indicates whether this workgroup enables publishing metrics to Amazon CloudWatch. */ PublishCloudWatchMetricsEnabled?: BoxedBoolean; /** * The upper limit (cutoff) for the amount of bytes a single query in a workgroup is allowed to scan. 
*/ BytesScannedCutoffPerQuery?: BytesScannedCutoffValue; /** * Indicates that the data usage control limit per query is removed. WorkGroupConfiguration$BytesScannedCutoffPerQuery */ RemoveBytesScannedCutoffPerQuery?: BoxedBoolean; /** * If set to true, allows members assigned to a workgroup to specify Amazon S3 Requester Pays buckets in queries. If set to false, workgroup members cannot query data from Requester Pays buckets, and queries that retrieve data from Requester Pays buckets cause an error. The default is false. For more information about Requester Pays buckets, see Requester Pays Buckets in the Amazon Simple Storage Service Developer Guide. */ RequesterPaysEnabled?: BoxedBoolean; } export type WorkGroupDescriptionString = string; export type WorkGroupName = string; export type WorkGroupState = "ENABLED"|"DISABLED"|string; export interface WorkGroupSummary { /** * The name of the workgroup. */ Name?: WorkGroupName; /** * The state of the workgroup. */ State?: WorkGroupState; /** * The workgroup description. */ Description?: WorkGroupDescriptionString; /** * The workgroup creation date and time. */ CreationTime?: _Date; } export type WorkGroupsList = WorkGroupSummary[]; export type datumList = Datum[]; export type datumString = string; /** * A string in YYYY-MM-DD format that represents the latest possible API version that can be used in this service. Specify 'latest' to use the latest possible version. */ export type apiVersion = "2017-05-18"|"latest"|string; export interface ClientApiVersions { /** * A string in YYYY-MM-DD format that represents the latest possible API version that can be used in this service. Specify 'latest' to use the latest possible version. */ apiVersion?: apiVersion; } export type ClientConfiguration = ServiceConfigurationOptions & ClientApiVersions; /** * Contains interfaces for use with the Athena client. */ export import Types = Athena; } export = Athena;
the_stack
export type QueryAST = (AndOp | OrOp | NotOp | FilterOp | NoOp) & { error?: Error; }; /** If ALL of of the operands are true, this resolves to true. There may be any number of operands. */ export interface AndOp { op: 'and'; operands: QueryAST[]; } /** If any of the operands is true, this resolves to true. There may be any number of operands. */ export interface OrOp { op: 'or'; operands: QueryAST[]; } /** An operator which negates the result of its only operand. */ export interface NotOp { op: 'not'; operand: QueryAST; } /** This represents one of our filter function definitions, such as is:, season:, etc. */ export interface FilterOp { op: 'filter'; /** * The name of the filter function, without any trailing :. The only weird case is * stats, which will appear like "stat:strength". */ type: string; /** * Any arguments to the filter function as a single string. e.g: haspower, arrivals, >=1000 */ args: string; } /** This is mostly for error cases and empty string */ interface NoOp { op: 'noop'; } /** * The lexer is implemented as a generator, but generators don't support peeking without advancing * the iterator. This wraps the generator in an object that buffers the next element if you call peek(). */ class PeekableGenerator<T> { private gen: Generator<T>; private next: T | undefined; constructor(gen: Generator<T>) { this.gen = gen; } /** * Get what the next item from the generator will be, without advancing it. */ peek(): T | undefined { if (!this.next) { this.next = this.gen.next().value; } return this.next; } /** * Get the next element from the generator and advance it to the next element. */ pop(): T | undefined { if (this.next) { const ret = this.next; this.next = undefined; return ret; } return this.gen.next().value; } } /** * A table of operator precedence for our three binary operators. Operators with higher precedence group together * before those with lower precedence. The "op" property maps them to an AST node. 
*/ const operators = { // The implicit `and` (two statements separated by whitespace) has lower precedence than either the explicit or or and. implicit_and: { precedence: 1, op: 'and', }, or: { precedence: 2, op: 'or', }, and: { precedence: 3, op: 'and', }, } as const; /** * The query parser first lexes the string, then parses it into an AST (abstract syntax tree) * representing the logical structure of the query. This AST can then be walked to match up * to defined filters and generate an actual filter function. * * We choose to produce an AST instead of executing the search inline with parsing both to * make testing easier, and to allow for things like canonicalization of search queries. */ export function parseQuery(query: string): QueryAST { // This implements operator precedence via this mechanism: // https://eli.thegreenplace.net/2012/08/02/parsing-expressions-by-precedence-climbing /** * This extracts the next "atom" aka "value" from the token stream. An atom is either * an individual filter expression, or a grouped expression. Basically anything that's * not a binary operator. "not" is also in here because it's really just a modifier on an atom. */ function parseAtom(tokens: PeekableGenerator<Token>): QueryAST { const token: Token | undefined = tokens.pop(); if (!token) { throw new Error('expected an atom'); } switch (token[0]) { case 'filter': { const keyword = token[1]; if (keyword === 'not') { // `not:` a synonym for `-is:`. We could fix this up in filter execution but I chose to normalize it here. 
return { op: 'not', operand: { op: 'filter', type: 'is', args: token[2], }, }; } else { return { op: 'filter', type: keyword, args: token[2], }; } } case 'not': { return { op: 'not', // The operand should always be an atom operand: parseAtom(tokens), }; } case '(': { const result = parse(tokens); if (tokens.peek()?.[0] === ')') { tokens.pop(); } return result; } default: throw new Error('Unexpected token type, looking for an atom: ' + token + ', ' + query); } } /** * Parse a stream of tokens into an AST. `minPrecedence` determined the minimum operator precedence * of operators that will be included in this portion of the parse. */ function parse(tokens: PeekableGenerator<Token>, minPrecedence = 1): QueryAST { let ast: QueryAST = { op: 'noop' }; try { ast = parseAtom(tokens); let token: Token | undefined; while ((token = tokens.peek())) { if (token[0] === ')') { break; } const operator = operators[token[0] as keyof typeof operators]; if (!operator) { throw new Error('Expected an operator, got ' + token); } else if (operator.precedence < minPrecedence) { break; } tokens.pop(); const nextMinPrecedence = operator.precedence + 1; // all our operators are left-associative const rhs = parse(tokens, nextMinPrecedence); // Our operators allow for more than 2 operands, to avoid deep logic trees. // This logic tries to combine them where possible. if (isSameOp(operator.op, ast)) { ast.operands.push(rhs); } else { ast = { op: operator.op, operands: isSameOp(operator.op, rhs) ? 
[ast, ...rhs.operands] : [ast, rhs], }; } } } catch (e) { ast.error = e; } return ast; } const tokens = new PeekableGenerator(lexer(query)); try { if (!tokens.peek()) { return { op: 'noop' }; } } catch (e) { return { op: 'noop' }; } const ast = parse(tokens); return ast; } function isSameOp<T extends 'and' | 'or'>(binOp: T, op: QueryAST): op is AndOp | OrOp { return binOp === op.op; } /* **** Lexer **** */ // Lexer token types type NoArgTokenType = '(' | ')' | 'not' | 'or' | 'and' | 'implicit_and'; export type Token = [NoArgTokenType] | ['filter', string, string]; // Two different kind of quotes const quoteRegexes = { '"': /.*?"/y, "'": /.*?'/y, }; // Parens: `(` can be followed by whitespace, while `)` can be preceded by it const parens = /(\(\s*|\s*\))/y; // A `-` followed by any amount of whitespace is the same as "not" const negation = /-\s*/y; // `not`, `or`, and `and` keywords. or and not can be preceded by whitespace, and any of them can be followed by whitespace. // `not` can't be preceded by whitespace because that whitespace is an implicit `and`. const booleanKeywords = /(not|\s+or|\s+and)\s+/y; // Filter names like is:, stat:, etc const filterName = /[a-z]+:/y; // Arguments to filters are pretty unconstrained const filterArgs = /[^\s()]+/y; // Words without quotes are basically any non-whitespace that doesn't terminate a group const bareWords = /[^\s)]+/y; // Whitespace that doesn't match anything else is an implicit `and` const whitespace = /\s+/y; /** * The lexer yields a series of tokens representing the linear structure of the search query. * This throws an exception if it finds an invalid input. 
* * Example: "is:blue -is:maxpower" turns into: * ["filter", "is", "blue"], ["implicit_and"], ["not"], ["filter", "is", "maxpower"] */ export function* lexer(query: string): Generator<Token> { query = query.trim().toLowerCase(); // http://blog.tatedavies.com/2012/08/28/replace-microsoft-chars-in-javascript/ query = query.replace(/[\u2018-\u201A]/g, "'"); query = query.replace(/[\u201C-\u201E]/g, '"'); let match: string | undefined; let i = 0; const consume = (str: string) => (i += str.length); /** * If `query` matches `re` starting at `i`, return the matched portion of the string. Otherwise return undefined. * This avoids having to make slices of strings just to start the regex in the middle of a string. * * Note that regexes passed to this must have the "sticky" flag set (y) and should not use ^, which will match the * beginning of the string, ignoring the index we want to start from. The sticky flag ensures our regex will match * based on the beginning of the string. */ const extract = (re: RegExp): string | undefined => { // These checks only run in unit tests if ($DIM_FLAVOR === 'test') { if (!re.sticky) { throw new Error('regexp must be sticky'); } if (re.source.startsWith('^')) { throw new Error('regexp cannot start with ^ and be repositioned'); } } re.lastIndex = i; const match = re.exec(query); if (match) { const result = match[0]; if (result.length > 0) { consume(result); return result; } } return undefined; }; /** * Consume and return the contents of a quoted string. 
*/ const consumeString = (startingQuoteChar: string) => { // Quoted string consume(startingQuoteChar); if ((match = extract(quoteRegexes[startingQuoteChar])) !== undefined) { // Slice off the last character return match.slice(0, match.length - 1); } else { throw new Error('Unterminated quotes: |' + query.slice(i) + '| ' + i); } }; while (i < query.length) { const char = query[i]; const startingIndex = i; if ((match = extract(parens)) !== undefined) { // Start/end group yield [match.trim() as NoArgTokenType]; } else if (char === '"' || char === "'") { // Quoted string yield ['filter', 'keyword', consumeString(char)]; } else if ((match = extract(negation)) !== undefined) { // minus sign is the same as "not" yield ['not']; } else if ((match = extract(booleanKeywords)) !== undefined) { // boolean keywords yield [match.trim() as NoArgTokenType]; } else if ((match = extract(filterName)) !== undefined) { // Keyword searches - is:, stat:discipline:, etc const keyword = match.slice(0, match.length - 1); const nextChar = query[i]; let args = ''; if (nextChar === '"' || nextChar === "'") { args = consumeString(nextChar); } else if ((match = extract(filterArgs)) !== undefined) { args = match; } else { throw new Error('missing keyword arguments for ' + match); } yield ['filter', keyword, args]; } else if ((match = extract(bareWords)) !== undefined) { // bare words that aren't keywords are effectively "keyword" type filters yield ['filter', 'keyword', match]; } else if ((match = extract(whitespace)) !== undefined) { yield ['implicit_and']; } else { throw new Error('unrecognized tokens: |' + query.slice(i) + '| ' + i); } if (startingIndex === i) { throw new Error('bug: forgot to consume characters'); } } } /** * Build a standardized version of the query as a string. This is useful for deduping queries. 
* Example: 'is:weapon and is:sniperrifle or not is:armor and modslot:arrival' => * '(-is:armor modslot:arrival) or (is:sniperrifle is:weapon)' */ export function canonicalizeQuery(query: QueryAST, depth = 0): string { switch (query.op) { case 'filter': return query.type === 'keyword' ? `${/\s/.test(query.args) ? `"${query.args}"` : query.args}` : `${query.type}:${/\s/.test(query.args) ? `"${query.args}"` : query.args}`; case 'not': return `-${canonicalizeQuery(query.operand, depth + 1)}`; case 'and': case 'or': { const joinedOperands = query.operands .map((q) => canonicalizeQuery(q, depth + 1)) .join( query.op === 'and' && !query.operands.some((op) => op.op === 'filter' && op.type === 'keyword') ? ' ' : ` ${query.op} ` ); return depth === 0 ? joinedOperands : `(${joinedOperands})`; } case 'noop': return ''; } }
the_stack
// Type-only import: erased at compile time, so runtime code has no dependency on ./tags.
import type { IElementAppliable } from "./tags";

/**
 * A single CSS property assignment, e.g. `backgroundColor: "red"`.
 */
export class CssProp implements IElementAppliable {
    // The kebab-case form of `key`, e.g. "backgroundColor" -> "background-color".
    public readonly name: string;

    /**
     * @param key - the camelCase style-object property name.
     * @param value - the CSS value to assign (0 is allowed as a unitless length).
     */
    constructor(
        public readonly key: string,
        public readonly value: string | 0) {
        // Use toLowerCase, NOT toLocaleLowerCase: property names are ASCII and the
        // mapping must be locale-independent (toLocaleLowerCase turns "I" into a
        // dotless "ı" under Turkish locales, corrupting the CSS property name).
        this.name = key.replace(/[A-Z]/g, (m) => {
            return "-" + m.toLowerCase();
        });
    }

    /**
     * Set the attribute value on an HTMLElement
     * @param elem - the element on which to set the attribute.
     */
    applyToElement(elem: HTMLElement) {
        (elem.style as any)[this.key] = this.value;
    }
}

/**
 * An ordered collection of CSS properties/sets, applied in sequence (later
 * entries overwrite earlier ones for the same property).
 */
export class CssPropSet implements IElementAppliable {
    private readonly rest: (CssProp | CssPropSet)[];

    constructor(...rest: (CssProp | CssPropSet)[]) {
        this.rest = rest;
    }

    /**
     * Set the attribute value on an HTMLElement
     * @param elem - the element on which to set the attribute.
     */
    applyToElement(elem: HTMLElement) {
        for (const prop of this.rest) {
            prop.applyToElement(elem);
        }
    }
}

/**
 * Combine style properties.
 **/
export function styles(...rest: (CssProp | CssPropSet)[]) {
    return new CssPropSet(...rest);
}

// CSS-wide keyword values accepted by every property.
type globalValues = "inherit" | "initial" | "revert" | "unset";

// Factory helpers, one per CSS property, keyed by the camelCase style-object name.
export function alignContent(v: string) { return new CssProp("alignContent", v); }
export function alignItems(v: string) { return new CssProp("alignItems", v); }
export function alignSelf(v: string) { return new CssProp("alignSelf", v); }
export function alignmentBaseline(v: string) { return new CssProp("alignmentBaseline", v); }
export function all(v: string) { return new CssProp("all", v); }
export function animation(v: string) { return new CssProp("animation", v); }
export function animationDelay(v: string) { return new CssProp("animationDelay", v); }
export function animationDirection(v: string) { return new CssProp("animationDirection", v); }
export function animationDuration(v: string) { return new CssProp("animationDuration", v); }
export function animationFillMode(v: string) { return new CssProp("animationFillMode", v); }
export function animationIterationCount(v: string) { return new CssProp("animationIterationCount", v); }
export function animationName(v: string) { return new CssProp("animationName", v); }
export function animationPlayState(v: string) { return new CssProp("animationPlayState", v); }
export function animationTimingFunction(v: string) { return new CssProp("animationTimingFunction", v); }
export function appearance(v: string) { return new CssProp("appearance", v); }
export function backdropFilter(v: string) { return new CssProp("backdropFilter", v); }
export function backfaceVisibility(v: string) { return new CssProp("backfaceVisibility", v); }
export function background(v: string) { return new CssProp("background", v); }
export function backgroundAttachment(v: string) { return new CssProp("backgroundAttachment", v); }
export function backgroundBlendMode(v: string) { return new CssProp("backgroundBlendMode", v); }
export function backgroundClip(v: string) { return new CssProp("backgroundClip", v); }
export function backgroundColor(v: string) { return new CssProp("backgroundColor", v); }
export function backgroundImage(v: string) { return new CssProp("backgroundImage", v); }
export function backgroundOrigin(v: string) { return new CssProp("backgroundOrigin", v); }
export function backgroundPosition(v: string) { return new CssProp("backgroundPosition", v); }
export function backgroundPositionX(v: string) { return new CssProp("backgroundPositionX", v); }
export function backgroundPositionY(v: string) { return new CssProp("backgroundPositionY", v); }
export function backgroundRepeat(v: string) { return new CssProp("backgroundRepeat", v); }
export function backgroundRepeatX(v: string) { return new CssProp("backgroundRepeatX", v); }
export function backgroundRepeatY(v: string) { return new CssProp("backgroundRepeatY", v); }
export function backgroundSize(v: string) { return new CssProp("backgroundSize", v); }
export function baselineShift(v: string) { return new CssProp("baselineShift", v); }
export function blockSize(v: string) { return new CssProp("blockSize", v); }
export function border(v: string | 0) { return new CssProp("border", v); } export function borderBlockEnd(v: string) { return new CssProp("borderBlockEnd", v); } export function borderBlockEndColor(v: string) { return new CssProp("borderBlockEndColor", v); } export function borderBlockEndStyle(v: string) { return new CssProp("borderBlockEndStyle", v); } export function borderBlockEndWidth(v: string) { return new CssProp("borderBlockEndWidth", v); } export function borderBlockStart(v: string) { return new CssProp("borderBlockStart", v); } export function borderBlockStartColor(v: string) { return new CssProp("borderBlockStartColor", v); } export function borderBlockStartStyle(v: string) { return new CssProp("borderBlockStartStyle", v); } export function borderBlockStartWidth(v: string) { return new CssProp("borderBlockStartWidth", v); } export function borderBottom(v: string) { return new CssProp("borderBottom", v); } export function borderBottomColor(v: string) { return new CssProp("borderBottomColor", v); } export function borderBottomLeftRadius(v: string) { return new CssProp("borderBottomLeftRadius", v); } export function borderBottomRightRadius(v: string) { return new CssProp("borderBottomRightRadius", v); } export function borderBottomStyle(v: string) { return new CssProp("borderBottomStyle", v); } export function borderBottomWidth(v: string) { return new CssProp("borderBottomWidth", v); } export function borderCollapse(v: string) { return new CssProp("borderCollapse", v); } export function borderColor(v: string) { return new CssProp("borderColor", v); } export function borderImage(v: string) { return new CssProp("borderImage", v); } export function borderImageOutset(v: string) { return new CssProp("borderImageOutset", v); } export function borderImageRepeat(v: string) { return new CssProp("borderImageRepeat", v); } export function borderImageSlice(v: string) { return new CssProp("borderImageSlice", v); } export function borderImageSource(v: string) { return 
new CssProp("borderImageSource", v); } export function borderImageWidth(v: string) { return new CssProp("borderImageWidth", v); } export function borderInlineEnd(v: string) { return new CssProp("borderInlineEnd", v); } export function borderInlineEndColor(v: string) { return new CssProp("borderInlineEndColor", v); } export function borderInlineEndStyle(v: string) { return new CssProp("borderInlineEndStyle", v); } export function borderInlineEndWidth(v: string) { return new CssProp("borderInlineEndWidth", v); } export function borderInlineStart(v: string) { return new CssProp("borderInlineStart", v); } export function borderInlineStartColor(v: string) { return new CssProp("borderInlineStartColor", v); } export function borderInlineStartStyle(v: string) { return new CssProp("borderInlineStartStyle", v); } export function borderInlineStartWidth(v: string) { return new CssProp("borderInlineStartWidth", v); } export function borderLeft(v: string) { return new CssProp("borderLeft", v); } export function borderLeftColor(v: string) { return new CssProp("borderLeftColor", v); } export function borderLeftStyle(v: string) { return new CssProp("borderLeftStyle", v); } export function borderLeftWidth(v: string) { return new CssProp("borderLeftWidth", v); } export function borderRadius(v: string) { return new CssProp("borderRadius", v); } export function borderRight(v: string) { return new CssProp("borderRight", v); } export function borderRightColor(v: string) { return new CssProp("borderRightColor", v); } export function borderRightStyle(v: string) { return new CssProp("borderRightStyle", v); } export function borderRightWidth(v: string) { return new CssProp("borderRightWidth", v); } export function borderSpacing(v: string) { return new CssProp("borderSpacing", v); } export function borderStyle(v: string) { return new CssProp("borderStyle", v); } export function borderTop(v: string) { return new CssProp("borderTop", v); } export function borderTopColor(v: string) { return new 
CssProp("borderTopColor", v); } export function borderTopLeftRadius(v: string) { return new CssProp("borderTopLeftRadius", v); } export function borderTopRightRadius(v: string) { return new CssProp("borderTopRightRadius", v); } export function borderTopStyle(v: string) { return new CssProp("borderTopStyle", v); } export function borderTopWidth(v: string) { return new CssProp("borderTopWidth", v); } export function borderWidth(v: string | 0) { return new CssProp("borderWidth", v); } export function bottom(v: string | 0) { return new CssProp("bottom", v); } export function boxShadow(v: string) { return new CssProp("boxShadow", v); } export function boxSizing(v: string) { return new CssProp("boxSizing", v); } export function breakAfter(v: string) { return new CssProp("breakAfter", v); } export function breakBefore(v: string) { return new CssProp("breakBefore", v); } export function breakInside(v: string) { return new CssProp("breakInside", v); } export function bufferedRendering(v: string) { return new CssProp("bufferedRendering", v); } export function captionSide(v: string) { return new CssProp("captionSide", v); } export function caretColor(v: string) { return new CssProp("caretColor", v); } export function clear(v: string) { return new CssProp("clear", v); } export function clip(v: string) { return new CssProp("clip", v); } export function clipPath(v: string) { return new CssProp("clipPath", v); } export function clipRule(v: string) { return new CssProp("clipRule", v); } export function color(v: string) { return new CssProp("color", v); } export function colorInterpolation(v: string) { return new CssProp("colorInterpolation", v); } export function colorInterpolationFilters(v: string) { return new CssProp("colorInterpolationFilters", v); } export function colorRendering(v: string) { return new CssProp("colorRendering", v); } export function colorScheme(v: string) { return new CssProp("colorScheme", v); } export function columnCount(v: string) { return new 
CssProp("columnCount", v); } export function columnFill(v: string) { return new CssProp("columnFill", v); } export function columnGap(v: string) { return new CssProp("columnGap", v); } export function columnRule(v: string) { return new CssProp("columnRule", v); } export function columnRuleColor(v: string) { return new CssProp("columnRuleColor", v); } export function columnRuleStyle(v: string) { return new CssProp("columnRuleStyle", v); } export function columnRuleWidth(v: string) { return new CssProp("columnRuleWidth", v); } export function columnSpan(v: string) { return new CssProp("columnSpan", v); } export function columnWidth(v: string) { return new CssProp("columnWidth", v); } export function columns(v: string) { return new CssProp("columns", v); } export function contain(v: string) { return new CssProp("contain", v); } export function containIntrinsicSize(v: string) { return new CssProp("containIntrinsicSize", v); } export function counterIncrement(v: string) { return new CssProp("counterIncrement", v); } export function counterReset(v: string) { return new CssProp("counterReset", v); } export function cursor(v: string) { return new CssProp("cursor", v); } export function cx(v: string) { return new CssProp("cx", v); } export function cy(v: string) { return new CssProp("cy", v); } export function d(v: string) { return new CssProp("d", v); } export function direction(v: string) { return new CssProp("direction", v); } export function display(v: string) { return new CssProp("display", v); } export function dominantBaseline(v: string) { return new CssProp("dominantBaseline", v); } export function emptyCells(v: string) { return new CssProp("emptyCells", v); } export function fill(v: string) { return new CssProp("fill", v); } export function fillOpacity(v: string) { return new CssProp("fillOpacity", v); } export function fillRule(v: string) { return new CssProp("fillRule", v); } export function filter(v: string) { return new CssProp("filter", v); } export function 
flex(v: string) { return new CssProp("flex", v); } export function flexBasis(v: string) { return new CssProp("flexBasis", v); } export function flexDirection(v: string) { return new CssProp("flexDirection", v); } export function flexFlow(v: string) { return new CssProp("flexFlow", v); } export function flexGrow(v: string) { return new CssProp("flexGrow", v); } export function flexShrink(v: string) { return new CssProp("flexShrink", v); } export function flexWrap(v: string) { return new CssProp("flexWrap", v); } export function float(v: string) { return new CssProp("float", v); } export function floodColor(v: string) { return new CssProp("floodColor", v); } export function floodOpacity(v: string) { return new CssProp("floodOpacity", v); } export function font(v: string) { return new CssProp("font", v); } export function fontDisplay(v: string) { return new CssProp("fontDisplay", v); } export function fontFamily(v: string) { return new CssProp("fontFamily", v); } export function fontFeatureSettings(v: string) { return new CssProp("fontFeatureSettings", v); } export function fontKerning(v: string) { return new CssProp("fontKerning", v); } export function fontOpticalSizing(v: string) { return new CssProp("fontOpticalSizing", v); } export function fontSize(v: string) { return new CssProp("fontSize", v); } export function fontStretch(v: string) { return new CssProp("fontStretch", v); } export function fontStyle(v: string) { return new CssProp("fontStyle", v); } export function fontVariant(v: string) { return new CssProp("fontVariant", v); } export function fontVariantCaps(v: string) { return new CssProp("fontVariantCaps", v); } export function fontVariantEastAsian(v: string) { return new CssProp("fontVariantEastAsian", v); } export function fontVariantLigatures(v: string) { return new CssProp("fontVariantLigatures", v); } export function fontVariantNumeric(v: string) { return new CssProp("fontVariantNumeric", v); } export function fontVariationSettings(v: string) { return 
new CssProp("fontVariationSettings", v); } export function fontWeight(v: string) { return new CssProp("fontWeight", v); } export function forcedColorAdjust(v: string) { return new CssProp("forcedColorAdjust", v); } export function gap(v: string) { return new CssProp("gap", v); } export function grid(v: string) { return new CssProp("grid", v); } export function gridArea(v: string) { return new CssProp("gridArea", v); } export function gridAutoColumns(v: string) { return new CssProp("gridAutoColumns", v); } type gridAutoFlowType = "row" | "column" | "dense" | "row dense" | "column dense" | globalValues; export function gridAutoFlow(v: gridAutoFlowType) { return new CssProp("gridAutoFlow", v); } export function gridAutoRows(v: string) { return new CssProp("gridAutoRows", v); } export function gridColumn(v: string) { return new CssProp("gridColumn", v); } export function gridColumnEnd(v: string) { return new CssProp("gridColumnEnd", v); } export function gridColumnGap(v: string) { return new CssProp("gridColumnGap", v); } export function gridColumnStart(v: string) { return new CssProp("gridColumnStart", v); } export function gridGap(v: string) { return new CssProp("gridGap", v); } export function gridRow(v: string) { return new CssProp("gridRow", v); } export function gridRowEnd(v: string) { return new CssProp("gridRowEnd", v); } export function gridRowGap(v: string) { return new CssProp("gridRowGap", v); } export function gridRowStart(v: string) { return new CssProp("gridRowStart", v); } export function gridTemplate(v: string) { return new CssProp("gridTemplate", v); } export function gridTemplateAreas(v: string) { return new CssProp("gridTemplateAreas", v); } export function gridTemplateColumns(v: string) { return new CssProp("gridTemplateColumns", v); } export function gridTemplateRows(v: string) { return new CssProp("gridTemplateRows", v); } export function height(v: string | 0) { return new CssProp("height", v); } export function hyphens(v: string) { return new 
CssProp("hyphens", v); } export function imageOrientation(v: string) { return new CssProp("imageOrientation", v); } export function imageRendering(v: string) { return new CssProp("imageRendering", v); } export function inlineSize(v: string) { return new CssProp("inlineSize", v); } export function isolation(v: string) { return new CssProp("isolation", v); } export function justifyContent(v: string) { return new CssProp("justifyContent", v); } export function justifyItems(v: string) { return new CssProp("justifyItems", v); } export function justifySelf(v: string) { return new CssProp("justifySelf", v); } export function left(v: string | 0) { return new CssProp("left", v); } export function letterSpacing(v: string) { return new CssProp("letterSpacing", v); } export function lightingColor(v: string) { return new CssProp("lightingColor", v); } export function lineBreak(v: string) { return new CssProp("lineBreak", v); } export function lineHeight(v: string) { return new CssProp("lineHeight", v); } export function listStyle(v: string) { return new CssProp("listStyle", v); } export function listStyleImage(v: string) { return new CssProp("listStyleImage", v); } export function listStylePosition(v: string) { return new CssProp("listStylePosition", v); } export function listStyleType(v: string) { return new CssProp("listStyleType", v); } export function margin(v: string | 0) { return new CssProp("margin", v); } export function marginBlockEnd(v: string) { return new CssProp("marginBlockEnd", v); } export function marginBlockStart(v: string) { return new CssProp("marginBlockStart", v); } export function marginBottom(v: string | 0) { return new CssProp("marginBottom", v); } export function marginInlineEnd(v: string) { return new CssProp("marginInlineEnd", v); } export function marginInlineStart(v: string) { return new CssProp("marginInlineStart", v); } export function marginLeft(v: string | 0) { return new CssProp("marginLeft", v); } export function marginRight(v: string | 0) { 
// CssProp factory helpers, one per CSS property: each binds the property's
// camelCase name to the supplied value via `new CssProp(name, v)` (CssProp is
// declared earlier in this file). Parameters typed `string | 0` also accept
// the literal 0 — presumably so callers can write a unitless zero length;
// TODO confirm against CssProp's serialization.
return new CssProp("marginRight", v); } export function marginTop(v: string | 0) { return new CssProp("marginTop", v); } export function marker(v: string) { return new CssProp("marker", v); } export function markerEnd(v: string) { return new CssProp("markerEnd", v); } export function markerMid(v: string) { return new CssProp("markerMid", v); } export function markerStart(v: string) { return new CssProp("markerStart", v); } export function mask(v: string) { return new CssProp("mask", v); } export function maskType(v: string) { return new CssProp("maskType", v); } export function maxBlockSize(v: string) { return new CssProp("maxBlockSize", v); } export function maxHeight(v: string | 0) { return new CssProp("maxHeight", v); } export function maxInlineSize(v: string) { return new CssProp("maxInlineSize", v); } export function maxWidth(v: string | 0) { return new CssProp("maxWidth", v); } export function maxZoom(v: string) { return new CssProp("maxZoom", v); } export function minBlockSize(v: string) { return new CssProp("minBlockSize", v); } export function minHeight(v: string | 0) { return new CssProp("minHeight", v); } export function minInlineSize(v: string) { return new CssProp("minInlineSize", v); } export function minWidth(v: string | 0) { return new CssProp("minWidth", v); } export function minZoom(v: string) { return new CssProp("minZoom", v); } export function mixBlendMode(v: string) { return new CssProp("mixBlendMode", v); } export function objectFit(v: string) { return new CssProp("objectFit", v); } export function objectPosition(v: string) { return new CssProp("objectPosition", v); } export function offset(v: string) { return new CssProp("offset", v); } export function offsetDistance(v: string) { return new CssProp("offsetDistance", v); } export function offsetPath(v: string) { return new CssProp("offsetPath", v); } export function offsetRotate(v: string) { return new CssProp("offsetRotate", v); } export function opacity(v: string) { return new 
CssProp("opacity", v); } export function order(v: string) { return new CssProp("order", v); } export function orientation(v: string) { return new CssProp("orientation", v); } export function orphans(v: string) { return new CssProp("orphans", v); } export function outline(v: string) { return new CssProp("outline", v); } export function outlineColor(v: string) { return new CssProp("outlineColor", v); } export function outlineOffset(v: string) { return new CssProp("outlineOffset", v); } export function outlineStyle(v: string) { return new CssProp("outlineStyle", v); } export function outlineWidth(v: string | 0) { return new CssProp("outlineWidth", v); } export function overflow(v: string) { return new CssProp("overflow", v); } export function overflowAnchor(v: string) { return new CssProp("overflowAnchor", v); } export function overflowWrap(v: string) { return new CssProp("overflowWrap", v); } export function overflowX(v: string) { return new CssProp("overflowX", v); } export function overflowY(v: string) { return new CssProp("overflowY", v); } export function overscrollBehavior(v: string) { return new CssProp("overscrollBehavior", v); } export function overscrollBehaviorBlock(v: string) { return new CssProp("overscrollBehaviorBlock", v); } export function overscrollBehaviorInline(v: string) { return new CssProp("overscrollBehaviorInline", v); } export function overscrollBehaviorX(v: string) { return new CssProp("overscrollBehaviorX", v); } export function overscrollBehaviorY(v: string) { return new CssProp("overscrollBehaviorY", v); } export function padding(v: string | 0) { return new CssProp("padding", v); } export function paddingBlockEnd(v: string) { return new CssProp("paddingBlockEnd", v); } export function paddingBlockStart(v: string) { return new CssProp("paddingBlockStart", v); } export function paddingBottom(v: string | 0) { return new CssProp("paddingBottom", v); } export function paddingInlineEnd(v: string) { return new CssProp("paddingInlineEnd", v); } 
// Padding, page-break, paint/perspective, place-*, pointer/position, quotes,
// resize, ruby and scroll-margin/scroll-padding helpers follow.
export function paddingInlineStart(v: string) { return new CssProp("paddingInlineStart", v); } export function paddingLeft(v: string | 0) { return new CssProp("paddingLeft", v); } export function paddingRight(v: string | 0) { return new CssProp("paddingRight", v); } export function paddingTop(v: string | 0) { return new CssProp("paddingTop", v); } export function pageBreakAfter(v: string) { return new CssProp("pageBreakAfter", v); } export function pageBreakBefore(v: string) { return new CssProp("pageBreakBefore", v); } export function pageBreakInside(v: string) { return new CssProp("pageBreakInside", v); } export function paintOrder(v: string) { return new CssProp("paintOrder", v); } export function perspective(v: string) { return new CssProp("perspective", v); } export function perspectiveOrigin(v: string) { return new CssProp("perspectiveOrigin", v); } export function placeContent(v: string) { return new CssProp("placeContent", v); } export function placeItems(v: string) { return new CssProp("placeItems", v); } export function placeSelf(v: string) { return new CssProp("placeSelf", v); } export function pointerEvents(v: string) { return new CssProp("pointerEvents", v); } export function position(v: string) { return new CssProp("position", v); } export function quotes(v: string) { return new CssProp("quotes", v); } export function r(v: string) { return new CssProp("r", v); } export function resize(v: string) { return new CssProp("resize", v); } export function right(v: string | 0) { return new CssProp("right", v); } export function rowGap(v: string | 0) { return new CssProp("rowGap", v); } export function rubyPosition(v: string) { return new CssProp("rubyPosition", v); } export function rx(v: string) { return new CssProp("rx", v); } export function ry(v: string) { return new CssProp("ry", v); } export function scrollBehavior(v: string) { return new CssProp("scrollBehavior", v); } export function scrollMargin(v: string | 0) { return new CssProp("scrollMargin", v); 
} export function scrollMarginBlock(v: string) { return new CssProp("scrollMarginBlock", v); } export function scrollMarginBlockEnd(v: string) { return new CssProp("scrollMarginBlockEnd", v); } export function scrollMarginBlockStart(v: string) { return new CssProp("scrollMarginBlockStart", v); } export function scrollMarginBottom(v: string | 0) { return new CssProp("scrollMarginBottom", v); } export function scrollMarginInline(v: string) { return new CssProp("scrollMarginInline", v); } export function scrollMarginInlineEnd(v: string) { return new CssProp("scrollMarginInlineEnd", v); } export function scrollMarginInlineStart(v: string) { return new CssProp("scrollMarginInlineStart", v); } export function scrollMarginLeft(v: string | 0) { return new CssProp("scrollMarginLeft", v); } export function scrollMarginRight(v: string | 0) { return new CssProp("scrollMarginRight", v); } export function scrollMarginTop(v: string | 0) { return new CssProp("scrollMarginTop", v); } export function scrollPadding(v: string | 0) { return new CssProp("scrollPadding", v); } export function scrollPaddingBlock(v: string) { return new CssProp("scrollPaddingBlock", v); } export function scrollPaddingBlockEnd(v: string) { return new CssProp("scrollPaddingBlockEnd", v); } export function scrollPaddingBlockStart(v: string) { return new CssProp("scrollPaddingBlockStart", v); } export function scrollPaddingBottom(v: string | 0) { return new CssProp("scrollPaddingBottom", v); } export function scrollPaddingInline(v: string) { return new CssProp("scrollPaddingInline", v); } export function scrollPaddingInlineEnd(v: string) { return new CssProp("scrollPaddingInlineEnd", v); } export function scrollPaddingInlineStart(v: string) { return new CssProp("scrollPaddingInlineStart", v); } export function scrollPaddingLeft(v: string | 0) { return new CssProp("scrollPaddingLeft", v); } export function scrollPaddingRight(v: string | 0) { return new CssProp("scrollPaddingRight", v); } export function 
scrollPaddingTop(v: string | 0) { return new CssProp("scrollPaddingTop", v); } export function scrollSnapAlign(v: string) { return new CssProp("scrollSnapAlign", v); } export function scrollSnapStop(v: string) { return new CssProp("scrollSnapStop", v); } export function scrollSnapType(v: string) { return new CssProp("scrollSnapType", v); } export function shapeImageThreshold(v: string) { return new CssProp("shapeImageThreshold", v); } export function shapeMargin(v: string) { return new CssProp("shapeMargin", v); } export function shapeOutside(v: string) { return new CssProp("shapeOutside", v); } export function shapeRendering(v: string) { return new CssProp("shapeRendering", v); } export function speak(v: string) { return new CssProp("speak", v); } export function stopColor(v: string) { return new CssProp("stopColor", v); } export function stopOpacity(v: string) { return new CssProp("stopOpacity", v); } export function stroke(v: string) { return new CssProp("stroke", v); } export function strokeDasharray(v: string) { return new CssProp("strokeDasharray", v); } export function strokeDashoffset(v: string) { return new CssProp("strokeDashoffset", v); } export function strokeLinecap(v: string) { return new CssProp("strokeLinecap", v); } export function strokeLinejoin(v: string) { return new CssProp("strokeLinejoin", v); } export function strokeMiterlimit(v: string) { return new CssProp("strokeMiterlimit", v); } export function strokeOpacity(v: string) { return new CssProp("strokeOpacity", v); } export function strokeWidth(v: string | 0) { return new CssProp("strokeWidth", v); } export function tabSize(v: string) { return new CssProp("tabSize", v); } export function tableLayout(v: string) { return new CssProp("tableLayout", v); } export function textAlign(v: string) { return new CssProp("textAlign", v); } export function textAlignLast(v: string) { return new CssProp("textAlignLast", v); } export function textAnchor(v: string) { return new CssProp("textAnchor", v); } 
// Text, transform and transition helpers. zIndex/zoom accept numbers and are
// serialized with toFixed(0); NOTE(review): toFixed(0) rounds fractional zoom
// values (e.g. 1.5 -> "2") — confirm integer-only zoom is intended.
export function textCombineUpright(v: string) { return new CssProp("textCombineUpright", v); } export function textDecoration(v: string) { return new CssProp("textDecoration", v); } export function textDecorationColor(v: string) { return new CssProp("textDecorationColor", v); } export function textDecorationLine(v: string) { return new CssProp("textDecorationLine", v); } export function textDecorationSkipInk(v: string) { return new CssProp("textDecorationSkipInk", v); } export function textDecorationStyle(v: string) { return new CssProp("textDecorationStyle", v); } export function textIndent(v: string) { return new CssProp("textIndent", v); } export function textOrientation(v: string) { return new CssProp("textOrientation", v); } export function textOverflow(v: string) { return new CssProp("textOverflow", v); } export function textRendering(v: string) { return new CssProp("textRendering", v); } export function textShadow(v: string) { return new CssProp("textShadow", v); } export function textSizeAdjust(v: string) { return new CssProp("textSizeAdjust", v); } export function textTransform(v: string) { return new CssProp("textTransform", v); } export function textUnderlinePosition(v: string) { return new CssProp("textUnderlinePosition", v); } export function top(v: string | 0) { return new CssProp("top", v); } export function touchAction(v: string) { return new CssProp("touchAction", v); } export function transform(v: string) { return new CssProp("transform", v); } export function transformBox(v: string) { return new CssProp("transformBox", v); } export function transformOrigin(v: string) { return new CssProp("transformOrigin", v); } export function transformStyle(v: string) { return new CssProp("transformStyle", v); } export function transition(v: string) { return new CssProp("transition", v); } export function transitionDelay(v: string | 0) { return new CssProp("transitionDelay", v); } export function transitionDuration(v: string | 0) { return new 
CssProp("transitionDuration", v); } export function transitionProperty(v: string) { return new CssProp("transitionProperty", v); } export function transitionTimingFunction(v: string) { return new CssProp("transitionTimingFunction", v); } export function unicodeBidi(v: string) { return new CssProp("unicodeBidi", v); } export function unicodeRange(v: string) { return new CssProp("unicodeRange", v); } export function userSelect(v: string) { return new CssProp("userSelect", v); } export function userZoom(v: string) { return new CssProp("userZoom", v); } export function vectorEffect(v: string) { return new CssProp("vectorEffect", v); } export function verticalAlign(v: string) { return new CssProp("verticalAlign", v); } export function visibility(v: string) { return new CssProp("visibility", v); } export function whiteSpace(v: string) { return new CssProp("whiteSpace", v); } export function widows(v: string) { return new CssProp("widows", v); } export function width(v: string | 0) { return new CssProp("width", v); } export function willChange(v: string) { return new CssProp("willChange", v); } export function wordBreak(v: string) { return new CssProp("wordBreak", v); } export function wordSpacing(v: string) { return new CssProp("wordSpacing", v); } export function wordWrap(v: string) { return new CssProp("wordWrap", v); } export function writingMode(v: string) { return new CssProp("writingMode", v); } export function x(v: string | 0) { return new CssProp("x", v); } export function y(v: string | 0) { return new CssProp("y", v); } export function zIndex(v: number) { return new CssProp("zIndex", v.toFixed(0)); } export function zoom(v: number) { return new CssProp("zoom", v.toFixed(0)); } /** * A selection of fonts for preferred monospace rendering. 
**/
export function getMonospaceFonts() {
    return "ui-monospace, 'Droid Sans Mono', 'Cascadia Mono', 'Segoe UI Mono', 'Ubuntu Mono', 'Roboto Mono', Menlo, Monaco, Consolas, monospace";
}

/**
 * A `fontFamily` CssProp built from the preferred monospace font stack.
 **/
export function getMonospaceFamily() {
    return fontFamily(getMonospaceFonts());
}

/**
 * A selection of fonts that should match whatever the user's operating system normally uses.
 **/
export function getSystemFonts() {
    // BUG FIX: the original list read "'Noto Sans' 'Droid Sans'" with no comma
    // between the two names; in a comma-separated font-family list that parses
    // as a single malformed family name, so both fallbacks were ignored.
    return "system-ui, -apple-system, '.SFNSText-Regular', 'San Francisco', 'Segoe UI', 'Ubuntu', 'Roboto', 'Noto Sans', 'Droid Sans', sans-serif";
}

/**
 * A `fontFamily` CssProp built from the system font stack.
 **/
export function getSystemFamily() {
    return fontFamily(getSystemFonts());
}

/**
 * A selection of serif fonts.
 **/
export function getSerifFonts() {
    return "Georgia, Cambria, 'Times New Roman', Times, serif";
}

/**
 * A `fontFamily` CssProp built from the serif font stack.
 **/
export function getSerifFamily() {
    return fontFamily(getSerifFonts());
}

/**
 * One CSS rule (selector plus declarations) that can be appended to a live
 * CSSStyleSheet at runtime.
 **/
export class CSSInJSRule {
    constructor(private selector: string, private props: CssProp[]) { }

    /**
     * Serializes the declarations and appends this rule to the end of `sheet`.
     *
     * NOTE(review): assumes CssProp#name yields text valid inside a CSS rule.
     * The factories in this file pass camelCase names (e.g. "marginRight") —
     * confirm CssProp converts them to kebab-case, otherwise the browser's CSS
     * parser will drop those declarations.
     **/
    apply(sheet: CSSStyleSheet) {
        const style = this.props
            .map(prop => `${prop.name}: ${prop.value};`)
            .join("");
        sheet.insertRule(`${this.selector} {${style}}`, sheet.cssRules.length);
    }
}

/**
 * Convenience factory for a CSSInJSRule.
 **/
export function rule(selector: string, ...props: CssProp[]): CSSInJSRule {
    return new CSSInJSRule(selector, props);
}
the_stack
// Unit tests for the xLucene join-query helpers: VariableState allocates
// unique "$field_N" variable names (respecting pre-existing variables and
// values that are already "$name" references), and toXluceneQuery builds an
// xLucene query string plus a variables map from a plain input object,
// including geoDistance/geoPolygon/geoContainsPoint forms driven by the
// field's xLuceneFieldType.
import 'jest-extended'; import { xLuceneFieldType, GeoShapeType, GeoShapePoint, GeoShapePolygon, GeoShapeMultiPolygon, GeoShapeRelation, } from '@terascope/types'; import { VariableState, toXluceneQuery, CreateJoinQueryOptions } from '../../src/transforms/helpers'; describe('Utils', () => { describe('VariableState', () => { it('can return variables', () => { const vState = new VariableState(); expect(vState.getVariables()).toEqual({}); }); it('can set variables', () => { const vState = new VariableState(); const newVariableName = vState.createVariable('hello', 'world'); expect(newVariableName).toEqual('$hello_1'); expect(vState.getVariables()).toEqual({ hello_1: 'world' }); }); it('can set with same field', () => { const vState = new VariableState(); const newVariableName1 = vState.createVariable('hello', 'world'); const newVariableName2 = vState.createVariable('hello', 'goodbye'); expect(newVariableName1).toEqual('$hello_1'); expect(newVariableName2).toEqual('$hello_2'); expect(vState.getVariables()).toEqual({ hello_1: 'world', hello_2: 'goodbye' }); }); it('can respect existing variables', () => { const vState = new VariableState({ hello_1: 'stuff' }); const newVariableName = vState.createVariable('hello', 'world'); expect(newVariableName).toEqual('$hello_2'); expect(vState.getVariables()).toEqual({ hello_1: 'stuff', hello_2: 'world' }); }); it('can respect values that are already variables', () => { const variables = { person: { some: 'data' } }; const vState = new VariableState(variables); const newVariableName = vState.createVariable('hello', '$person'); expect(newVariableName).toEqual('$person'); expect(vState.getVariables()).toEqual(variables); }); it('will throw if value variable is not provided', () => { const variables = { other: { some: 'data' } }; const vState = new VariableState(variables); expect(() => vState.createVariable('hello', '$person')).toThrowError('Must provide variable "person" in the variables config'); }); }); describe('join queries', () => { it('will return an empty string with an empty input object', () => { const input = {}; const { query, variables } = toXluceneQuery(input); expect(query).toEqual(''); expect(variables).toEqual({}); }); it('queries with matching fields have differing variable names', () => { const input = { hello: 'world' }; const { query, variables } = toXluceneQuery(input, { variables: { hello_1: 'first' } }); expect(query).toEqual('hello: $hello_2'); expect(variables).toEqual({ hello_1: 'first', hello_2: 'world' }); }); it('inputs that have variable will be kept', () => { const input = { profile: '$person' }; const { query, variables } = toXluceneQuery(input, { variables: { person: 'John' } }); expect(query).toEqual('profile: $person'); expect(variables).toEqual({ person: 'John' }); }); it('will do basic "AND" joins with simple values', () => { const input = { hello: 'world', goodBye: 'Dave' }; const { query, variables } = toXluceneQuery(input); expect(query).toEqual('hello: $hello_1 AND goodBye: $goodBye_1'); // eslint-disable-next-line @typescript-eslint/naming-convention
expect(variables).toEqual({ hello_1: 'world', goodBye_1: 'Dave' }); }); it('will do basic "OR" joins with simple values', () => { const input = { hello: 'world', goodBye: 'Dave', myName: 'isSteve' }; const options: CreateJoinQueryOptions = { joinBy: 'OR' }; const { query, variables } = toXluceneQuery(input, options); expect(query).toEqual('hello: $hello_1 OR goodBye: $goodBye_1 OR myName: $myName_1'); // eslint-disable-next-line @typescript-eslint/naming-convention
expect(variables).toEqual({ hello_1: 'world', goodBye_1: 'Dave', myName_1: 'isSteve' }); }); it('values will be escaped properly', () => { const input = { hello: 'wor " ld', goodBye: '"Dave"' }; const { query, variables } = toXluceneQuery(input); expect(query).toEqual('hello: $hello_1 AND goodBye: $goodBye_1'); // eslint-disable-next-line @typescript-eslint/naming-convention
expect(variables).toEqual({ hello_1: 'wor " ld', goodBye_1: '"Dave"' 
}); }); it('array input values are passed through', () => { const input = { foo: 'bar', baz: [1, 2, 3] }; const options: CreateJoinQueryOptions = {}; const { query, variables } = toXluceneQuery(input, options); expect(query).toEqual('foo: $foo_1 AND baz: $baz_1'); expect(variables).toEqual({ foo_1: 'bar', baz_1: [1, 2, 3] }); }); it('can make a geoDistance join if field type is set to "geo"', () => { const input = { location: '60,90' }; const options: CreateJoinQueryOptions = { typeConfig: { location: xLuceneFieldType.Geo } }; const { query, variables } = toXluceneQuery(input, options); expect(query).toEqual('location:geoDistance(point: $point_1, distance: $distance_1)'); expect(variables).toEqual({ point_1: input.location, distance_1: '100m' }); }); // TODO: this test can be removed when GEO is removed from code
it('can add additional fieldParams for geoDistance', () => { const input = { location: '60,90' }; const options: CreateJoinQueryOptions = { fieldParams: { location: '50km' }, typeConfig: { location: xLuceneFieldType.Geo } }; const { query, variables } = toXluceneQuery(input, options); expect(query).toEqual('location:geoDistance(point: $point_1, distance: $distance_1)'); expect(variables).toEqual({ point_1: input.location, distance_1: '50km' }); }); it('can make a geoDistance join if field type is set to "geoPoint" with geoPoint data', () => { const input = { location: '60,90' }; const options: CreateJoinQueryOptions = { fieldParams: { location: '50km' }, typeConfig: { location: xLuceneFieldType.GeoPoint } }; const { query, variables } = toXluceneQuery(input, options); expect(query).toEqual('location:geoDistance(point: $point_1, distance: $distance_1)'); expect(variables).toEqual({ point_1: input.location, distance_1: '50km' }); }); it('will return an empty string if field type is set to "geoPoint" with bad data', () => { const input = { location: 23452345 }; const options: CreateJoinQueryOptions = { typeConfig: { location: xLuceneFieldType.GeoPoint } }; 
const { query } = toXluceneQuery(input, options); expect(query).toEqual(''); }); it('can make a geo geoDistance join if field type is set to "geoPoint" with GeoJSON point data', () => { const data: GeoShapePoint = { type: GeoShapeType.Point, coordinates: [90, 60] }; const input = { location: data }; const options: CreateJoinQueryOptions = { fieldParams: { location: '50km' }, typeConfig: { location: xLuceneFieldType.GeoPoint } }; const { query, variables } = toXluceneQuery(input, options); expect(query).toEqual('location:geoDistance(point: $point_1, distance: $distance_1)'); expect(variables).toEqual({ point_1: '60,90', distance_1: '50km' }); }); it('can make a geo polygon join if field type is set to "geoPoint" with GeoJSON polygon data', () => { const data: GeoShapePolygon = { type: GeoShapeType.Polygon, coordinates: [[[10, 10], [50, 10], [50, 50], [10, 50], [10, 10]]] }; const input = { location: data }; const options: CreateJoinQueryOptions = { typeConfig: { location: xLuceneFieldType.GeoPoint } }; const { query, variables } = toXluceneQuery(input, options); expect(query).toEqual('location:geoPolygon(points: $points_1)'); expect(variables).toEqual({ points_1: input.location }); }); it('can make a geo bbox join if field type is set to "geoPoint" with GeoJSON multi-polygon data', () => { const data: GeoShapeMultiPolygon = { type: GeoShapeType.MultiPolygon, coordinates: [ [[[10, 10], [50, 10], [50, 50], [10, 50]]], [[[-10, -10], [-50, -10], [-50, -50], [-10, -50]]] ] }; const input = { location: data }; const options: CreateJoinQueryOptions = { typeConfig: { location: xLuceneFieldType.GeoPoint } }; const { query, variables } = toXluceneQuery(input, options); expect(query).toEqual('location:geoPolygon(points: $points_1)'); expect(variables).toEqual({ points_1: input.location }); }); it('unrecognized geoJSON input with field type is set to "geoPoint" will return an empty string', () => { const data = { type: 'LineString', coordinates: [[1, 1], [3, 5], [6, 8]] }; 
const input = { location: data }; const options: CreateJoinQueryOptions = { typeConfig: { location: xLuceneFieldType.GeoPoint } }; const { query, variables } = toXluceneQuery(input, options); expect(query).toEqual(''); expect(variables).toEqual({}); }); it('will return an empty string if field type is set to "geoJSON" with bad data', () => { const input = { location: 23452345 }; const options: CreateJoinQueryOptions = { typeConfig: { location: xLuceneFieldType.GeoJSON } }; const { query, variables } = toXluceneQuery(input, options); expect(query).toEqual(''); expect(variables).toEqual({}); }); it('can make a geoContainsPoint join if field type is set to "geoJSON" with geoPoint data', () => { const input = { location: '60,90' }; const options: CreateJoinQueryOptions = { typeConfig: { location: xLuceneFieldType.GeoJSON } }; const { query, variables } = toXluceneQuery(input, options); expect(query).toEqual('location:geoContainsPoint(point: $point_1)'); expect(variables).toEqual({ point_1: '60,90' }); }); it('can make a geoContainsPoint join if field type is set to "geoJSON" with geoJSON point data', () => { const data: GeoShapePoint = { type: GeoShapeType.Point, coordinates: [90, 60] }; const input = { location: data }; const options: CreateJoinQueryOptions = { typeConfig: { location: xLuceneFieldType.GeoJSON } }; const { query, variables } = toXluceneQuery(input, options); expect(query).toEqual('location:geoContainsPoint(point: $point_1)'); expect(variables).toEqual({ point_1: '60,90' }); }); it('can make a geoPolygon join if field type is set to "geoJSON" with geoJSON polygon data', () => { const data: GeoShapePolygon = { type: GeoShapeType.Polygon, coordinates: [[[10, 10], [50, 10], [50, 50], [10, 50]]] }; const input = { location: data }; const options: CreateJoinQueryOptions = { typeConfig: { location: xLuceneFieldType.GeoJSON } }; const { query, variables } = toXluceneQuery(input, options); expect(query).toEqual('location:geoPolygon(points: $points_1)'); 
expect(variables).toEqual({ points_1: input.location }); }); it('can make a geoPolygon join if field type is set to "geoJSON" with geoJSON polygon data and fieldParam disjoint', () => { const data: GeoShapePolygon = { type: GeoShapeType.Polygon, coordinates: [[[10, 10], [50, 10], [50, 50], [10, 50]]] }; const input = { location: data }; const options: CreateJoinQueryOptions = { fieldParams: { location: GeoShapeRelation.Disjoint }, typeConfig: { location: xLuceneFieldType.GeoJSON } }; const { query, variables } = toXluceneQuery(input, options); expect(query).toEqual('location:geoPolygon(points: $points_1, relation: $relation_1)'); expect(variables).toEqual({ points_1: input.location, relation_1: 'disjoint' }); }); it('can make a geoPolygon join if field type is set to "geoJSON" with geoJSON multi-polygon data', () => { const data: GeoShapeMultiPolygon = { type: GeoShapeType.MultiPolygon, coordinates: [ [[[10, 10], [50, 10], [50, 50], [10, 50]]], [[[-10, -10], [-50, -10], [-50, -50], [-10, -50]]] ] }; const input = { location: data }; const options: CreateJoinQueryOptions = { typeConfig: { location: xLuceneFieldType.GeoJSON } }; const { query, variables } = toXluceneQuery(input, options); expect(query).toEqual('location:geoPolygon(points: $points_1)'); expect(variables).toEqual({ points_1: input.location }); }); it('can make a geoPolygon join if field type is set to "geoJSON" with geoJSON polygon data with holes', () => { const data: GeoShapePolygon = { type: GeoShapeType.Polygon, coordinates: [ [[100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0]], [[100.2, 0.2], [100.8, 0.2], [100.8, 0.8], [100.2, 0.8], [100.2, 0.2]] ] }; const input = { location: data }; const options: CreateJoinQueryOptions = { typeConfig: { location: xLuceneFieldType.GeoJSON } }; const { query, variables } = toXluceneQuery(input, options); expect(query).toEqual('location:geoPolygon(points: $points_1)'); expect(variables).toEqual({ points_1: input.location }); }); it('can make a geoPolygon join if field type is set to "GeoPoint" with geoJSON polygon data with holes and relation set to disjoint', () => { const data: GeoShapePolygon = { type: GeoShapeType.Polygon, coordinates: [ [[100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0]], [[100.2, 0.2], [100.8, 0.2], [100.8, 0.8], [100.2, 0.8], [100.2, 0.2]] ] }; const input = { location: data }; const options: CreateJoinQueryOptions = { fieldParams: { location: GeoShapeRelation.Within }, typeConfig: { location: xLuceneFieldType.GeoPoint } }; const { query, variables } = toXluceneQuery(input, options); expect(query).toEqual('location:geoPolygon(points: $points_1, relation: $relation_1)'); expect(variables).toEqual({ points_1: input.location, relation_1: 'within' }); }); it('can make a geoPolygon join if field type is set to "GeoPoint" with geoJSON multi-polygon data that has holes', () => { const data: GeoShapeMultiPolygon = { type: GeoShapeType.MultiPolygon, coordinates: [ [ [[10, 10], [50, 10], [50, 50], [10, 50], [10, 10]], [[20, 20], [40, 20], [40, 40], [20, 40], [20, 20]] ], [ [[-10, -10], [-50, -10], [-50, -50], [-10, -50], [-10, -10]], [[-20, -20], [-40, -20], [-40, -40], [-20, -40], [-20, -20]] ] ] }; const input = { location: data }; const options: CreateJoinQueryOptions = { typeConfig: { location: xLuceneFieldType.GeoPoint } }; const { query, variables } = toXluceneQuery(input, options); expect(query).toEqual('location:geoPolygon(points: $points_1)'); expect(variables).toEqual({ points_1: input.location }); }); it('can make a geoPolygon join if field type is set to "geoJSON" with geoJSON polygon data with holes and relation set to disjoint', () => { const data: GeoShapePolygon = { type: GeoShapeType.Polygon, coordinates: [ [[100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0]], [[100.2, 0.2], [100.8, 0.2], [100.8, 0.8], [100.2, 0.8], [100.2, 0.2]] ] }; const input = { location: data }; const options: CreateJoinQueryOptions = { fieldParams: { location: 
GeoShapeRelation.Within }, typeConfig: { location: xLuceneFieldType.GeoJSON } }; const { query, variables } = toXluceneQuery(input, options); expect(query).toEqual('location:geoPolygon(points: $points_1, relation: $relation_1)'); expect(variables).toEqual({ points_1: input.location, relation_1: 'within' }); }); it('can make a geoPolygon join if field type is set to "geoJSON" with geoJSON multi-polygon data that has holes', () => { const data: GeoShapeMultiPolygon = { type: GeoShapeType.MultiPolygon, coordinates: [ [ [[10, 10], [50, 10], [50, 50], [10, 50], [10, 10]], [[20, 20], [40, 20], [40, 40], [20, 40], [20, 20]] ], [ [[-10, -10], [-50, -10], [-50, -50], [-10, -50], [-10, -10]], [[-20, -20], [-40, -20], [-40, -40], [-20, -40], [-20, -20]] ] ] }; const input = { location: data }; const options: CreateJoinQueryOptions = { typeConfig: { location: xLuceneFieldType.GeoJSON } }; const { query, variables } = toXluceneQuery(input, options); expect(query).toEqual('location:geoPolygon(points: $points_1)'); expect(variables).toEqual({ points_1: input.location }); }); }); });
the_stack
import { encodeERC20AssetData, ERC20ProxyContract, ERC20Wrapper } from '@0x/contracts-asset-proxy'; import { DummyERC20TokenContract } from '@0x/contracts-erc20'; import { blockchainTests, constants, expect, LogDecoder, OrderFactory, orderHashUtils, orderUtils, randomAddress, TransactionFactory, transactionHashUtils, } from '@0x/contracts-test-utils'; import { SignatureType, SignedOrder, SignedZeroExTransaction } from '@0x/types'; import { BigNumber, ExchangeRevertErrors, hexUtils, StringRevertError } from '@0x/utils'; import { LogWithDecodedArgs } from 'ethereum-types'; import ethUtil = require('ethereumjs-util'); import { artifacts } from './artifacts'; import { ExchangeContract, ExchangeSignatureValidatorApprovalEventArgs, IEIP1271DataContract, TestValidatorWalletContract, } from './wrappers'; enum ValidatorWalletAction { Reject = 0, Accept = 1, Revert = 2, UpdateState = 3, MatchSignatureHash = 4, ReturnTrue = 5, ReturnNothing = 6, NTypes = 7, } // tslint:disable:no-unnecessary-type-assertion blockchainTests.resets('MixinSignatureValidator', env => { let chainId: number; let exchange: ExchangeContract; let validatorWallet: TestValidatorWalletContract; let validatorWalletRevertReason: string; let signerAddress: string; let signerPrivateKey: Buffer; let notSignerAddress: string; let accounts: string[]; let owner: string; let makerAddress: string; let takerAddress: string; let feeRecipientAddress: string; const eip1271Data = new IEIP1271DataContract(constants.NULL_ADDRESS, env.provider, env.txDefaults); before(async () => { chainId = await env.getChainIdAsync(); accounts = await env.getAccountAddressesAsync(); [owner, signerAddress, notSignerAddress, makerAddress, takerAddress, feeRecipientAddress] = accounts; exchange = await ExchangeContract.deployFrom0xArtifactAsync( artifacts.Exchange, env.provider, env.txDefaults, {}, new BigNumber(chainId), ); validatorWallet = await TestValidatorWalletContract.deployFrom0xArtifactAsync( artifacts.TestValidatorWallet, 
env.provider, env.txDefaults, {}, exchange.address, ); validatorWalletRevertReason = await validatorWallet.REVERT_REASON().callAsync(); // Approve the validator for both signers. await Promise.all( [signerAddress, notSignerAddress].map(async (addr: string) => { return exchange .setSignatureValidatorApproval(validatorWallet.address, true) .awaitTransactionSuccessAsync({ from: addr }); }), ); signerPrivateKey = constants.TESTRPC_PRIVATE_KEYS[accounts.indexOf(signerAddress)]; }); const SIGNATURE_LENGTH = 65; const generateRandomSignature = (): string => hexUtils.random(SIGNATURE_LENGTH); const hashBytes = (bytesHex: string): string => ethUtil.bufferToHex(ethUtil.sha3(ethUtil.toBuffer(bytesHex))); const signDataHex = (dataHex: string, privateKey: Buffer): string => { const ecSignature = ethUtil.ecsign(ethUtil.toBuffer(dataHex), privateKey); return hexUtils.concat(ecSignature.v, ecSignature.r, ecSignature.s); }; type ValidateHashSignatureAsync = ( hashHex: string, signerAddress: string, signatureHex: string, validatorAction?: ValidatorWalletAction, validatorExpectedSignatureHex?: string, ) => Promise<any>; const createHashSignatureTests = ( getCurrentHashHex: (signerAddress?: string) => string, validateAsync: ValidateHashSignatureAsync, ) => { it('should revert when signature is empty', async () => { const hashHex = getCurrentHashHex(); const emptySignature = constants.NULL_BYTES; const expectedError = new ExchangeRevertErrors.SignatureError( ExchangeRevertErrors.SignatureErrorCode.InvalidLength, hashHex, signerAddress, emptySignature, ); const tx = validateAsync(hashHex, signerAddress, emptySignature); return expect(tx).to.revertWith(expectedError); }); it('should revert when signature type is unsupported', async () => { const hashHex = getCurrentHashHex(); const signatureHex = hexUtils.concat(SignatureType.NSignatureTypes); const expectedError = new ExchangeRevertErrors.SignatureError( ExchangeRevertErrors.SignatureErrorCode.Unsupported, hashHex, signerAddress, 
signatureHex,
);
const tx = validateAsync(hashHex, signerAddress, signatureHex);
return expect(tx).to.revertWith(expectedError);
});
// --- Shared signature-type cases: run against each validation entry point via `validateAsync`. ---
// SignatureType.Illegal is the explicit "never valid" type and must always revert.
it('should revert when SignatureType=Illegal', async () => {
    const hashHex = getCurrentHashHex();
    const signatureHex = hexUtils.concat(SignatureType.Illegal);
    const expectedError = new ExchangeRevertErrors.SignatureError(
        ExchangeRevertErrors.SignatureErrorCode.Illegal,
        hashHex,
        signerAddress,
        signatureHex,
    );
    const tx = validateAsync(hashHex, signerAddress, signatureHex);
    return expect(tx).to.revertWith(expectedError);
});
it('should return false when SignatureType=Invalid and signature has a length of zero', async () => {
    const hashHex = getCurrentHashHex();
    const signatureHex = hexUtils.concat(SignatureType.Invalid);
    const isValidSignature = await validateAsync(hashHex, signerAddress, signatureHex);
    expect(isValidSignature).to.be.false();
});
it('should revert when SignatureType=Invalid and signature length is non-zero', async () => {
    const hashHex = getCurrentHashHex();
    const signatureHex = hexUtils.concat('0xdeadbeef', SignatureType.Invalid);
    const expectedError = new ExchangeRevertErrors.SignatureError(
        ExchangeRevertErrors.SignatureErrorCode.InvalidLength,
        hashHex,
        signerAddress,
        signatureHex,
    );
    const tx = validateAsync(hashHex, signerAddress, signatureHex);
    return expect(tx).to.revertWith(expectedError);
});
it('should return true when SignatureType=EIP712 and signature is valid', async () => {
    const hashHex = getCurrentHashHex();
    const signatureHex = hexUtils.concat(signDataHex(hashHex, signerPrivateKey), SignatureType.EIP712);
    const isValidSignature = await validateAsync(hashHex, signerAddress, signatureHex);
    expect(isValidSignature).to.be.true();
});
it('should return false when SignatureType=EIP712 and signature is invalid', async () => {
    const hashHex = getCurrentHashHex();
    const signatureHex = hexUtils.concat(generateRandomSignature(), SignatureType.EIP712);
    const isValidSignature = await validateAsync(hashHex, signerAddress, signatureHex);
    expect(isValidSignature).to.be.false();
});
it('should return true when SignatureType=EthSign and signature is valid', async () => {
    // Create EthSign signature
    const hashHex = getCurrentHashHex();
    const orderHashWithEthSignPrefixHex = ethUtil.bufferToHex(
        ethUtil.hashPersonalMessage(ethUtil.toBuffer(hashHex)),
    );
    const signatureHex = hexUtils.concat(
        signDataHex(orderHashWithEthSignPrefixHex, signerPrivateKey),
        SignatureType.EthSign,
    );
    const isValidSignature = await validateAsync(hashHex, signerAddress, signatureHex);
    expect(isValidSignature).to.be.true();
});
it('should return false when SignatureType=EthSign and signature is invalid', async () => {
    const hashHex = getCurrentHashHex();
    // Create EthSign signature
    const signatureHex = hexUtils.concat(generateRandomSignature(), SignatureType.EthSign);
    const isValidSignature = await validateAsync(hashHex, signerAddress, signatureHex);
    expect(isValidSignature).to.be.false();
});
it('should return true when SignatureType=Wallet and signature is valid', async () => {
    const hashHex = getCurrentHashHex(validatorWallet.address);
    // Doesn't have to contain a real signature since our wallet contract
    // just does a hash comparison.
    const signatureDataHex = generateRandomSignature();
    const signatureHex = hexUtils.concat(signatureDataHex, SignatureType.Wallet);
    const isValidSignature = await validateAsync(
        hashHex,
        validatorWallet.address,
        signatureHex,
        ValidatorWalletAction.MatchSignatureHash,
        signatureDataHex,
    );
    expect(isValidSignature).to.be.true();
});
it('should return false when SignatureType=Wallet and signature is invalid', async () => {
    const hashHex = getCurrentHashHex(validatorWallet.address);
    // Doesn't have to contain a real signature since our wallet contract
    // just does a hash comparison.
    const signatureDataHex = generateRandomSignature();
    const notSignatureDataHex = generateRandomSignature();
    const signatureHex = hexUtils.concat(notSignatureDataHex, SignatureType.Wallet);
    // Validate signature
    const isValidSignature = await validateAsync(
        hashHex,
        validatorWallet.address,
        signatureHex,
        ValidatorWalletAction.MatchSignatureHash,
        signatureDataHex,
    );
    expect(isValidSignature).to.be.false();
});
// Wallet validation is a static/read-only call, so a validator that tries to
// write state must cause a revert.
it('should revert when validator attempts to update state and SignatureType=Wallet', async () => {
    const hashHex = getCurrentHashHex(validatorWallet.address);
    // Doesn't have to contain a real signature since our wallet contract
    // just does a hash comparison.
    const signatureHex = hexUtils.concat(generateRandomSignature(), SignatureType.Wallet);
    const expectedError = new ExchangeRevertErrors.SignatureWalletError(
        hashHex,
        validatorWallet.address,
        signatureHex,
        constants.NULL_BYTES,
    );
    const tx = validateAsync(hashHex, validatorWallet.address, signatureHex, ValidatorWalletAction.UpdateState);
    return expect(tx).to.revertWith(expectedError);
});
it('should revert when signer is an EOA and SignatureType=Wallet', async () => {
    const hashHex = getCurrentHashHex();
    const signatureHex = hexUtils.concat(SignatureType.Wallet);
    const expectedError = new ExchangeRevertErrors.SignatureWalletError(
        hashHex,
        signerAddress,
        signatureHex,
        constants.NULL_BYTES,
    );
    const tx = validateAsync(hashHex, signerAddress, signatureHex);
    return expect(tx).to.revertWith(expectedError);
});
// NOTE(review): `ReturnTrue` presumably means the wallet returns a plain `true`
// rather than the expected magic value, hence invalid — confirm against the
// TestValidatorWallet contract.
it('should return false when validator returns `true` and SignatureType=Wallet', async () => {
    const hashHex = getCurrentHashHex();
    const signatureHex = hexUtils.concat(SignatureType.Wallet);
    const isValidSignature = await validateAsync(
        hashHex,
        validatorWallet.address,
        signatureHex,
        ValidatorWalletAction.ReturnTrue,
    );
    expect(isValidSignature).to.be.false();
});
it('should revert when validator returns nothing and SignatureType=Wallet', async () => {
    const hashHex = getCurrentHashHex(validatorWallet.address);
    const signatureHex = hexUtils.concat(SignatureType.Wallet);
    const expectedError = new ExchangeRevertErrors.SignatureWalletError(
        hashHex,
        validatorWallet.address,
        signatureHex,
        constants.NULL_BYTES,
    );
    const tx = validateAsync(
        hashHex,
        validatorWallet.address,
        signatureHex,
        ValidatorWalletAction.ReturnNothing,
    );
    return expect(tx).to.revertWith(expectedError);
});
it('should revert when validator reverts and SignatureType=Wallet', async () => {
    const hashHex = getCurrentHashHex(validatorWallet.address);
    // Doesn't have to contain a real signature since our wallet contract
    // just does a hash comparison.
    const signatureHex = hexUtils.concat(generateRandomSignature(), SignatureType.Wallet);
    const expectedError = new ExchangeRevertErrors.SignatureWalletError(
        hashHex,
        validatorWallet.address,
        signatureHex,
        new StringRevertError(validatorWalletRevertReason).encode(),
    );
    const tx = validateAsync(hashHex, validatorWallet.address, signatureHex, ValidatorWalletAction.Revert);
    return expect(tx).to.revertWith(expectedError);
});
it('should return true when SignatureType=Presigned and signer has presigned hash', async () => {
    const hashHex = getCurrentHashHex();
    // Presign the hash
    await exchange.preSign(hashHex).awaitTransactionSuccessAsync({ from: signerAddress });
    // Validate presigned signature
    const signatureHex = hexUtils.concat(SignatureType.PreSigned);
    const isValidSignature = await validateAsync(hashHex, signerAddress, signatureHex);
    expect(isValidSignature).to.be.true();
});
it('should return false when SignatureType=Presigned and signer has not presigned hash', async () => {
    const hashHex = getCurrentHashHex();
    const signatureHex = hexUtils.concat(SignatureType.PreSigned);
    const isValidSignature = await validateAsync(hashHex, signerAddress, signatureHex);
    expect(isValidSignature).to.be.false();
});
};
describe('isValidHashSignature', () => {
    let hashHex: string;
    // Fresh pseudo-random hash per test so cases don't interfere.
    beforeEach(async () => {
        hashHex =
orderUtils.generatePseudoRandomOrderHash();
});
// Calls `isValidHashSignature`, optionally priming the validator wallet first
// with the action to take and the signature hash it should accept.
const validateAsync = async (
    _hashHex: string,
    _signerAddress: string,
    signatureHex: string,
    validatorAction?: ValidatorWalletAction,
    validatorExpectedSignatureHex?: string,
) => {
    const expectedSignatureHashHex =
        validatorExpectedSignatureHex === undefined
            ? constants.NULL_BYTES
            : hashBytes(validatorExpectedSignatureHex);
    if (validatorAction !== undefined) {
        await validatorWallet
            .prepare(_hashHex, validatorAction, expectedSignatureHashHex)
            .awaitTransactionSuccessAsync();
    }
    return exchange.isValidHashSignature(_hashHex, _signerAddress, signatureHex).callAsync();
};
it('should revert when signerAddress == 0', async () => {
    const signatureHex = hexUtils.concat(SignatureType.EIP712);
    const expectedError = new ExchangeRevertErrors.SignatureError(
        ExchangeRevertErrors.SignatureErrorCode.InvalidSigner,
        hashHex,
        constants.NULL_ADDRESS,
        signatureHex,
    );
    const tx = validateAsync(hashHex, constants.NULL_ADDRESS, signatureHex);
    return expect(tx).to.revertWith(expectedError);
});
// Validator/EIP1271Wallet types need the full order/transaction payload, so
// they are inappropriate for the hash-only entry point and must revert.
it('should revert when SignatureType=Validator', async () => {
    const signatureHex = hexUtils.concat(SignatureType.Validator);
    const expectedError = new ExchangeRevertErrors.SignatureError(
        ExchangeRevertErrors.SignatureErrorCode.InappropriateSignatureType,
        hashHex,
        signerAddress,
        signatureHex,
    );
    const tx = validateAsync(hashHex, signerAddress, signatureHex);
    return expect(tx).to.revertWith(expectedError);
});
it('should revert when SignatureType=EIP1271Wallet', async () => {
    const signatureHex = hexUtils.concat(SignatureType.EIP1271Wallet);
    const expectedError = new ExchangeRevertErrors.SignatureError(
        ExchangeRevertErrors.SignatureErrorCode.InappropriateSignatureType,
        hashHex,
        signerAddress,
        signatureHex,
    );
    const tx = validateAsync(hashHex, signerAddress, signatureHex);
    return expect(tx).to.revertWith(expectedError);
});
// Regression tests with fixed hardware-wallet signatures (fixed signer/v/r/s).
it('should return true when message was signed by a Trezor One (firmware version 1.6.2)', async () => {
    // messageHash translates to 0x2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b
    const messageHash = ethUtil.bufferToHex(ethUtil.toBuffer('++++++++++++++++++++++++++++++++'));
    const signer = '0xc28b145f10f0bcf0fc000e778615f8fd73490bad';
    const v = ethUtil.toBuffer('0x1c');
    const r = ethUtil.toBuffer('0x7b888b596ccf87f0bacab0dcb483124973f7420f169b4824d7a12534ac1e9832');
    const s = ethUtil.toBuffer('0x0c8e14f7edc01459e13965f1da56e0c23ed11e2cca932571eee1292178f90424');
    const trezorSignatureType = ethUtil.toBuffer(`0x${SignatureType.EthSign}`);
    const signature = Buffer.concat([v, r, s, trezorSignatureType]);
    const signatureHex = ethUtil.bufferToHex(signature);
    const isValidSignature = await exchange.isValidHashSignature(messageHash, signer, signatureHex).callAsync();
    expect(isValidSignature).to.be.true();
});
it('should return true when message was signed by a Trezor Model T (firmware version 2.0.7)', async () => {
    // messageHash translates to 0x2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b2b
    const messageHash = ethUtil.bufferToHex(ethUtil.toBuffer('++++++++++++++++++++++++++++++++'));
    const signer = '0x98ce6d9345e8ffa7d99ee0822272fae9d2c0e895';
    const v = ethUtil.toBuffer('0x1c');
    const r = ethUtil.toBuffer('0x423b71062c327f0ec4fe199b8da0f34185e59b4c1cb4cc23df86cac4a601fb3f');
    const s = ethUtil.toBuffer('0x53810d6591b5348b7ee08ee812c874b0fdfb942c9849d59512c90e295221091f');
    const trezorSignatureType = ethUtil.toBuffer(`0x${SignatureType.EthSign}`);
    const signature = Buffer.concat([v, r, s, trezorSignatureType]);
    const signatureHex = ethUtil.bufferToHex(signature);
    const isValidSignature = await exchange.isValidHashSignature(messageHash, signer, signatureHex).callAsync();
    expect(isValidSignature).to.be.true();
});
// Also run the shared signature-type suite against this entry point.
createHashSignatureTests((_signerAddress?: string) => hashHex, validateAsync);
});
describe('isValidOrderSignature', () => {
    let orderFactory: OrderFactory;
    let signedOrder: SignedOrder;
    before(async () => {
        const defaultOrderParams = {
...constants.STATIC_ORDER_PARAMS,
makerAddress: signerAddress,
feeRecipientAddress: randomAddress(),
makerAssetData: encodeERC20AssetData(randomAddress()),
takerAssetData: encodeERC20AssetData(randomAddress()),
makerFeeAssetData: encodeERC20AssetData(randomAddress()),
takerFeeAssetData: encodeERC20AssetData(randomAddress()),
makerFee: constants.ZERO_AMOUNT,
takerFee: constants.ZERO_AMOUNT,
exchangeAddress: exchange.address,
chainId,
};
orderFactory = new OrderFactory(signerPrivateKey, defaultOrderParams);
});
// Fresh signed order per test; several cases mutate `signedOrder.makerAddress`.
beforeEach(async () => {
    signedOrder = await orderFactory.newSignedOrderAsync();
});
// Calls `isValidOrderSignature`, optionally priming the validator wallet first.
const validateAsync = async (
    order: SignedOrder,
    signatureHex: string,
    validatorAction?: ValidatorWalletAction,
    validatorExpectedSignatureHex?: string,
) => {
    const orderHashHex = orderHashUtils.getOrderHashHex(order);
    const expectedSignatureHashHex =
        validatorExpectedSignatureHex === undefined
            ? constants.NULL_BYTES
            : hashBytes(validatorExpectedSignatureHex);
    if (validatorAction !== undefined) {
        await validatorWallet
            .prepare(orderHashHex, validatorAction, expectedSignatureHashHex)
            .awaitTransactionSuccessAsync();
    }
    return exchange.isValidOrderSignature(order, signatureHex).callAsync();
};
it('should revert when signerAddress == 0', async () => {
    const signatureHex = hexUtils.concat(SignatureType.EIP712);
    const nullMakerOrder = {
        ...signedOrder,
        makerAddress: constants.NULL_ADDRESS,
    };
    const orderHashHex = orderHashUtils.getOrderHashHex(nullMakerOrder);
    const expectedError = new ExchangeRevertErrors.SignatureError(
        ExchangeRevertErrors.SignatureErrorCode.InvalidSigner,
        orderHashHex,
        constants.NULL_ADDRESS,
        signatureHex,
    );
    const tx = exchange.isValidOrderSignature(nullMakerOrder, signatureHex).callAsync();
    return expect(tx).to.revertWith(expectedError);
});
it('should return true when SignatureType=Validator, signature is valid and validator is approved', async () => {
    // Doesn't have to contain a real signature since our wallet contract
    // just does a hash comparison.
    const signatureDataHex = generateRandomSignature();
    const signatureHex = hexUtils.concat(signatureDataHex, validatorWallet.address, SignatureType.Validator);
    const isValidSignature = await validateAsync(
        signedOrder,
        signatureHex,
        ValidatorWalletAction.MatchSignatureHash,
        signatureDataHex,
    );
    expect(isValidSignature).to.be.true();
});
it('should return false when SignatureType=Validator, signature is invalid and validator is approved', async () => {
    // Doesn't have to contain a real signature since our wallet contract
    // just does a hash comparison.
    const signatureDataHex = generateRandomSignature();
    const notSignatureDataHex = generateRandomSignature();
    const signatureHex = hexUtils.concat(notSignatureDataHex, validatorWallet.address, SignatureType.Validator);
    const isValidSignature = await validateAsync(
        signedOrder,
        signatureHex,
        ValidatorWalletAction.MatchSignatureHash,
        signatureDataHex,
    );
    expect(isValidSignature).to.be.false();
});
it('should return false when validator returns `true` and SignatureType=Validator', async () => {
    const signatureDataHex = generateRandomSignature();
    const signatureHex = hexUtils.concat(signatureDataHex, validatorWallet.address, SignatureType.Validator);
    const isValidSignature = await validateAsync(
        signedOrder,
        signatureHex,
        ValidatorWalletAction.ReturnTrue,
        signatureDataHex,
    );
    expect(isValidSignature).to.be.false();
});
it('should revert when validator returns nothing and SignatureType=Validator', async () => {
    const signatureDataHex = generateRandomSignature();
    const signatureHex = hexUtils.concat(signatureDataHex, validatorWallet.address, SignatureType.Validator);
    const orderHashHex = orderHashUtils.getOrderHashHex(signedOrder);
    const data = eip1271Data.OrderWithHash(signedOrder, orderHashHex).getABIEncodedTransactionData();
    const expectedError = new ExchangeRevertErrors.EIP1271SignatureError(
        validatorWallet.address,
        data,
        signatureHex,
        constants.NULL_BYTES,
    );
    const tx = validateAsync(signedOrder, signatureHex, ValidatorWalletAction.ReturnNothing, signatureDataHex);
    return expect(tx).to.revertWith(expectedError);
});
it('should revert when validator attempts to update state and SignatureType=Validator', async () => {
    // Doesn't have to contain a real signature since our wallet contract
    // just does a hash comparison.
    const signatureDataHex = generateRandomSignature();
    const signatureHex = hexUtils.concat(signatureDataHex, validatorWallet.address, SignatureType.Validator);
    const orderHashHex = orderHashUtils.getOrderHashHex(signedOrder);
    const data = eip1271Data.OrderWithHash(signedOrder, orderHashHex).getABIEncodedTransactionData();
    const expectedError = new ExchangeRevertErrors.EIP1271SignatureError(
        validatorWallet.address,
        data,
        signatureHex,
        constants.NULL_BYTES,
    );
    const tx = validateAsync(signedOrder, signatureHex, ValidatorWalletAction.UpdateState);
    return expect(tx).to.revertWith(expectedError);
});
it('should revert when validator reverts and SignatureType=Validator', async () => {
    // Doesn't have to contain a real signature since our wallet contract
    // just does a hash comparison.
    const signatureDataHex = generateRandomSignature();
    const signatureHex = hexUtils.concat(signatureDataHex, validatorWallet.address, SignatureType.Validator);
    const orderHashHex = orderHashUtils.getOrderHashHex(signedOrder);
    const data = eip1271Data.OrderWithHash(signedOrder, orderHashHex).getABIEncodedTransactionData();
    const expectedError = new ExchangeRevertErrors.EIP1271SignatureError(
        validatorWallet.address,
        data,
        signatureHex,
        new StringRevertError(validatorWalletRevertReason).encode(),
    );
    const tx = validateAsync(signedOrder, signatureHex, ValidatorWalletAction.Revert);
    return expect(tx).to.revertWith(expectedError);
});
// A Validator signature must at least hold a 20-byte validator address plus
// the 1-byte signature type.
it('should revert when SignatureType=Validator and signature is shorter than 21 bytes', async () => {
    // Set approval of signature validator to false
    await exchange
        .setSignatureValidatorApproval(validatorWallet.address, false)
        .awaitTransactionSuccessAsync({ from: signedOrder.makerAddress });
    // Doesn't have to contain a real signature since our wallet contract
    // just does a hash comparison.
    const signatureHex = hexUtils.concat(SignatureType.Validator);
    const orderHashHex = orderHashUtils.getOrderHashHex(signedOrder);
    const expectedError = new ExchangeRevertErrors.SignatureError(
        ExchangeRevertErrors.SignatureErrorCode.InvalidLength,
        orderHashHex,
        signedOrder.makerAddress,
        signatureHex,
    );
    const tx = validateAsync(signedOrder, signatureHex, ValidatorWalletAction.MatchSignatureHash);
    return expect(tx).to.revertWith(expectedError);
});
it('should revert when SignatureType=Validator, signature is valid and validator is not approved', async () => {
    // Set approval of signature validator to false
    await exchange
        .setSignatureValidatorApproval(validatorWallet.address, false)
        .awaitTransactionSuccessAsync({ from: signedOrder.makerAddress });
    // Doesn't have to contain a real signature since our wallet contract
    // just does a hash comparison.
    const signatureDataHex = generateRandomSignature();
    const signatureHex = hexUtils.concat(signatureDataHex, validatorWallet.address, SignatureType.Validator);
    const expectedError = new ExchangeRevertErrors.SignatureValidatorNotApprovedError(
        signedOrder.makerAddress,
        validatorWallet.address,
    );
    const tx = validateAsync(signedOrder, signatureHex, ValidatorWalletAction.Revert);
    return expect(tx).to.revertWith(expectedError);
});
it('should return true when SignatureType=EIP1271Wallet and signature is valid', async () => {
    signedOrder.makerAddress = validatorWallet.address;
    // Doesn't have to contain a real signature since our wallet contract
    // just does a hash comparison.
    const signatureDataHex = generateRandomSignature();
    const signatureHex = hexUtils.concat(signatureDataHex, SignatureType.EIP1271Wallet);
    // Validate signature
    const isValidSignature = await validateAsync(
        signedOrder,
        signatureHex,
        ValidatorWalletAction.MatchSignatureHash,
        signatureDataHex,
    );
    expect(isValidSignature).to.be.true();
});
it('should return false when SignatureType=EIP1271Wallet and signature is invalid', async () => {
    signedOrder.makerAddress = validatorWallet.address;
    // Doesn't have to contain a real signature since our wallet contract
    // just does a hash comparison.
    const signatureDataHex = generateRandomSignature();
    const notSignatureDataHex = generateRandomSignature();
    const signatureHex = hexUtils.concat(notSignatureDataHex, SignatureType.EIP1271Wallet);
    // Validate signature
    const isValidSignature = await validateAsync(
        signedOrder,
        signatureHex,
        ValidatorWalletAction.MatchSignatureHash,
        signatureDataHex,
    );
    expect(isValidSignature).to.be.false();
});
it('should return false when validator returns `true` and SignatureType=EIP1271Wallet', async () => {
    signedOrder.makerAddress = validatorWallet.address;
    const signatureDataHex = generateRandomSignature();
    const signatureHex = hexUtils.concat(signatureDataHex, SignatureType.EIP1271Wallet);
    // Validate signature
    const isValidSignature = await validateAsync(
        signedOrder,
        signatureHex,
        ValidatorWalletAction.ReturnTrue,
        signatureDataHex,
    );
    expect(isValidSignature).to.be.false();
});
it('should revert when validator returns nothing and SignatureType=EIP1271Wallet', async () => {
    signedOrder.makerAddress = validatorWallet.address;
    const signatureDataHex = generateRandomSignature();
    const signatureHex = hexUtils.concat(signatureDataHex, SignatureType.EIP1271Wallet);
    const orderHashHex = orderHashUtils.getOrderHashHex(signedOrder);
    const data = eip1271Data.OrderWithHash(signedOrder, orderHashHex).getABIEncodedTransactionData();
    const expectedError = new ExchangeRevertErrors.EIP1271SignatureError(
        validatorWallet.address,
        data,
        signatureHex,
        constants.NULL_BYTES,
    );
    const tx = validateAsync(signedOrder, signatureHex, ValidatorWalletAction.ReturnNothing, signatureDataHex);
    return expect(tx).to.revertWith(expectedError);
});
it('should revert when validator attempts to update state and SignatureType=EIP1271Wallet', async () => {
    signedOrder.makerAddress = validatorWallet.address;
    // Doesn't have to contain a real signature since our wallet contract
    // just does a hash comparison.
    const signatureHex = hexUtils.concat(generateRandomSignature(), SignatureType.EIP1271Wallet);
    const orderHashHex = orderHashUtils.getOrderHashHex(signedOrder);
    const data = eip1271Data.OrderWithHash(signedOrder, orderHashHex).getABIEncodedTransactionData();
    const expectedError = new ExchangeRevertErrors.EIP1271SignatureError(
        validatorWallet.address,
        data,
        signatureHex,
        constants.NULL_BYTES,
    );
    const tx = validateAsync(signedOrder, signatureHex, ValidatorWalletAction.UpdateState);
    return expect(tx).to.revertWith(expectedError);
});
it('should revert when validator reverts and SignatureType=EIP1271Wallet', async () => {
    signedOrder.makerAddress = validatorWallet.address;
    const signatureHex = hexUtils.concat(SignatureType.EIP1271Wallet);
    const orderHashHex = orderHashUtils.getOrderHashHex(signedOrder);
    const data = eip1271Data.OrderWithHash(signedOrder, orderHashHex).getABIEncodedTransactionData();
    const expectedError = new ExchangeRevertErrors.EIP1271SignatureError(
        validatorWallet.address,
        data,
        signatureHex,
        new StringRevertError(validatorWalletRevertReason).encode(),
    );
    const tx = validateAsync(signedOrder, signatureHex, ValidatorWalletAction.Revert);
    return expect(tx).to.revertWith(expectedError);
});
it('should revert when signer is an EOA and SignatureType=EIP1271Wallet', async () => {
    const signatureHex = hexUtils.concat(SignatureType.EIP1271Wallet);
    signedOrder.makerAddress = notSignerAddress;
    const orderHashHex = orderHashUtils.getOrderHashHex(signedOrder);
    const data = eip1271Data.OrderWithHash(signedOrder, orderHashHex).getABIEncodedTransactionData();
    const expectedError = new ExchangeRevertErrors.EIP1271SignatureError(
        notSignerAddress,
        data,
        signatureHex,
        constants.NULL_BYTES,
    );
    const tx = exchange.isValidOrderSignature(signedOrder, signatureHex).callAsync();
    return expect(tx).to.revertWith(expectedError);
});
it('should revert when signer is an EOA and SignatureType=Validator', async () => {
    const signatureHex = hexUtils.concat(notSignerAddress,
SignatureType.Validator);
const orderHashHex = orderHashUtils.getOrderHashHex(signedOrder);
const data = eip1271Data.OrderWithHash(signedOrder, orderHashHex).getABIEncodedTransactionData();
const expectedError = new ExchangeRevertErrors.EIP1271SignatureError(
    notSignerAddress,
    data,
    signatureHex,
    constants.NULL_BYTES,
);
// Register an EOA as a validator.
await exchange
    .setSignatureValidatorApproval(notSignerAddress, true)
    .awaitTransactionSuccessAsync({ from: signerAddress });
const tx = exchange.isValidOrderSignature(signedOrder, signatureHex).callAsync();
return expect(tx).to.revertWith(expectedError);
});
// Run hash-only signature type tests as well.
const validateOrderHashAsync = async (
    _hashHex: string,
    _signerAddress: string,
    signatureHex: string,
    validatorAction?: ValidatorWalletAction,
    validatorExpectedSignatureHex?: string,
): Promise<any> => {
    signedOrder.makerAddress = _signerAddress;
    return validateAsync(signedOrder, signatureHex, validatorAction, validatorExpectedSignatureHex);
};
createHashSignatureTests((_signerAddress?: string) => {
    signedOrder.makerAddress = _signerAddress === undefined ? signerAddress : _signerAddress;
    return orderHashUtils.getOrderHashHex(signedOrder);
}, validateOrderHashAsync);
});
describe('isValidTransactionSignature', () => {
    let transactionFactory: TransactionFactory;
    let signedTransaction: SignedZeroExTransaction;
    const TRANSACTION_DATA_LENGTH = 100;
    before(async () => {
        transactionFactory = new TransactionFactory(signerPrivateKey, exchange.address, chainId);
    });
    beforeEach(async () => {
        // We don't actually do anything with the transaction so we can just
        // fill it with random data.
        signedTransaction = await transactionFactory.newSignedTransactionAsync({
            data: hexUtils.random(TRANSACTION_DATA_LENGTH),
        });
    });
    // Calls `isValidTransactionSignature`, optionally priming the validator wallet first.
    const validateAsync = async (
        transaction: SignedZeroExTransaction,
        signatureHex: string,
        validatorAction?: ValidatorWalletAction,
        validatorExpectedSignatureHex?: string,
    ) => {
        const transactionHashHex = transactionHashUtils.getTransactionHashHex(transaction);
        const expectedSignatureHashHex =
            validatorExpectedSignatureHex === undefined
                ? constants.NULL_BYTES
                : hashBytes(validatorExpectedSignatureHex);
        if (validatorAction !== undefined) {
            await validatorWallet
                .prepare(transactionHashHex, validatorAction, expectedSignatureHashHex)
                .awaitTransactionSuccessAsync();
        }
        return exchange.isValidTransactionSignature(transaction, signatureHex).callAsync();
    };
    it('should revert when signerAddress == 0', async () => {
        const signatureHex = hexUtils.concat(SignatureType.EIP712);
        const nullSignerTransaction = {
            ...signedTransaction,
            signerAddress: constants.NULL_ADDRESS,
        };
        const transactionHashHex = transactionHashUtils.getTransactionHashHex(nullSignerTransaction);
        const expectedError = new ExchangeRevertErrors.SignatureError(
            ExchangeRevertErrors.SignatureErrorCode.InvalidSigner,
            transactionHashHex,
            constants.NULL_ADDRESS,
            signatureHex,
        );
        const tx = exchange.isValidTransactionSignature(nullSignerTransaction, signatureHex).callAsync();
        return expect(tx).to.revertWith(expectedError);
    });
    it('should return true when SignatureType=Validator, signature is valid and validator is approved', async () => {
        // Doesn't have to contain a real signature since our wallet contract
        // just does a hash comparison.
        const signatureDataHex = generateRandomSignature();
        const signatureHex = hexUtils.concat(signatureDataHex, validatorWallet.address, SignatureType.Validator);
        const isValidSignature = await validateAsync(
            signedTransaction,
            signatureHex,
            ValidatorWalletAction.MatchSignatureHash,
            signatureDataHex,
        );
        expect(isValidSignature).to.be.true();
    });
    it('should return false when SignatureType=Validator, signature is invalid and validator is approved', async () => {
        // Doesn't have to contain a real signature since our wallet contract
        // just does a hash comparison.
        const signatureDataHex = generateRandomSignature();
        const notSignatureDataHex = generateRandomSignature();
        const signatureHex = hexUtils.concat(notSignatureDataHex, validatorWallet.address, SignatureType.Validator);
        const isValidSignature = await validateAsync(
            signedTransaction,
            signatureHex,
            ValidatorWalletAction.MatchSignatureHash,
            signatureDataHex,
        );
        expect(isValidSignature).to.be.false();
    });
    it('should return false when validator returns `true` and SignatureType=Validator', async () => {
        const signatureDataHex = generateRandomSignature();
        const signatureHex = hexUtils.concat(signatureDataHex, validatorWallet.address, SignatureType.Validator);
        const isValidSignature = await validateAsync(
            signedTransaction,
            signatureHex,
            ValidatorWalletAction.ReturnTrue,
            signatureDataHex,
        );
        expect(isValidSignature).to.be.false();
    });
    // A Validator signature must at least hold a 20-byte validator address plus
    // the 1-byte signature type.
    it('should revert when SignatureType=Validator and signature is shorter than 21 bytes', async () => {
        // Set approval of signature validator to false
        await exchange
            .setSignatureValidatorApproval(validatorWallet.address, false)
            .awaitTransactionSuccessAsync({ from: signedTransaction.signerAddress });
        // Doesn't have to contain a real signature since our wallet contract
        // just does a hash comparison.
        const signatureHex = hexUtils.concat(SignatureType.Validator);
        const transactionHashHex = transactionHashUtils.getTransactionHashHex(signedTransaction);
        const expectedError = new ExchangeRevertErrors.SignatureError(
            ExchangeRevertErrors.SignatureErrorCode.InvalidLength,
            transactionHashHex,
            signedTransaction.signerAddress,
            signatureHex,
        );
        const tx = validateAsync(signedTransaction, signatureHex, ValidatorWalletAction.MatchSignatureHash);
        return expect(tx).to.revertWith(expectedError);
    });
    it('should revert when validator returns nothing and SignatureType=Validator', async () => {
        const signatureDataHex = generateRandomSignature();
        const signatureHex = hexUtils.concat(signatureDataHex, validatorWallet.address, SignatureType.Validator);
        const transactionHashHex = transactionHashUtils.getTransactionHashHex(signedTransaction);
        const data = eip1271Data
            .ZeroExTransactionWithHash(signedTransaction, transactionHashHex)
            .getABIEncodedTransactionData();
        const expectedError = new ExchangeRevertErrors.EIP1271SignatureError(
            validatorWallet.address,
            data,
            signatureHex,
            constants.NULL_BYTES,
        );
        const tx = validateAsync(
            signedTransaction,
            signatureHex,
            ValidatorWalletAction.ReturnNothing,
            signatureDataHex,
        );
        return expect(tx).to.revertWith(expectedError);
    });
    it('should revert when validator attempts to update state and SignatureType=Validator', async () => {
        // Doesn't have to contain a real signature since our wallet contract
        // just does a hash comparison.
        const signatureDataHex = generateRandomSignature();
        const signatureHex = hexUtils.concat(signatureDataHex, validatorWallet.address, SignatureType.Validator);
        const transactionHashHex = transactionHashUtils.getTransactionHashHex(signedTransaction);
        const data = eip1271Data
            .ZeroExTransactionWithHash(signedTransaction, transactionHashHex)
            .getABIEncodedTransactionData();
        const expectedError = new ExchangeRevertErrors.EIP1271SignatureError(
            validatorWallet.address,
            data,
            signatureHex,
            constants.NULL_BYTES,
        );
        const tx = validateAsync(signedTransaction, signatureHex, ValidatorWalletAction.UpdateState);
        return expect(tx).to.revertWith(expectedError);
    });
    it('should revert when validator reverts and SignatureType=Validator', async () => {
        // Doesn't have to contain a real signature since our wallet contract
        // just does a hash comparison.
        const signatureDataHex = generateRandomSignature();
        const signatureHex = hexUtils.concat(signatureDataHex, validatorWallet.address, SignatureType.Validator);
        const transactionHashHex = transactionHashUtils.getTransactionHashHex(signedTransaction);
        const data = eip1271Data
            .ZeroExTransactionWithHash(signedTransaction, transactionHashHex)
            .getABIEncodedTransactionData();
        const expectedError = new ExchangeRevertErrors.EIP1271SignatureError(
            validatorWallet.address,
            data,
            signatureHex,
            new StringRevertError(validatorWalletRevertReason).encode(),
        );
        const tx = validateAsync(signedTransaction, signatureHex, ValidatorWalletAction.Revert);
        return expect(tx).to.revertWith(expectedError);
    });
    it('should revert when SignatureType=Validator, signature is valid and validator is not approved', async () => {
        // Set approval of signature validator to false
        await exchange
            .setSignatureValidatorApproval(validatorWallet.address, false)
            .awaitTransactionSuccessAsync({ from: signedTransaction.signerAddress });
        // Doesn't have to contain a real signature since our wallet contract
        // just does a hash comparison.
        const signatureDataHex = generateRandomSignature();
        const signatureHex = hexUtils.concat(signatureDataHex, validatorWallet.address, SignatureType.Validator);
        const expectedError = new ExchangeRevertErrors.SignatureValidatorNotApprovedError(
            signedTransaction.signerAddress,
            validatorWallet.address,
        );
        const tx = validateAsync(signedTransaction, signatureHex, ValidatorWalletAction.Revert);
        return expect(tx).to.revertWith(expectedError);
    });
    it('should return true when SignatureType=EIP1271Wallet and signature is valid', async () => {
        signedTransaction.signerAddress = validatorWallet.address;
        // Doesn't have to contain a real signature since our wallet contract
        // just does a hash comparison.
        const signatureDataHex = generateRandomSignature();
        const signatureHex = hexUtils.concat(signatureDataHex, SignatureType.EIP1271Wallet);
        // Validate signature
        const isValidSignature = await validateAsync(
            signedTransaction,
            signatureHex,
            ValidatorWalletAction.MatchSignatureHash,
            signatureDataHex,
        );
        expect(isValidSignature).to.be.true();
    });
    it('should return false when SignatureType=EIP1271Wallet and signature is invalid', async () => {
        signedTransaction.signerAddress = validatorWallet.address;
        // Doesn't have to contain a real signature since our wallet contract
        // just does a hash comparison.
        const signatureDataHex = generateRandomSignature();
        const notSignatureDataHex = generateRandomSignature();
        const signatureHex = hexUtils.concat(notSignatureDataHex, SignatureType.EIP1271Wallet);
        // Validate signature
        const isValidSignature = await validateAsync(
            signedTransaction,
            signatureHex,
            ValidatorWalletAction.MatchSignatureHash,
            signatureDataHex,
        );
        expect(isValidSignature).to.be.false();
    });
    it('should return false when validator returns `true` and SignatureType=EIP1271Wallet', async () => {
        signedTransaction.signerAddress = validatorWallet.address;
        const signatureDataHex = generateRandomSignature();
        const signatureHex = hexUtils.concat(signatureDataHex, SignatureType.EIP1271Wallet);
        // Validate signature
        const isValidSignature = await validateAsync(
            signedTransaction,
            signatureHex,
            ValidatorWalletAction.ReturnTrue,
            signatureDataHex,
        );
        expect(isValidSignature).to.be.false();
    });
    it('should revert when validator returns nothing and SignatureType=EIP1271Wallet', async () => {
        signedTransaction.signerAddress = validatorWallet.address;
        const signatureDataHex = generateRandomSignature();
        const signatureHex = hexUtils.concat(signatureDataHex, SignatureType.EIP1271Wallet);
        const transactionHashHex = transactionHashUtils.getTransactionHashHex(signedTransaction);
        const data = eip1271Data
            .ZeroExTransactionWithHash(signedTransaction, transactionHashHex)
            .getABIEncodedTransactionData();
        const expectedError = new ExchangeRevertErrors.EIP1271SignatureError(
            validatorWallet.address,
            data,
            signatureHex,
            constants.NULL_BYTES,
        );
        const tx = validateAsync(
            signedTransaction,
            signatureHex,
            ValidatorWalletAction.ReturnNothing,
            signatureDataHex,
        );
        return expect(tx).to.revertWith(expectedError);
    });
    it('should revert when validator attempts to update state and SignatureType=EIP1271Wallet', async () => {
        signedTransaction.signerAddress = validatorWallet.address;
        // Doesn't have to contain a real signature since our wallet contract
        // just does a hash comparison.
        const signatureHex = hexUtils.concat(generateRandomSignature(), SignatureType.EIP1271Wallet);
        const transactionHashHex = transactionHashUtils.getTransactionHashHex(signedTransaction);
        const data = eip1271Data
            .ZeroExTransactionWithHash(signedTransaction, transactionHashHex)
            .getABIEncodedTransactionData();
        const expectedError = new ExchangeRevertErrors.EIP1271SignatureError(
            validatorWallet.address,
            data,
            signatureHex,
            constants.NULL_BYTES,
        );
        const tx = validateAsync(signedTransaction, signatureHex, ValidatorWalletAction.UpdateState);
        return expect(tx).to.revertWith(expectedError);
    });
    it('should revert when validator reverts and SignatureType=EIP1271Wallet', async () => {
        signedTransaction.signerAddress = validatorWallet.address;
        // Doesn't have to contain a real signature since our wallet contract
        // just does a hash comparison.
        const signatureHex = hexUtils.concat(generateRandomSignature(), SignatureType.EIP1271Wallet);
        const transactionHashHex = transactionHashUtils.getTransactionHashHex(signedTransaction);
        const data = eip1271Data
            .ZeroExTransactionWithHash(signedTransaction, transactionHashHex)
            .getABIEncodedTransactionData();
        const expectedError = new ExchangeRevertErrors.EIP1271SignatureError(
            validatorWallet.address,
            data,
            signatureHex,
            new StringRevertError(validatorWalletRevertReason).encode(),
        );
        const tx = validateAsync(signedTransaction, signatureHex, ValidatorWalletAction.Revert);
        return expect(tx).to.revertWith(expectedError);
    });
    it('should revert when signer is an EOA and SignatureType=EIP1271Wallet', async () => {
        const signatureHex = hexUtils.concat(SignatureType.EIP1271Wallet);
        const transactionHashHex = transactionHashUtils.getTransactionHashHex(signedTransaction);
        const data = eip1271Data
            .ZeroExTransactionWithHash(signedTransaction, transactionHashHex)
            .getABIEncodedTransactionData();
        const expectedError = new ExchangeRevertErrors.EIP1271SignatureError(
            signedTransaction.signerAddress,
            data,
            signatureHex,
            constants.NULL_BYTES,
        );
        const tx =
exchange.isValidTransactionSignature(signedTransaction, signatureHex).callAsync(); return expect(tx).to.revertWith(expectedError); }); it('should revert when signer is an EOA and SignatureType=Validator', async () => { const signatureHex = hexUtils.concat(notSignerAddress, SignatureType.Validator); const transactionHashHex = transactionHashUtils.getTransactionHashHex(signedTransaction); const data = eip1271Data .ZeroExTransactionWithHash(signedTransaction, transactionHashHex) .getABIEncodedTransactionData(); const expectedError = new ExchangeRevertErrors.EIP1271SignatureError( notSignerAddress, data, signatureHex, constants.NULL_BYTES, ); // Register an EOA as a validator. await exchange .setSignatureValidatorApproval(notSignerAddress, true) .awaitTransactionSuccessAsync({ from: signerAddress }); const tx = exchange.isValidTransactionSignature(signedTransaction, signatureHex).callAsync(); return expect(tx).to.revertWith(expectedError); }); // Run hash-only signature type tests as well. const validateOrderHashAsync = async ( _hashHex: string, _signerAddress: string, signatureHex: string, validatorAction?: ValidatorWalletAction, validatorExpectedSignatureHex?: string, ): Promise<any> => { signedTransaction.signerAddress = _signerAddress; return validateAsync(signedTransaction, signatureHex, validatorAction, validatorExpectedSignatureHex); }; createHashSignatureTests((_signerAddress?: string) => { signedTransaction.signerAddress = _signerAddress === undefined ? 
signerAddress : _signerAddress; return transactionHashUtils.getTransactionHashHex(signedTransaction); }, validateOrderHashAsync); }); describe('setSignatureValidatorApproval', () => { let signatureValidatorLogDecoder: LogDecoder; before(async () => { signatureValidatorLogDecoder = new LogDecoder(env.web3Wrapper, artifacts); }); it('should emit a SignatureValidatorApprovalSet with correct args when a validator is approved', async () => { const approval = true; const res = await exchange .setSignatureValidatorApproval(validatorWallet.address, approval) .awaitTransactionSuccessAsync({ from: signerAddress, }); expect(res.logs.length).to.equal(1); const log = signatureValidatorLogDecoder.decodeLogOrThrow(res.logs[0]) as LogWithDecodedArgs< ExchangeSignatureValidatorApprovalEventArgs >; const logArgs = log.args; expect(logArgs.signerAddress).to.equal(signerAddress); expect(logArgs.validatorAddress).to.equal(validatorWallet.address); expect(logArgs.isApproved).to.equal(approval); }); it('should emit a SignatureValidatorApprovalSet with correct args when a validator is disapproved', async () => { const approval = false; const res = await exchange .setSignatureValidatorApproval(validatorWallet.address, approval) .awaitTransactionSuccessAsync({ from: signerAddress, }); expect(res.logs.length).to.equal(1); const log = signatureValidatorLogDecoder.decodeLogOrThrow(res.logs[0]) as LogWithDecodedArgs< ExchangeSignatureValidatorApprovalEventArgs >; const logArgs = log.args; expect(logArgs.signerAddress).to.equal(signerAddress); expect(logArgs.validatorAddress).to.equal(validatorWallet.address); expect(logArgs.isApproved).to.equal(approval); }); }); describe('fillOrder integration tests', () => { let erc20Wrapper: ERC20Wrapper; let erc20Proxy: ERC20ProxyContract; let erc20TokenA: DummyERC20TokenContract; let erc20TokenB: DummyERC20TokenContract; let feeToken: DummyERC20TokenContract; let orderFactory: OrderFactory; let signedOrder: SignedOrder; before(async () => { // Deploy ERC20 
proxy and tokens erc20Wrapper = new ERC20Wrapper(env.provider, accounts, owner); erc20Proxy = await erc20Wrapper.deployProxyAsync(); const numDummyErc20ToDeploy = 3; [erc20TokenA, erc20TokenB, feeToken] = await erc20Wrapper.deployDummyTokensAsync( numDummyErc20ToDeploy, constants.DUMMY_TOKEN_DECIMALS, ); await erc20Wrapper.setBalancesAndAllowancesAsync(); // Configure ERC20 proxy and exchange await erc20Proxy.addAuthorizedAddress(exchange.address).awaitTransactionSuccessAsync({ from: owner }); await exchange.registerAssetProxy(erc20Proxy.address).awaitTransactionSuccessAsync({ from: owner }); // Configure order defaults const defaultMakerAssetAddress = erc20TokenA.address; const defaultTakerAssetAddress = erc20TokenB.address; const defaultFeeAssetAddress = feeToken.address; const defaultOrderParams = { ...constants.STATIC_ORDER_PARAMS, makerAddress, feeRecipientAddress, makerAssetData: encodeERC20AssetData(defaultMakerAssetAddress), takerAssetData: encodeERC20AssetData(defaultTakerAssetAddress), makerFeeAssetData: encodeERC20AssetData(defaultFeeAssetAddress), takerFeeAssetData: encodeERC20AssetData(defaultFeeAssetAddress), exchangeAddress: exchange.address, chainId, }; const privateKey = constants.TESTRPC_PRIVATE_KEYS[accounts.indexOf(makerAddress)]; orderFactory = new OrderFactory(privateKey, defaultOrderParams); // Approve the ERC20 proxy with the test validator wallet. await validatorWallet .approveERC20(erc20TokenA.address, erc20Proxy.address, constants.INITIAL_ERC20_ALLOWANCE) .awaitTransactionSuccessAsync(); // Mint some ERC20 tokens to the test validator wallet. await erc20TokenA .setBalance(validatorWallet.address, constants.INITIAL_ERC20_BALANCE) .awaitTransactionSuccessAsync(); // Approve the validator. 
await exchange.setSignatureValidatorApproval(validatorWallet.address, true).awaitTransactionSuccessAsync({ from: makerAddress, }); signedOrder = await orderFactory.newSignedOrderAsync({ makerFee: constants.ZERO_AMOUNT, takerFee: constants.ZERO_AMOUNT, }); }); it('should revert if `Validator` signature type rejects during a second fill', async () => { const signatureHex = hexUtils.concat(validatorWallet.address, SignatureType.Validator); signedOrder.signature = signatureHex; const orderHashHex = orderHashUtils.getOrderHashHex(signedOrder); // Allow the signature check for the first fill. await validatorWallet .prepare(orderHashHex, ValidatorWalletAction.Accept, constants.NULL_BYTES) .awaitTransactionSuccessAsync(); const fillAmount = signedOrder.takerAssetAmount.div(10); await exchange.fillOrder(signedOrder, fillAmount, signedOrder.signature).awaitTransactionSuccessAsync({ from: takerAddress, }); // Reject the signature check for the second fill. await validatorWallet .prepare(orderHashHex, ValidatorWalletAction.Reject, constants.NULL_BYTES) .awaitTransactionSuccessAsync(); const tx = exchange.fillOrder(signedOrder, fillAmount, signedOrder.signature).awaitTransactionSuccessAsync({ from: takerAddress, }); const expectedError = new ExchangeRevertErrors.SignatureError( ExchangeRevertErrors.SignatureErrorCode.BadOrderSignature, orderHashHex, signedOrder.makerAddress, signedOrder.signature, ); return expect(tx).to.revertWith(expectedError); }); it('should revert if `Wallet` signature type rejects during a second fill', async () => { const signatureHex = hexUtils.concat(SignatureType.Wallet); signedOrder.makerAddress = validatorWallet.address; signedOrder.signature = signatureHex; const orderHashHex = orderHashUtils.getOrderHashHex(signedOrder); // Allow the signature check for the first fill. 
await validatorWallet .prepare(orderHashHex, ValidatorWalletAction.Accept, constants.NULL_BYTES) .awaitTransactionSuccessAsync(); const fillAmount = signedOrder.takerAssetAmount.div(10); await exchange.fillOrder(signedOrder, fillAmount, signedOrder.signature).awaitTransactionSuccessAsync({ from: takerAddress, }); // Reject the signature check for the second fill. await validatorWallet .prepare(orderHashHex, ValidatorWalletAction.Reject, constants.NULL_BYTES) .awaitTransactionSuccessAsync(); const tx = exchange.fillOrder(signedOrder, fillAmount, signedOrder.signature).awaitTransactionSuccessAsync({ from: takerAddress, }); const expectedError = new ExchangeRevertErrors.SignatureError( ExchangeRevertErrors.SignatureErrorCode.BadOrderSignature, orderHashHex, signedOrder.makerAddress, signedOrder.signature, ); return expect(tx).to.revertWith(expectedError); }); it('should revert if `EIP1271Wallet` signature type rejects during a second fill', async () => { const signatureHex = hexUtils.concat(SignatureType.EIP1271Wallet); signedOrder.makerAddress = validatorWallet.address; signedOrder.signature = signatureHex; const orderHashHex = orderHashUtils.getOrderHashHex(signedOrder); // Allow the signature check for the first fill. await validatorWallet .prepare(orderHashHex, ValidatorWalletAction.Accept, constants.NULL_BYTES) .awaitTransactionSuccessAsync(); const fillAmount = signedOrder.takerAssetAmount.div(10); await exchange.fillOrder(signedOrder, fillAmount, signedOrder.signature).awaitTransactionSuccessAsync({ from: takerAddress, }); // Reject the signature check for the second fill. 
await validatorWallet .prepare(orderHashHex, ValidatorWalletAction.Reject, constants.NULL_BYTES) .awaitTransactionSuccessAsync(); const tx = exchange.fillOrder(signedOrder, fillAmount, signedOrder.signature).awaitTransactionSuccessAsync({ from: takerAddress, }); const expectedError = new ExchangeRevertErrors.SignatureError( ExchangeRevertErrors.SignatureErrorCode.BadOrderSignature, orderHashHex, signedOrder.makerAddress, signedOrder.signature, ); return expect(tx).to.revertWith(expectedError); }); }); }); // tslint:disable:max-file-line-count // tslint:enable:no-unnecessary-type-assertion
the_stack
import * as Platform from '../../core/platform/platform.js';
import * as SDK from '../../core/sdk/sdk.js';
import type * as Protocol from '../../generated/protocol.js';

import {RecordType, TimelineData} from './TimelineModel.js';
import type {TracingLayerPayload, TracingLayerTile} from './TracingLayerTree.js';
import {TracingLayerTree} from './TracingLayerTree.js';

// Reconstructs the sequence of rendered frames (TimelineFrame) from
// compositor trace events (BeginFrame / DrawFrame / DroppedFrame / ...) and
// attributes main-thread CPU time, bucketed by category, to the frame during
// which it was committed.
export class TimelineFrameModel {
  // Maps a trace event to the category name used for CPU-time bucketing.
  private readonly categoryMapper: (arg0: SDK.TracingModel.Event) => string;
  private frames!: TimelineFrame[];
  // Completed frames indexed by their BeginMainThreadFrame frame id.
  private frameById!: {
    [x: number]: TimelineFrame,
  };
  private beginFrameQueue!: TimelineFrameBeginFrameQueue;
  // Smallest event startTime seen so far; used as the zero offset for frames.
  private minimumRecordTime!: number;
  // Frame currently being accumulated; flushed into |frames| on the next
  // startFrame().
  private lastFrame!: TimelineFrame|null;
  private mainFrameCommitted!: boolean;
  private mainFrameRequested!: boolean;
  private lastLayerTree!: TracingFrameLayerTree|null;
  // Main-thread work that has been committed (CompositeLayers) but not yet
  // activated into a visible frame.
  private framePendingActivation!: PendingFrame|null;
  private currentTaskTimeByCategory!: {
    [x: string]: number,
  };
  private target!: SDK.Target.Target|null;
  // Main-thread work accumulated since the first main-frame marker, awaiting
  // the CompositeLayers commit.
  private framePendingCommit?: PendingFrame|null;
  private lastBeginFrame?: number|null;
  // NOTE(review): only ever assigned in reset(); not read anywhere in this
  // file — candidate for removal (confirm no external test pokes at it).
  private lastDroppedFrame?: number|null;
  private lastNeedsBeginFrame?: number|null;
  private lastTaskBeginTime?: number|null;
  // Layer tree id announced by SetLayerTreeId; compositor events for other
  // trees are ignored.
  private layerTreeId?: number|null;
  private currentProcessMainThread?: SDK.TracingModel.Thread|null;

  constructor(categoryMapper: (arg0: SDK.TracingModel.Event) => string) {
    this.categoryMapper = categoryMapper;
    this.reset();
  }

  getFrames(): TimelineFrame[] {
    return this.frames;
  }

  // Returns the frames overlapping [startTime, endTime). Falsy bounds widen
  // the window to the whole recording. Relies on |frames| being sorted by
  // time (they are appended in chronological order by flushFrame()).
  getFramesWithinWindow(startTime: number, endTime: number): TimelineFrame[] {
    const firstFrame =
        Platform.ArrayUtilities.lowerBound(this.frames, startTime || 0, (time, frame) => time - frame.endTime);
    const lastFrame =
        Platform.ArrayUtilities.lowerBound(this.frames, endTime || Infinity, (time, frame) => time - frame.startTime);
    return this.frames.slice(firstFrame, lastFrame);
  }

  // True when the raster task's source frame is known to this model and has a
  // layer tree snapshot attached — i.e. rasterTilePromise() could resolve.
  hasRasterTile(rasterTask: SDK.TracingModel.Event): boolean {
    const data = rasterTask.args['tileData'];
    if (!data) {
      return false;
    }
    const frameId = data['sourceFrameNumber'];
    const frame = frameId && this.frameById[frameId];
    if (!frame || !frame.layerTree) {
      return false;
    }
    return true;
  }

  // Resolves to the rect and paint-profiler snapshot for the tile a raster
  // task worked on, or null when the target/frame/tile cannot be resolved.
  rasterTilePromise(rasterTask: SDK.TracingModel.Event): Promise<{
    rect: Protocol.DOM.Rect,
    snapshot: SDK.PaintProfiler.PaintProfilerSnapshot,
  }|null> {
    if (!this.target) {
      return Promise.resolve(null);
    }
    const data = rasterTask.args['tileData'];
    const frameId = (data['sourceFrameNumber'] as number);
    const tileId = data['tileId'] && data['tileId']['id_ref'];
    const frame = frameId && this.frameById[frameId];
    if (!frame || !frame.layerTree || !tileId) {
      return Promise.resolve(null);
    }
    return frame.layerTree.layerTreePromise().then(layerTree => layerTree && layerTree.pictureForRasterTile(tileId));
  }

  // Returns the model to its initial state; called from the constructor and
  // before ingesting a new trace.
  reset(): void {
    this.minimumRecordTime = Infinity;
    this.frames = [];
    this.frameById = {};
    this.beginFrameQueue = new TimelineFrameBeginFrameQueue();
    this.lastFrame = null;
    this.lastLayerTree = null;
    this.mainFrameCommitted = false;
    this.mainFrameRequested = false;
    this.framePendingCommit = null;
    this.lastBeginFrame = null;
    this.lastDroppedFrame = null;
    this.lastNeedsBeginFrame = null;
    this.framePendingActivation = null;
    this.lastTaskBeginTime = null;
    this.target = null;
    this.layerTreeId = null;
    this.currentTaskTimeByCategory = {};
  }

  handleBeginFrame(startTime: number, seqId: number): void {
    if (!this.lastFrame) {
      this.startFrame(startTime);
    }
    this.lastBeginFrame = startTime;
    this.beginFrameQueue.addFrameIfNotExists(seqId, startTime, false);
  }

  handleDroppedFrame(startTime: number, seqId: number): void {
    if (!this.lastFrame) {
      this.startFrame(startTime);
    }

    // This line handles the case where no BeginFrame event is issued for
    // the dropped frame. In this situation, add a BeginFrame to the queue
    // as if it actually occurred.
    this.beginFrameQueue.addFrameIfNotExists(seqId, startTime, true);
    this.beginFrameQueue.setDropped(seqId, true);
  }

  handleDrawFrame(startTime: number, seqId: number): void {
    if (!this.lastFrame) {
      this.startFrame(startTime);
      return;
    }

    // - if it wasn't drawn, it didn't happen!
    // - only show frames that either did not wait for the main thread frame or had one committed.
    if (this.mainFrameCommitted || !this.mainFrameRequested) {
      if (this.lastNeedsBeginFrame) {
        // Mark the previous frame idle when nothing was produced between the
        // NeedsBeginFrameChanged signal and the next activity.
        const idleTimeEnd = this.framePendingActivation ? this.framePendingActivation.triggerTime :
                                                          (this.lastBeginFrame || this.lastNeedsBeginFrame);
        if (idleTimeEnd > this.lastFrame.startTime) {
          this.lastFrame.idle = true;
          this.lastBeginFrame = null;
        }
        this.lastNeedsBeginFrame = null;
      }

      const framesToVisualize = this.beginFrameQueue.processPendingBeginFramesOnDrawFrame(seqId);

      // Visualize the current frame and all pending frames before it.
      for (const frame of framesToVisualize) {
        const isLastFrameIdle = this.lastFrame.idle;

        // If |frame| is the first frame after an idle period, the CPU time
        // will be logged ("committed") under |frame| if applicable.
        this.startFrame(frame.startTime);
        if (isLastFrameIdle && this.framePendingActivation) {
          this.commitPendingFrame();
        }
        if (frame.isDropped) {
          this.lastFrame.dropped = true;
        }
      }
    }
    this.mainFrameCommitted = false;
  }

  handleActivateLayerTree(): void {
    if (!this.lastFrame) {
      return;
    }
    if (this.framePendingActivation && !this.lastNeedsBeginFrame) {
      this.commitPendingFrame();
    }
  }

  handleRequestMainThreadFrame(): void {
    if (!this.lastFrame) {
      return;
    }
    this.mainFrameRequested = true;
  }

  // CompositeLayers commits the pending main-thread work: it moves from
  // "pending commit" to "pending activation".
  handleCompositeLayers(): void {
    if (!this.framePendingCommit) {
      return;
    }
    this.framePendingActivation = this.framePendingCommit;
    this.framePendingCommit = null;
    this.mainFrameRequested = false;
    this.mainFrameCommitted = true;
  }

  handleLayerTreeSnapshot(layerTree: TracingFrameLayerTree): void {
    this.lastLayerTree = layerTree;
  }

  handleNeedFrameChanged(startTime: number, needsBeginFrame: boolean): void {
    if (needsBeginFrame) {
      this.lastNeedsBeginFrame = startTime;
    }
  }

  // Flushes the frame in progress (if any) and opens a new one at |startTime|.
  private startFrame(startTime: number): void {
    if (this.lastFrame) {
      this.flushFrame(this.lastFrame, startTime);
    }
    this.lastFrame = new TimelineFrame(startTime, startTime - this.minimumRecordTime);
  }

  // Finalizes |frame|, attaches the latest layer tree, and appends it to
  // |frames|. Asserts (non-fatally) that frames are contiguous in time.
  private flushFrame(frame: TimelineFrame, endTime: number): void {
    frame.setLayerTree(this.lastLayerTree);
    frame.setEndTime(endTime);
    if (this.lastLayerTree) {
      this.lastLayerTree.setPaints(frame.paints);
    }
    const lastFrame = this.frames[this.frames.length - 1];
    if (this.frames.length && lastFrame &&
        (frame.startTime !== lastFrame.endTime || frame.startTime > frame.endTime)) {
      console.assert(
          false, `Inconsistent frame time for frame ${this.frames.length} (${frame.startTime} - ${frame.endTime})`);
    }
    this.frames.push(frame);
    if (typeof frame.mainFrameId === 'number') {
      this.frameById[frame.mainFrameId] = frame;
    }
  }

  // Moves the activated pending work (CPU time, paints, frame id) into the
  // frame currently in progress.
  private commitPendingFrame(): void {
    if (!this.framePendingActivation || !this.lastFrame) {
      return;
    }

    this.lastFrame.addTimeForCategories(this.framePendingActivation.timeByCategory);
    this.lastFrame.paints = this.framePendingActivation.paints;
    this.lastFrame.mainFrameId = this.framePendingActivation.mainFrameId;
    this.framePendingActivation = null;
  }

  // Ingests a chronologically sorted event stream. |threadData| lists which
  // thread is the process's main thread over time; the while-loop below
  // advances that pointer as events pass each switch point.
  addTraceEvents(target: SDK.Target.Target|null, events: SDK.TracingModel.Event[], threadData: {
    thread: SDK.TracingModel.Thread,
    time: number,
  }[]): void {
    this.target = target;
    let j = 0;
    this.currentProcessMainThread = threadData.length && threadData[0].thread || null;
    for (let i = 0; i < events.length; ++i) {
      while (j + 1 < threadData.length && threadData[j + 1].time <= events[i].startTime) {
        this.currentProcessMainThread = threadData[++j].thread;
      }
      this.addTraceEvent(events[i]);
    }
    this.currentProcessMainThread = null;
  }

  private addTraceEvent(event: SDK.TracingModel.Event): void {
    if (event.startTime && event.startTime < this.minimumRecordTime) {
      this.minimumRecordTime = event.startTime;
    }
    if (event.name === RecordType.SetLayerTreeId) {
      // The id lives in different args slots depending on the trace version.
      this.layerTreeId = event.args['layerTreeId'] || event.args['data']['layerTreeId'];
    } else if (
        event.id && event.phase === SDK.TracingModel.Phase.SnapshotObject &&
        event.name === RecordType.LayerTreeHostImplSnapshot && Number(event.id) === this.layerTreeId && this.target) {
      const snapshot = (event as SDK.TracingModel.ObjectSnapshot);
      this.handleLayerTreeSnapshot(new TracingFrameLayerTree(this.target, snapshot));
    } else {
      this.processCompositorEvents(event);
      if (event.thread === this.currentProcessMainThread) {
        this.addMainThreadTraceEvent(event);
      } else if (this.lastFrame && event.selfTime && !SDK.TracingModel.TracingModel.isTopLevelEvent(event)) {
        // Non-main-thread work is attributed directly to the frame in
        // progress.
        this.lastFrame.addTimeForCategory(this.categoryMapper(event), event.selfTime);
      }
    }
  }

  // Dispatches compositor-side events for the tracked layer tree to the
  // corresponding handleXxx() method.
  private processCompositorEvents(event: SDK.TracingModel.Event): void {
    if (event.args['layerTreeId'] !== this.layerTreeId) {
      return;
    }
    const timestamp = event.startTime;
    if (event.name === RecordType.BeginFrame) {
      this.handleBeginFrame(timestamp, event.args['frameSeqId']);
    } else if (event.name === RecordType.DrawFrame) {
      this.handleDrawFrame(timestamp, event.args['frameSeqId']);
    } else if (event.name === RecordType.ActivateLayerTree) {
      this.handleActivateLayerTree();
    } else if (event.name === RecordType.RequestMainThreadFrame) {
      this.handleRequestMainThreadFrame();
    } else if (event.name === RecordType.NeedsBeginFrameChanged) {
      this.handleNeedFrameChanged(timestamp, event.args['data'] && event.args['data']['needsBeginFrame']);
    } else if (event.name === RecordType.DroppedFrame) {
      this.handleDroppedFrame(timestamp, event.args['frameSeqId']);
    }
  }

  // Accumulates main-thread CPU time. Before a main-frame marker is seen,
  // time goes to the current task's bucket; once a PendingFrame exists, time
  // (including the current task's, passed by reference) goes to that frame.
  private addMainThreadTraceEvent(event: SDK.TracingModel.Event): void {
    if (SDK.TracingModel.TracingModel.isTopLevelEvent(event)) {
      this.currentTaskTimeByCategory = {};
      this.lastTaskBeginTime = event.startTime;
    }
    if (!this.framePendingCommit && TimelineFrameModel.mainFrameMarkers.indexOf(event.name as RecordType) >= 0) {
      this.framePendingCommit =
          new PendingFrame(this.lastTaskBeginTime || event.startTime, this.currentTaskTimeByCategory);
    }
    if (!this.framePendingCommit) {
      this.addTimeForCategory(this.currentTaskTimeByCategory, event);
      return;
    }
    this.addTimeForCategory(this.framePendingCommit.timeByCategory, event);

    if (event.name === RecordType.BeginMainThreadFrame && event.args['data'] && event.args['data']['frameId']) {
      this.framePendingCommit.mainFrameId = event.args['data']['frameId'];
    }
    if (event.name === RecordType.Paint && event.args['data']['layerId'] && TimelineData.forEvent(event).picture &&
        this.target) {
      this.framePendingCommit.paints.push(new LayerPaintEvent(event, this.target));
    }
    if (event.name === RecordType.CompositeLayers && event.args['layerTreeId'] === this.layerTreeId) {
      this.handleCompositeLayers();
    }
  }

  private addTimeForCategory(
      timeByCategory: {
        [x: string]: number,
      },
      event: SDK.TracingModel.Event): void {
    if (!event.selfTime) {
      return;
    }
    const categoryName = this.categoryMapper(event);
    timeByCategory[categoryName] = (timeByCategory[categoryName] || 0) + event.selfTime;
  }

  // Main-thread events that mark the start of work belonging to a new frame.
  private static readonly mainFrameMarkers: RecordType[] = [
    RecordType.ScheduleStyleRecalculation,
    RecordType.InvalidateLayout,
    RecordType.BeginMainThreadFrame,
    RecordType.ScrollLayer,
  ];
}

// Lazily materializes a TracingLayerTree from a LayerTreeHostImpl trace
// snapshot, associating it with the paints recorded for the frame.
export class TracingFrameLayerTree {
  private readonly target: SDK.Target.Target;
  private readonly snapshot: SDK.TracingModel.ObjectSnapshot;
  private paintsInternal!: LayerPaintEvent[]|undefined;

  constructor(target: SDK.Target.Target, snapshot: SDK.TracingModel.ObjectSnapshot) {
    this.target = target;
    this.snapshot = snapshot;
  }

  // Decodes the raw snapshot payload into a TracingLayerTree, or null when
  // the snapshot resolves to nothing.
  async layerTreePromise(): Promise<TracingLayerTree|null> {
    const result = (await this.snapshot.objectPromise() as unknown as {
      active_tiles: TracingLayerTile[],
      device_viewport_size: {
        width: number,
        height: number,
      },
      active_tree: {
        root_layer: TracingLayerPayload,
        layers: TracingLayerPayload[],
      },
    });
    if (!result) {
      return null;
    }
    const viewport = result['device_viewport_size'];
    const tiles = result['active_tiles'];
    const rootLayer = result['active_tree']['root_layer'];
    const layers = result['active_tree']['layers'];
    const layerTree = new TracingLayerTree(this.target);
    layerTree.setViewportSize(viewport);
    layerTree.setTiles(tiles);
    await layerTree.setLayers(rootLayer, layers, this.paintsInternal || []);
    return layerTree;
  }

  paints(): LayerPaintEvent[] {
    return this.paintsInternal || [];
  }

  setPaints(paints: LayerPaintEvent[]): void {
    this.paintsInternal = paints;
  }
}

// One rendered (or dropped/idle) frame on the timeline.
export class TimelineFrame {
  startTime: number;
  // Offset of startTime from the beginning of the recording.
  startTimeOffset: number;
  endTime: number;
  duration: number;
  // CPU time per category, filled in via addTimeForCategory().
  timeByCategory: {
    [x: string]: number,
  };
  cpuTime: number;
  idle: boolean;
  dropped: boolean;
  layerTree: TracingFrameLayerTree|null;
  paints: LayerPaintEvent[];
  mainFrameId: number|undefined;

  constructor(startTime: number, startTimeOffset: number) {
    this.startTime = startTime;
    this.startTimeOffset = startTimeOffset;
    this.endTime = this.startTime;
    this.duration = 0;
    this.timeByCategory = {};
    this.cpuTime = 0;
    this.idle = false;
    this.dropped = false;
    this.layerTree = null;
    this.paints = [];
    this.mainFrameId = undefined;
  }

  // Always false here; presumably overridden or reserved for future
  // warning support — confirm against callers.
  hasWarnings(): boolean {
    return false;
  }

  setEndTime(endTime: number): void {
    this.endTime = endTime;
    this.duration = this.endTime - this.startTime;
  }

  setLayerTree(layerTree: TracingFrameLayerTree|null): void {
    this.layerTree = layerTree;
  }

  addTimeForCategories(timeByCategory: {
    [x: string]: number,
  }): void {
    for (const category in timeByCategory) {
      this.addTimeForCategory(category, timeByCategory[category]);
    }
  }

  addTimeForCategory(category: string, time: number): void {
    this.timeByCategory[category] = (this.timeByCategory[category] || 0) + time;
    this.cpuTime += time;
  }
}

// Wraps a Paint trace event, giving access to its layer id and the serialized
// picture / paint-profiler snapshot behind it.
export class LayerPaintEvent {
  private readonly eventInternal: SDK.TracingModel.Event;
  private readonly target: SDK.Target.Target|null;

  constructor(event: SDK.TracingModel.Event, target: SDK.Target.Target|null) {
    this.eventInternal = event;
    this.target = target;
  }

  layerId(): string {
    return this.eventInternal.args['data']['layerId'];
  }

  event(): SDK.TracingModel.Event {
    return this.eventInternal;
  }

  // Resolves to the layer rect and base64-serialized picture ('skp64'), or
  // null when the event carries no picture or the payload is incomplete.
  picturePromise(): Promise<{
    rect: Array<number>,
    serializedPicture: string,
  }|null> {
    const picture = TimelineData.forEvent(this.eventInternal).picture;
    if (!picture) {
      return Promise.resolve(null);
    }

    // TODO(crbug.com/1172300) Ignored during the jsdoc to ts migration)
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    return picture.objectPromise().then((result: any) => {
      if (!result) {
        return null;
      }
      const rect = result['params'] && result['params']['layer_rect'];
      const picture = result['skp64'];
      return rect && picture ? {rect: rect, serializedPicture: picture} : null;
    });
  }

  // Loads the serialized picture into the paint profiler, yielding a snapshot
  // that can be replayed; null when no picture or no profiler model.
  async snapshotPromise(): Promise<{
    rect: Array<number>,
    snapshot: SDK.PaintProfiler.PaintProfilerSnapshot,
  }|null> {
    const paintProfilerModel = this.target && this.target.model(SDK.PaintProfiler.PaintProfilerModel);
    const picture = await this.picturePromise();
    if (!picture || !paintProfilerModel) {
      return null;
    }
    const snapshot = await paintProfilerModel.loadSnapshot(picture.serializedPicture);
    return snapshot ? {rect: picture.rect, snapshot: snapshot} : null;
  }
}

// Accumulates main-thread work (CPU time by category, paints, frame id)
// between the first main-frame marker and activation, before it is attributed
// to a TimelineFrame. Note: |timeByCategory| is held by reference, so the
// caller may keep adding to it after construction.
export class PendingFrame {
  timeByCategory: {
    [x: string]: number,
  };
  paints: LayerPaintEvent[];
  mainFrameId: number|undefined;
  triggerTime: number;

  constructor(triggerTime: number, timeByCategory: {
    [x: string]: number,
  }) {
    this.timeByCategory = timeByCategory;
    this.paints = [];
    this.mainFrameId = undefined;
    this.triggerTime = triggerTime;
  }
}

// The parameters of an impl-side BeginFrame.
class BeginFrameInfo {
  seqId: number;
  startTime: number;
  isDropped: boolean;

  constructor(seqId: number, startTime: number, isDropped: boolean) {
    this.seqId = seqId;
    this.startTime = startTime;
    this.isDropped = isDropped;
  }
}

// A queue of BeginFrames pending visualization.
// BeginFrames are added into this queue as they occur; later when their
// corresponding DrawFrames occur (or lack thereof), the BeginFrames are removed
// from the queue and their timestamps are used for visualization.
export class TimelineFrameBeginFrameQueue {
  // Sequence ids in arrival order.
  private queueFrames!: number[];

  // Maps frameSeqId to BeginFrameInfo.
  private mapFrames!: {
    [x: number]: BeginFrameInfo,
  };

  constructor() {
    this.queueFrames = [];
    this.mapFrames = {};
  }

  // Add a BeginFrame to the queue, if it does not already exist.
  addFrameIfNotExists(seqId: number, startTime: number, isDropped: boolean): void {
    if (!(seqId in this.mapFrames)) {
      this.mapFrames[seqId] = new BeginFrameInfo(seqId, startTime, isDropped);
      this.queueFrames.push(seqId);
    }
  }

  // Set a BeginFrame in queue as dropped.
  setDropped(seqId: number, isDropped: boolean): void {
    if (seqId in this.mapFrames) {
      this.mapFrames[seqId].isDropped = isDropped;
    }
  }

  processPendingBeginFramesOnDrawFrame(seqId: number): BeginFrameInfo[] {
    const framesToVisualize: BeginFrameInfo[] = [];

    // Do not visualize this frame in the rare case where the current DrawFrame
    // does not have a corresponding BeginFrame.
    if (seqId in this.mapFrames) {
      // Pop all BeginFrames before the current frame, and add only the dropped
      // ones in |frames_to_visualize|.
      // Non-dropped frames popped here are BeginFrames that are never
      // drawn (but not considered dropped either for some reason).
      // Those frames do not require a proactive visualization effort and will
      // be naturally presented as continuations of other frames.
      while (this.queueFrames[0] !== seqId) {
        const currentSeqId = this.queueFrames[0];
        if (this.mapFrames[currentSeqId].isDropped) {
          framesToVisualize.push(this.mapFrames[currentSeqId]);
        }
        delete this.mapFrames[currentSeqId];
        this.queueFrames.shift();
      }

      // Pop the BeginFrame associated with the current DrawFrame.
      framesToVisualize.push(this.mapFrames[seqId]);
      delete this.mapFrames[seqId];
      this.queueFrames.shift();
    }
    return framesToVisualize;
  }
}
the_stack
import { Component, Injectable, ViewChild } from '@angular/core';
import { ComponentFixture, TestBed } from '@angular/core/testing';
import { MATERIAL_SANITY_CHECKS } from '@angular/material/core';
import { NoopAnimationsModule } from '@angular/platform-browser/animations';
import { deepClone } from '@angular-ru/cdk/object';
import { Any, Nullable, PlainObject, SortOrderType } from '@angular-ru/cdk/typings';
import {
    TableBuilderComponent,
    TableBuilderModule,
    TableFilterType,
    TableSortTypes
} from '@angular-ru/cdk/virtual-table';
import { WebWorkerThreadService } from '@angular-ru/cdk/webworker';

import { FilterDescriptor } from '../../../../../virtual-table/src/services/filterable/filter-descriptor';

// Host component that wraps <ngx-table-builder> and exposes the inputs the
// tests drive: the data source, sort descriptors, and filter definition.
@Component({
    selector: 'app-ngx-table-builder-mock',
    template: `
        <ngx-table-builder
            enable-selection
            enable-filtering
            [source]="data"
            [sort-types]="sortTypes"
            [filter-definition]="filterDefinition"
        ></ngx-table-builder>
    `
})
class NgxTableBuilderMockComponent {
    @ViewChild(TableBuilderComponent, { static: true })
    public tableBuilderComponent!: TableBuilderComponent<PlainObject>;

    // Four-row fixture; the last row has null fields to exercise null handling
    // in sorting and filtering.
    public data: PlainObject[] = [
        { id: 1, name: 'Max', lastName: 'Ivanov' },
        { id: 2, name: 'Ivan', lastName: 'Petrov' },
        { id: 3, name: 'Petr', lastName: 'Sidorov' },
        { id: 4, name: null, lastName: null }
    ];

    public filterDefinition: Nullable<FilterDescriptor[]> = null;
    public sortTypes: TableSortTypes = null;
}

// Replaces the real web-worker service: runs the worker function synchronously
// on a deep-cloned payload instead of spawning an actual worker thread.
@Injectable()
class MockWebWorkerThreadService {
    public run<T, K>(workerFunction: (input?: K) => T, data?: K): Promise<T> {
        return Promise.resolve(workerFunction(deepClone(data)!));
    }
}

describe('[TEST] Table builder', (): void => {
    let componentFixture: ComponentFixture<NgxTableBuilderMockComponent>;
    let component: NgxTableBuilderMockComponent;

    beforeEach((): void => {
        // NOTE(review): compileComponents() returns a Promise that is not
        // awaited here — confirm this is intentional for this build setup.
        TestBed.configureTestingModule({
            declarations: [NgxTableBuilderMockComponent],
            imports: [TableBuilderModule, NoopAnimationsModule],
            providers: [
                { provide: WebWorkerThreadService, useClass: MockWebWorkerThreadService },
                // Disable Angular Material's test-environment sanity checks.
                { provide: MATERIAL_SANITY_CHECKS, useValue: false }
            ]
        }).compileComponents();

        // Patch the sortable service's prototype so sorting resolves
        // immediately — NOTE(review): inferred from the stub's shape
        // (resolve(sorted)); confirm against the service's idleResolve.
        const someSortableService = TestBed.createComponent(TableBuilderComponent).componentInstance.sortable;
        jest.spyOn(someSortableService.constructor.prototype, 'idleResolve').mockImplementation(
            (resolve: Any, sorted: unknown) => resolve(sorted)
        );

        componentFixture = TestBed.createComponent(NgxTableBuilderMockComponent);
        component = componentFixture.componentInstance;
        componentFixture.autoDetectChanges();
    });

    afterAll((): void => {
        // Restore the patched prototype method so later suites see the original.
        const someSortableService = TestBed.createComponent(TableBuilderComponent).componentInstance.sortable;
        someSortableService.constructor.prototype.idleResolve.mockRestore();
    });

    it('should correct sort by input', async (): Promise<void> => {
        const tableBuilderComponent: TableBuilderComponent<PlainObject> = component.tableBuilderComponent;

        // Descending by id; the all-null row sorts first here.
        component.sortTypes = { id: 'desc' };
        componentFixture.detectChanges();
        await componentFixture.whenStable();

        expect(tableBuilderComponent.source).toEqual([
            { id: 4, name: null, lastName: null },
            { id: 3, name: 'Petr', lastName: 'Sidorov' },
            { id: 2, name: 'Ivan', lastName: 'Petrov' },
            { id: 1, name: 'Max', lastName: 'Ivanov' }
        ]);

        // eslint-disable-next-line require-atomic-updates
        component.sortTypes = { name: SortOrderType.ASC };
        componentFixture.detectChanges();
        await componentFixture.whenStable();

        expect(tableBuilderComponent.source).toEqual([
            { id: 4, name: null, lastName: null },
            { id: 2, name: 'Ivan', lastName: 'Petrov' },
            { id: 1, name: 'Max', lastName: 'Ivanov' },
            { id: 3, name: 'Petr', lastName: 'Sidorov' }
        ]);
    });

    it('should correct select after sort', async (): Promise<void> => {
        const mockClientEvent = new MouseEvent('click');
        // Shift-click extends the selection rather than replacing it.
        const mockShiftClientEvent = new MouseEvent('click', { shiftKey: true });
        const tableBuilderComponent: TableBuilderComponent<PlainObject> = component.tableBuilderComponent;

        componentFixture.detectChanges();
        await componentFixture.whenStable();

        expect(tableBuilderComponent.source).toEqual([
            { id: 1, name: 'Max', lastName: 'Ivanov' },
            { id: 2, name: 'Ivan', lastName: 'Petrov' },
            { id: 3, name: 'Petr', lastName: 'Sidorov' },
            { id: 4, name: null, lastName: null }
        ]);

        // Selection entries are keyed by row id.
        tableBuilderComponent.selection.selectRow(tableBuilderComponent.source![0], mockClientEvent);
        expect(tableBuilderComponent.selection.selectionModel.entries).toEqual({ 1: true });

        tableBuilderComponent.selection.selectRow(tableBuilderComponent.source![1], mockShiftClientEvent);
        expect(tableBuilderComponent.selection.selectionModel.entries).toEqual({ 1: true, 2: true });

        // eslint-disable-next-line require-atomic-updates
        component.sortTypes = { name: SortOrderType.DESC };
        componentFixture.detectChanges();
        await componentFixture.whenStable();

        expect(tableBuilderComponent.source).toEqual([
            { id: 3, name: 'Petr', lastName: 'Sidorov' },
            { id: 1, name: 'Max', lastName: 'Ivanov' },
            { id: 2, name: 'Ivan', lastName: 'Petrov' },
            { id: 4, name: null, lastName: null }
        ]);

        // Re-selecting after the sort resolves to the rows' new positions.
        tableBuilderComponent.selection.selectRow(tableBuilderComponent.source![0], mockClientEvent);
        expect(tableBuilderComponent.selection.selectionModel.entries).toEqual({ 3: true });

        tableBuilderComponent.selection.selectRow(tableBuilderComponent.source![1], mockShiftClientEvent);
        expect(tableBuilderComponent.selection.selectionModel.entries).toEqual({ 3: true, 1: true });

        tableBuilderComponent.selection.selectRow(tableBuilderComponent.source![2], mockShiftClientEvent);
        expect(tableBuilderComponent.selection.selectionModel.entries).toEqual({ 3: true, 1: true, 2: true });
    });

    it('should correctly filter table', async (): Promise<void> => {
        const tableBuilderComponent: TableBuilderComponent<PlainObject> = component.tableBuilderComponent;

        // Exercises every TableFilterType against the same fixture.
        tableBuilderComponent.filterable.setDefinition([
            { value: 'Si', type: TableFilterType.START_WITH, key: 'lastName' }
        ]);
        await tableBuilderComponent.sortAndFilter();
        expect(tableBuilderComponent.source).toEqual([{ id: 3, name: 'Petr', lastName: 'Sidorov' }]);

        tableBuilderComponent.filterable.setDefinition([
            { value: 'rov', type: TableFilterType.END_WITH, key: 'lastName' }
        ]);
        await tableBuilderComponent.sortAndFilter();
        expect(tableBuilderComponent.source).toEqual([
            { id: 2, name: 'Ivan', lastName: 'Petrov' },
            { id: 3, name: 'Petr', lastName: 'Sidorov' }
        ]);

        tableBuilderComponent.filterable.setDefinition([
            { value: 'i', type: TableFilterType.CONTAINS, key: 'lastName' }
        ]);
        await tableBuilderComponent.sortAndFilter();
        expect(tableBuilderComponent.source).toEqual([
            { id: 1, name: 'Max', lastName: 'Ivanov' },
            { id: 3, name: 'Petr', lastName: 'Sidorov' }
        ]);

        // Rows with a null lastName pass the negative filter.
        tableBuilderComponent.filterable.setDefinition([
            { value: 'i', type: TableFilterType.DOES_NOT_CONTAIN, key: 'lastName' }
        ]);
        await tableBuilderComponent.sortAndFilter();
        expect(tableBuilderComponent.source).toEqual([
            { id: 2, name: 'Ivan', lastName: 'Petrov' },
            { id: 4, lastName: null, name: null }
        ]);

        // EQUALS matches case-insensitively ('ivanov' matches 'Ivanov').
        tableBuilderComponent.filterable.setDefinition([
            { value: 'ivanov', type: TableFilterType.EQUALS, key: 'lastName' }
        ]);
        await tableBuilderComponent.sortAndFilter();
        expect(tableBuilderComponent.source).toEqual([{ id: 1, name: 'Max', lastName: 'Ivanov' }]);

        tableBuilderComponent.filterable.setDefinition([
            { value: 'petrov', type: TableFilterType.DOES_NOT_EQUAL, key: 'lastName' }
        ]);
        await tableBuilderComponent.sortAndFilter();
        expect(tableBuilderComponent.source).toEqual([
            { id: 1, name: 'Max', lastName: 'Ivanov' },
            { id: 3, name: 'Petr', lastName: 'Sidorov' },
            { id: 4, lastName: null, name: null }
        ]);

        // Numeric comparisons on id; the null row is retained by MORE_THAN —
        // NOTE(review): confirm that null > 2 passing is intended behavior.
        tableBuilderComponent.filterable.setDefinition([{ value: 2, type: TableFilterType.MORE_THAN, key: 'id' }]);
        await tableBuilderComponent.sortAndFilter();
        expect(tableBuilderComponent.source).toEqual([
            { id: 3, name: 'Petr', lastName: 'Sidorov' },
            { id: 4, lastName: null, name: null }
        ]);

        tableBuilderComponent.filterable.setDefinition([{ value: 2, type: TableFilterType.MORE_OR_EQUAL, key: 'id' }]);
        await tableBuilderComponent.sortAndFilter();
        expect(tableBuilderComponent.source).toEqual([
            { id: 2, name: 'Ivan', lastName: 'Petrov' },
            { id: 3, name: 'Petr', lastName: 'Sidorov' },
            { id: 4, lastName: null, name: null }
        ]);

        tableBuilderComponent.filterable.setDefinition([{ value: 2, type: TableFilterType.LESS_THAN, key: 'id' }]);
        await tableBuilderComponent.sortAndFilter();
        expect(tableBuilderComponent.source).toEqual([{ id: 1, name: 'Max', lastName: 'Ivanov' }]);

        tableBuilderComponent.filterable.setDefinition([{ value: 2, type: TableFilterType.LESS_OR_EQUAL, key: 'id' }]);
        await tableBuilderComponent.sortAndFilter();
        expect(tableBuilderComponent.source).toEqual([
            { id: 1, name: 'Max', lastName: 'Ivanov' },
            { id: 2, name: 'Ivan', lastName: 'Petrov' }
        ]);
    });

    it('should correctly filter table by input', async (): Promise<void> => {
        const tableBuilderComponent: TableBuilderComponent<PlainObject> = component.tableBuilderComponent;

        expect(tableBuilderComponent.source).toEqual([
            { id: 1, name: 'Max', lastName: 'Ivanov' },
            { id: 2, name: 'Ivan', lastName: 'Petrov' },
            { id: 3, name: 'Petr', lastName: 'Sidorov' },
            { id: 4, name: null, lastName: null }
        ]);

        component.filterDefinition = [{ value: 2, type: TableFilterType.LESS_OR_EQUAL, key: 'id' }];
        componentFixture.detectChanges();

        /**
         * Caretaker note:
         * since the filtering happens several times and outside the zone,
         * there is no way to catch the moment when the filtering is completed using `whenStable`
         */
        // eslint-disable-next-line no-restricted-globals
        await new Promise((resolve) => setTimeout(resolve, 1000));

        expect(tableBuilderComponent.source).toEqual([
            { id: 1, name: 'Max', lastName: 'Ivanov' },
            { id: 2, name: 'Ivan', lastName: 'Petrov' }
        ]);
    });
});
the_stack
import { ExampleError } from "@effect/core/test/stm/STM/test-utils"

// Behavioral tests for the STM data type. Each case builds a transaction,
// turns it into an effect with `.commit()`, and runs it via
// `unsafeRunPromise` (success value) or `unsafeRunPromiseExit` (full Exit).
// NOTE(review): STM, Either, Exit, Option, TRef, Chunk, List, Tuple, etc. are
// used without imports — presumably provided as globals by the tsplus build
// setup; confirm.
describe.concurrent("STM", () => {
  describe.concurrent("Using `STM.atomically` to perform different computations and call:", () => {
    describe.concurrent("absolve to convert", () => {
      it("a successful Right computation into the success channel", async () => {
        const program = STM.succeed(Either.right(42)).absolve().commit()

        const result = await program.unsafeRunPromise()

        assert.strictEqual(result, 42)
      })

      it("a successful Left computation into the error channel", async () => {
        const program = STM.succeed(Either.left("oh no!")).absolve().commit()

        const result = await program.unsafeRunPromiseExit()

        assert.isTrue(result.untraced() == Exit.fail("oh no!"))
      })
    })

    it("catchAll errors", async () => {
      // NOTE(review): `>` appears to be a tsplus operator alias for sequencing
      // two transactions (left then right) — confirm against the STM API.
      const program = (STM.fail("uh oh!") > STM.succeed("everything is fine"))
        .catchAll((s) => STM.succeed(`${s} phew`))
        .commit()

      const result = await program.unsafeRunPromise()

      assert.strictEqual(result, "uh oh! phew")
    })

    describe.concurrent("catchSome errors", () => {
      it("catch the specified error", async () => {
        type ErrorTest = "Error1"

        const program = (
          STM.fail<ErrorTest>("Error1") > STM.succeed("everything is fine")
        )
          .catchSome((e) => e === "Error1" ? Option.some(STM.succeed("gotcha")) : Option.none)
          .commit()

        const result = await program.unsafeRunPromise()

        assert.strictEqual(result, "gotcha")
      })

      it("lets the error pass", async () => {
        type ErrorTest = "Error1" | "Error2"

        const program = (
          STM.fail<ErrorTest>("Error2") > STM.succeed("everything is fine")
        )
          .catchSome((e) => e === "Error1" ? Option.some(STM.succeed("gotcha")) : Option.none)
          .commit()

        const result = await program.unsafeRunPromiseExit()

        assert.isTrue(result.untraced() == Exit.fail("Error2"))
      })
    })

    // TODO: implement after TQueue
    it.skip("repeatWhile to run effect while it satisfies predicate", async () => {
      // (for {
      //   a <- TQueue.bounded[Int](5)
      //   _ <- a.offerAll(List(0, 0, 0, 1, 2))
      //   n <- a.take.repeatWhile(_ == 0)
      // } yield assert(n)(equalTo(1))).commit
    })

    // TODO: implement after TQueue
    it.skip("repeatUntil to run effect until it satisfies predicate", async () => {
      // (for {
      //   a <- TQueue.bounded[Int](5)
      //   _ <- a.offerAll(List(0, 0, 0, 1, 2))
      //   b <- a.take.repeatUntil(_ == 1)
      // } yield assert(b)(equalTo(1))).commit
    })

    describe.concurrent("either to convert", () => {
      it("a successful computation into Right(a)", async () => {
        const program = STM.succeed(42).either().commit()

        const result = await program.unsafeRunPromise()

        assert.isTrue(result == Either.right(42))
      })

      it("a failed computation into Left(e)", async () => {
        const program = STM.fail("oh no!").either().commit()

        const result = await program.unsafeRunPromise()

        assert.isTrue(result == Either.left("oh no!"))
      })
    })

    it("eventually succeeds", async () => {
      // Fails ("ouch") while the counter is below 10; `eventually` retries the
      // whole transaction until it succeeds with the final value.
      function stm(ref: TRef<number>): STM<unknown, string, number> {
        return ref
          .get()
          .flatMap((n) => n < 10 ? ref.update((n) => n + 1) > STM.fail("ouch") : STM.succeed(n))
      }

      const program = TRef.make(0)
        .flatMap((ref) => stm(ref).eventually())
        .commit()

      const result = await program.unsafeRunPromise()

      assert.strictEqual(result, 10)
    })

    it("failed to make a failed computation and check the value", async () => {
      const program = STM.fail("bye bye world").commit()

      const result = await program.unsafeRunPromiseExit()

      assert.isTrue(result.untraced() == Exit.fail("bye bye world"))
    })

    it("filter filters a collection using an effectual predicate", async () => {
      // The predicate also records every visited element, proving that all
      // elements are evaluated even though only the even ones are kept.
      const program = STM.Do()
        .bind("ref", () => TRef.make(Chunk.empty<number>()))
        .bind(
          "results",
          ({ ref }) => STM.filter([2, 4, 6, 3, 5, 6], (n) => ref.update((chunk) => chunk.append(n)).as(n % 2 === 0))
        )
        .bind("effects", ({ ref }) => ref.get())
        .commit()

      const { effects, results } = await program.unsafeRunPromise()

      assert.isTrue(results == Chunk(2, 4, 6, 6))
      assert.isTrue(effects == Chunk(2, 4, 6, 3, 5, 6))
    })

    it("filterOrDie dies when predicate fails", async () => {
      const program = STM.succeed(1)
        .filterOrDie((n) => n !== 1, ExampleError)
        .commit()

      const result = await program.unsafeRunPromiseExit()

      assert.isTrue(result.untraced() == Exit.die(ExampleError))
    })

    it("filterOrDieMessage dies with message when predicate fails ", async () => {
      const program = STM.succeed(1)
        .filterOrDieMessage((n) => n !== 1, "dies")
        .commit()

      const result = await program.unsafeRunPromiseExit()

      assert.isTrue(
        result.isFailure() &&
          result.cause.isDieType() &&
          result.cause.value instanceof RuntimeError &&
          result.cause.value.message === "dies"
      )
    })

    describe.concurrent("filterOrElse", () => {
      it("returns checked failure", async () => {
        const program = STM.succeed(1)
          .filterOrElse((n) => n === 1, STM.succeed(2))
          .commit()

        const result = await program.unsafeRunPromise()

        assert.strictEqual(result, 1)
      })

      it("returns held value", async () => {
        const program = STM.succeed(1)
          .filterOrElse((n) => n !== 1, STM.succeed(2))
          .commit()

        const result = await program.unsafeRunPromise()

        assert.strictEqual(result, 2)
      })
    })

    describe.concurrent("filterOrElseWith", () => {
      it("returns checked failure", async () => {
        const program = STM.succeed(1)
          .filterOrElseWith(
            (n) => n === 1,
            (n) => STM.succeed(n + 1)
          )
          .commit()

        const result = await program.unsafeRunPromise()

        assert.strictEqual(result, 1)
      })

      it("returns held value", async () => {
        const program = STM.succeed(1)
          .filterOrElseWith(
            (n) => n !== 1,
            (n) => STM.succeed(n + 1)
          )
          .commit()

        const result = await program.unsafeRunPromise()

        assert.strictEqual(result, 2)
      })

      it("returns error", async () => {
        const program = (STM.fail(ExampleError) > STM.succeed(1))
          .filterOrElseWith(
            (n) => n !== 1,
            (n) => STM.succeed(n + 1)
          )
          .commit()

        const result = await program.unsafeRunPromiseExit()

        assert.isTrue(result.untraced() == Exit.fail(ExampleError))
      })
    })

    it("filterOrFail returns failure when predicate fails", async () => {
      const program = STM.succeed(1)
        .filterOrFail((n) => n !== 1, ExampleError)
        .commit()

      const result = await program.unsafeRunPromiseExit()

      assert.isTrue(result.untraced() == Exit.fail(ExampleError))
    })

    it("flatMapError to flatMap from one error to another", async () => {
      const program = STM.fail(-1)
        .flatMapError((n) => STM.succeed(`log: ${n}`))
        .commit()

      const result = await program.unsafeRunPromiseExit()

      assert.isTrue(result.untraced() == Exit.fail("log: -1"))
    })

    it("flatten", async () => {
      // Method form and static form of flatten must agree.
      const program = STM.Do()
        .bind("result1", () => STM.succeed(STM.succeed("test")).flatten())
        .bind("result2", () => STM.flatten(STM.succeed(STM.succeed("test"))))
        .commit()

      const { result1, result2 } = await program.unsafeRunPromise()

      assert.strictEqual(result1, "test")
      assert.isTrue(result1 === result2)
    })

    describe.concurrent("flattenErrorOption", () => {
      it("with an existing error and return it", async () => {
        const program = STM.fail(Option.some("oh no!"))
          .flattenErrorOption("default error")
          .commit()

        const result = await program.unsafeRunPromiseExit()

        assert.isTrue(result.untraced() == Exit.fail("oh no!"))
      })

      it("with no error and default to value", async () => {
        const program = STM.fail(Option.none)
          .flattenErrorOption("default error")
          .commit()

        const result = await program.unsafeRunPromiseExit()

        assert.isTrue(result.untraced() == Exit.fail("default error"))
      })
    })

    it("fold to handle both failure and success", async () => {
      const program = STM.Do()
        .bind("result1", () => STM.succeed("yes").fold(
          () => -1,
          () => 1
        ))
        .bind("result2", () => STM.fail("no").fold(
          () => -1,
          () => 1
        ))
        .commit()

      const { result1, result2 } = await program.unsafeRunPromise()

      assert.strictEqual(result1, 1)
      assert.strictEqual(result2, -1)
    })

    it("foldSTM to fold over the `STM` effect, and handle failure and success", async () => {
      const program = STM.Do()
        .bind("result1", () => STM.succeed("yes").foldSTM(() => STM.succeed("no"), STM.succeedNow))
        .bind("result2", () => STM.fail("no").foldSTM(STM.succeedNow, () => STM.succeed("yes")))
        .commit()

      const { result1, result2 } = await program.unsafeRunPromise()

      assert.strictEqual(result1, "yes")
      assert.strictEqual(result2, "no")
    })

    describe.concurrent("foldLeft", () => {
      it("with a successful step function sums the list properly", async () => {
        const program = STM.reduce(List(1, 2, 3, 4, 5), 0, (acc, n) => STM.succeed(acc + n)).commit()

        const result = await program.unsafeRunPromise()

        assert.strictEqual(result, 15)
      })

      it("with a failing step function returns a failed transaction", async () => {
        const program = STM.reduce(List(1), 0, () => STM.fail("fail")).commit()

        const result = await program.unsafeRunPromiseExit()

        assert.isTrue(result.untraced() == Exit.fail("fail"))
      })

      it("run sequentially from left to right", async () => {
        // Prepending then reversing preserves the visit order, so the result
        // equals the input list only if reduction went left-to-right.
        const program = STM.reduce(
          List(1, 2, 3, 4, 5),
          List.empty<number>(),
          (acc: List<number>, n) => STM.succeed(acc.prepend(n))
        )
          .map((list: List<number>) => list.reverse())
          .commit()

        const result = await program.unsafeRunPromise()

        assert.isTrue(result == List(1, 2, 3, 4, 5))
      })
    })

    describe.concurrent("foldRight", () => {
      it("with a successful step function sums the list properly", async () => {
        const program = STM.reduceRight(List(1, 2, 3, 4, 5), 0, (acc, n) => STM.succeed(acc + n)).commit()

        const result = await program.unsafeRunPromise()

        assert.strictEqual(result, 15)
      })

      it("with a failing step function returns a failed transaction", async () => {
        const program = STM.reduceRight(List(1, 2, 3, 4, 5), 0, (acc, n) => STM.fail("fail")).commit()

        const result = await program.unsafeRunPromiseExit()

        assert.isTrue(result.untraced() == Exit.fail("fail"))
      })

      it("run sequentially from right to left", async () => {
        const program = STM.reduceRight(
          List(1, 2, 3, 4, 5),
          List.empty<number>(),
          (n, acc: List<number>) => STM.succeed(acc.prepend(n))
        )
          .map((list: List<number>) => list.reverse())
          .commit()

        const result = await program.unsafeRunPromise()

        assert.isTrue(result == List(5, 4, 3, 2, 1))
      })
    })

    describe.concurrent("head", () => {
      it("extracts the value from the List", async () => {
        const program = STM.succeed(List(1, 2)).head.commit()

        const result = await program.unsafeRunPromise()

        assert.strictEqual(result, 1)
      })

      it("returns None if list is Empty", async () => {
        const program = STM.succeed(List.empty<number>()).head.commit()

        const result = await program.unsafeRunPromiseExit()

        assert.isTrue(result.untraced() == Exit.fail(Option.none))
      })

      it("returns the Error around Some", async () => {
        const program = STM.fromEither(
          Either.leftW<string, List<number>>("my error")
        ).head.commit()

        const result = await program.unsafeRunPromiseExit()

        assert.isTrue(result.untraced() == Exit.fail(Option.some("my error")))
      })
    })

    describe.concurrent("ifSTM", () => {
      it("runs `onTrue` if result of `b` is `true`", async () => {
        const program = STM.ifSTM(
          STM.succeed(true),
          STM.succeed(true),
          STM.succeed(false)
        ).commit()

        const result = await program.unsafeRunPromise()

        assert.isTrue(result)
      })

      it("runs `onFalse` if result of `b` is `false`", async () => {
        const program = STM.ifSTM(
          STM.succeed(false),
          STM.succeed(true),
          STM.succeed(false)
        ).commit()

        const result = await program.unsafeRunPromise()

        assert.isFalse(result)
      })
    })

    describe.concurrent("left", () => {
      it("on Left value", async () => {
        const program = STM.succeed(Either.left("left")).left.commit()

        const result = await program.unsafeRunPromise()

        assert.strictEqual(result, "left")
      })

      it("on Right value", async () => {
        const program = STM.succeed(Either.right("right")).left.commit()

        const result = await program.unsafeRunPromiseExit()

        assert.isTrue(result.untraced() == Exit.fail(Either.right("right")))
      })

      it("on failure", async () => {
        const program = STM.fail("fail").left.commit()

        const result = await program.unsafeRunPromiseExit()

        assert.isTrue(result.untraced() == Exit.fail(Either.left("fail")))
      })

      it("lifting a value", async () => {
        const program = STM.left(42).commit()

        const result = await program.unsafeRunPromise()

        assert.isTrue(result == Either.left(42))
      })
    })

    describe.concurrent("mapBoth when", () => {
      it("having a success value", async () => {
        const program = STM.succeed(1)
          .mapBoth(
            () => -1,
            (n) => `${n} as string`
          )
          .commit()

        const result = await program.unsafeRunPromise()

        assert.strictEqual(result, "1 as string")
      })

      it("having a fail value", async () => {
        const program = STM.fail(-1)
          .mapBoth(
            (n) => `${n} as string`,
            () => 0
          )
          .commit()

        const result = await program.unsafeRunPromiseExit()

        assert.isTrue(result.untraced() == Exit.fail("-1 as string"))
      })
    })

    it("mapError to map from one error to another", async () => {
      const program = STM.fail(-1)
        .mapError(() => "oh no!")
        .commit()

      const result = await program.unsafeRunPromiseExit()

      assert.isTrue(result.untraced() == Exit.fail("oh no!"))
    })

    describe.concurrent("merge", () => {
      it("on error with same type", async () => {
        const program = STM.fromEither<number, number>(Either.left(1)).merge().commit()

        const result = await program.unsafeRunPromise()

        assert.strictEqual(result, 1)
      })

      it("when having a successful value", async () => {
        const program = STM.fromEither<number, number>(Either.right(1)).merge().commit()

        const result = await program.unsafeRunPromise()

        assert.strictEqual(result, 1)
      })
    })

    describe.concurrent("none", () => {
      it("when A is None", async () => {
        const program = STM.succeed(Option.none).noneOrFail().commit()

        const result = await program.unsafeRunPromise()

        assert.isUndefined(result)
      })

      it("when Error", async () => {
        const program = STM.fail(ExampleError).noneOrFail().commit()

        const result = await program.unsafeRunPromiseExit()

        assert.isTrue(result.untraced() == Exit.fail(Option.some(ExampleError)))
      })

      it("when A is Some(a)", async () => {
        const program = STM.succeed(Option.some(1)).noneOrFail().commit()

        const result = await program.unsafeRunPromiseExit()

        assert.isTrue(result.untraced() == Exit.fail(Option.none))
      })

      it("lifting a value", async () => {
        const program = STM.none.commit()

        const result = await program.unsafeRunPromise()

        assert.isTrue(result == Option.none)
      })
    })

    describe.concurrent("option to convert:", () => {
      it("a successful computation into Some(a)", async () => {
        const program = STM.succeed(42).option().commit()

        const result = await program.unsafeRunPromise()

        assert.isTrue(result == Option.some(42))
      })

      it("a failed computation into None", async () => {
        const program = STM.fail("oh no!").option().commit()

        const result = await program.unsafeRunPromise()

        assert.isTrue(result == Option.none)
      })
    })

    describe.concurrent("optional to convert:", () => {
      it("a Some(e) in E to a e in E", async () => {
        const program = STM.fromEither<Option<string>, number>(
          Either.left(Option.some("my error"))
        )
          .unsome()
          .commit()

        const result = await program.unsafeRunPromiseExit()

        assert.isTrue(result.untraced() == Exit.fail("my error"))
      })

      it("a None in E into None in A", async () => {
        const program = STM.fromEither<Option<string>, number>(Either.left(Option.none))
          .unsome()
          .commit()

        const result = await program.unsafeRunPromise()

        assert.isTrue(result == Option.none)
      })

      it("no error", async () => {
        const program = STM.fromEither<Option<string>, number>(Either.right(42))
          .unsome()
          .commit()

        const result = await program.unsafeRunPromise()

        assert.isTrue(result == Option.some(42))
      })
    })

    describe.concurrent("orDie", () => {
      it("when failure should die", async () => {
        const program = STM.fail(() => {
          throw ExampleError
        })
          .orDie()
          .commit()

        const result = await program.unsafeRunPromiseExit()

        assert.isTrue(result.untraced() == Exit.die(ExampleError))
      })

      it("when succeed should keep going", async () => {
        const program = STM.succeed(1).orDie().commit()

        const result = await program.unsafeRunPromise()

        assert.strictEqual(result, 1)
      })
    })

    describe.concurrent("orDieWith", () => {
      it("when failure should die", async () => {
        const program = STM.fail("-1")
          .orDieWith((s) => new Error(s))
          .commit()

        const result = await program.unsafeRunPromiseExit()

        assert.isTrue(
          result.isFailure() &&
            result.cause.isDieType() &&
            result.cause.value instanceof Error &&
            result.cause.value.message === "-1"
        )
      })

      it("when succeed should keep going", async () => {
        const program = STM.fromEither<string, number>(Either.right(1))
          .orDieWith((s) => new Error(s))
          .commit()

        const result = await program.unsafeRunPromise()

        assert.strictEqual(result, 1)
      })
    })

    describe.concurrent("partition", () => {
      it("collects only successes", async () => {
        const input = Chunk.range(0, 9)
        const program = STM.partition(input, STM.succeedNow).commit()

        const {
          tuple: [left, right]
        } = await program.unsafeRunPromise()

        assert.isTrue(left.isEmpty())
        assert.isTrue(right == input)
      })

      it("collects only failures", async () => {
        const input = List.from(Array.from({ length: 10 }, () => 0))
        const program = STM.partition(input, STM.failNow).commit()

        const {
          tuple: [left, right]
        } = await program.unsafeRunPromise()

        assert.isTrue(left == input)
        assert.isTrue(right.isEmpty())
      })

      it("collects failures and successes", async () => {
        // Evens fail, odds succeed: the tuple separates them as (fails, successes).
        const input = Chunk.range(0, 9)
        const program = STM.partition(input, (n) => n % 2 === 0 ? STM.fail(n) : STM.succeed(n)).commit()

        const {
          tuple: [left, right]
        } = await program.unsafeRunPromise()

        assert.isTrue(left == Chunk(0, 2, 4, 6, 8))
        assert.isTrue(right == Chunk(1, 3, 5, 7, 9))
      })

      it("evaluates effects in correct order", async () => {
        const input = List(2, 4, 6, 3, 5, 6)
        const program = STM.Do()
          .bind("ref", () => TRef.make<List<number>>(List.empty()))
          .tap(({ ref }) => STM.partition(input, (n) => ref.update((list) => list.prepend(n))))
          .flatMap(({ ref }) => ref.get().map((list) => list.reverse()))
          .commit()

        const result = await program.unsafeRunPromise()

        assert.isTrue(result == List(2, 4, 6, 3, 5, 6))
      })
    })

    describe.concurrent("reject", () => {
      it("returns failure ignoring value", async () => {
        const program = STM.succeed(0)
          .reject((n) => (n !== 0 ? Option.some("partial failed") : Option.none))
          .commit()

        const result = await program.unsafeRunPromise()

        assert.strictEqual(result, 0)
      })

      it("returns failure ignoring value", async () => {
        const program = STM.succeed(1)
          .reject((n) => (n !== 0 ? Option.some("partial failed") : Option.none))
          .commit()

        const result = await program.unsafeRunPromiseExit()

        assert.isTrue(result.untraced() == Exit.fail("partial failed"))
      })
    })

    describe.concurrent("rejectSTM", () => {
      it("doesnt collect value", async () => {
        const program = STM.succeed(0)
          .rejectSTM((n) => n !== 0 ? Option.some(STM.succeed("partial failed")) : Option.none)
          .commit()

        const result = await program.unsafeRunPromise()

        assert.strictEqual(result, 0)
      })

      it("returns failure ignoring value", async () => {
        const program = STM.succeed(1)
          .rejectSTM((n) => n !== 0 ? Option.some(STM.succeed("partial failed")) : Option.none)
          .commit()

        const result = await program.unsafeRunPromiseExit()

        assert.isTrue(result.untraced() == Exit.fail("partial failed"))
      })
    })

    describe.concurrent("replicate", () => {
      it("zero", async () => {
        const program = STM.collectAll(STM.replicate(0, STM.succeed(12))).commit()

        const result = await program.unsafeRunPromise()

        assert.isTrue(result.isEmpty())
      })

      it("negative", async () => {
        const program = STM.collectAll(STM.replicate(-2, STM.succeed(12))).commit()

        const result = await program.unsafeRunPromise()

        assert.isTrue(result.isEmpty())
      })

      it("positive", async () => {
        const program = STM.collectAll(STM.replicate(2, STM.succeed(12))).commit()

        const result = await program.unsafeRunPromise()

        assert.isTrue(result == Chunk(12, 12))
      })
    })

    describe.concurrent("right", () => {
      it("on Right value", async () => {
        const program = STM.succeed(Either.right("right")).right.commit()

        const result = await program.unsafeRunPromise()

        assert.strictEqual(result, "right")
      })

      it("on Left value", async () => {
        const program = STM.succeed(Either.left("left")).right.commit()

        const result = await program.unsafeRunPromiseExit()

        assert.isTrue(result.untraced() == Exit.fail(Either.left("left")))
      })

      it("on failure", async () => {
        const program = STM.fail("fail").right.commit()

        const result = await program.unsafeRunPromiseExit()

        assert.isTrue(result.untraced() == Exit.fail(Either.right("fail")))
      })

      it("lifting a value", async () => {
        const program = STM.right(42).commit()

        const result = await program.unsafeRunPromise()

        assert.isTrue(result == Either.right(42))
      })
    })

    describe.concurrent("some", () => {
      it("extracts the value from Some", async () => {
        const program = STM.succeed(Option.some(1)).some.commit()

        const result = await program.unsafeRunPromise()

        assert.strictEqual(result, 1)
      })

      it("fails on None", async () => {
        const program = STM.succeed(Option.none).some.commit()

        const result = await program.unsafeRunPromiseExit()

        assert.isTrue(result.untraced() == Exit.fail(Option.none))
      })

      it("fails when given an exception", async () => {
        const program = STM.fail(ExampleError).some.commit()

        const result = await program.unsafeRunPromiseExit()

        assert.isTrue(
          result.isFailure() &&
            result.cause.isFailType() &&
            result.cause.value._tag === "Some" &&
            result.cause.value.value instanceof Error &&
            result.cause.value.value.message === "fail"
        )
      })

      it("lifting a value", async () => {
        const program = STM.some(42).commit()

        const result = await program.unsafeRunPromise()

        assert.isTrue(result == Option.some(42))
      })
    })

    describe.concurrent("someOrElse", () => {
      it("extracts the value from Some", async () => {
        const program = STM.succeed(Option.some(1)).someOrElse(42).commit()

        const result = await program.unsafeRunPromise()

        assert.strictEqual(result, 1)
      })

      it("falls back to the default value if None", async () => {
        const program = STM.succeed(Option.none).someOrElse(42).commit()

        const result = await program.unsafeRunPromise()

        assert.strictEqual(result, 42)
      })

      it("does not change failed state", async () => {
        const program = STM.fail(ExampleError).someOrElse(42).commit()

        const result = await program.unsafeRunPromiseExit()

        assert.isTrue(
          result.isFailure() &&
            result.cause.isFailType() &&
            result.cause.value instanceof Error &&
            result.cause.value.message === "fail"
        )
      })
    })

    describe.concurrent("someOrElseSTM", () => {
      it("extracts the value from Some", async () => {
        const program = STM.succeed(Option.some(1))
          .someOrElseSTM(STM.succeed(42))
          .commit()

        const result = await program.unsafeRunPromise()

        assert.strictEqual(result, 1)
      })

      it("falls back to the default value if None", async () => {
        const program = STM.succeed(Option.none).someOrElseSTM(STM.succeed(42)).commit()

        const result = await program.unsafeRunPromise()

        assert.strictEqual(result, 42)
      })

      it("does not change failed state", async () => {
        const program = STM.fail(ExampleError).someOrElseSTM(STM.succeed(42)).commit()

        const result = await program.unsafeRunPromiseExit()

        assert.isTrue(
          result.isFailure() &&
            result.cause.isFailType() &&
            result.cause.value instanceof Error &&
            result.cause.value.message === "fail"
        )
      })
    })

    describe.concurrent("someOrFail", () => {
      it("extracts the value from Some", async () => {
        const program = STM.succeed(Option.some(1)).someOrFail(ExampleError).commit()

        const result = await program.unsafeRunPromise()

        assert.strictEqual(result, 1)
      })

      it("fails on None", async () => {
        const program = STM.succeed(Option.none).someOrFail(ExampleError).commit()

        const result = await program.unsafeRunPromiseExit()

        assert.isTrue(
          result.isFailure() &&
            result.cause.isFailType() &&
            result.cause.value instanceof Error &&
            result.cause.value.message === "fail"
        )
      })

      it("fails with the original error", async () => {
        const program = STM.fail(ExampleError)
          .someOrFail(new Error("not example"))
          .commit()

        const result = await program.unsafeRunPromiseExit()

        assert.isTrue(
          result.isFailure() &&
            result.cause.isFailType() &&
            result.cause.value instanceof Error &&
            result.cause.value.message === "fail"
        )
      })
    })

    describe.concurrent("someOrFailException", () => {
      it("extracts the optional value", async () => {
        const program = STM.succeed(Option.some(42)).someOrFailException().commit()

        const result = await program.unsafeRunPromise()

        assert.strictEqual(result, 42)
      })

      it("fails when given a None", async () => {
        const program = STM.succeed(Option.none).someOrFailException().commit()

        const result = await program.unsafeRunPromiseExit()

        assert.isTrue(result.isFailure() && result.cause.isFailType() && result.cause.value instanceof NoSuchElement)
      })
    })

    it("succeed to make a successful computation and check the value", async () => {
      const program = STM.succeed("hello world").commit()

      const result = await program.unsafeRunPromise()

      assert.strictEqual(result, "hello world")
    })

    describe.concurrent("summarized", () => {
      it("returns summary and value", async () => {
        // The increment runs once before, once as the value, and once after:
        // summary is (1, 3) around a value of 2.
        const program = STM.Do()
          .bind("counter", () => TRef.make(0))
          .bindValue("increment", ({ counter }) => counter.updateAndGet((n) => n + 1))
          .flatMap(({ increment }) => increment.summarized(increment, (start, end) => Tuple(start, end)))
          .commit()

        const {
          tuple: [
            {
              tuple: [start, end]
            },
            value
          ]
        } = await program.unsafeRunPromise()

        assert.strictEqual(start, 1)
        assert.strictEqual(end, 3)
        assert.strictEqual(value, 2)
      })
    })

    it("zip to return a tuple of two computations", async () => {
      const program = STM.succeed(1).zip(STM.succeed("A")).commit()

      const {
        tuple: [n, s]
      } = await program.unsafeRunPromise()

      assert.strictEqual(n, 1)
      assert.strictEqual(s, "A")
    })

    it("zipWith to perform an action to two computations", async () => {
      const program = STM.succeed(598)
        .zipWith(STM.succeed(2), (a, b) => a + b)
        .commit()

      const result = await program.unsafeRunPromise()

      assert.strictEqual(result, 600)
    })
  })
})
the_stack
import * as d3 from "d3";
import * as Typesettable from "typesettable";
import * as Formatters from "../core/formatters";
import { QuantitativeScale } from "../scales/quantitativeScale";
import * as Utils from "../utils";
import { Axis, AxisOrientation } from "./axis";

export class Numeric extends Axis<number> {
  // Tick label position relative to the tick marks:
  // "top"/"center"/"bottom" for a vertical axis, "left"/"center"/"right" for a horizontal one.
  private _tickLabelPositioning = "center";
  // When true, label widths are estimated from "M"-width * character count instead of measured exactly.
  private _usesTextWidthApproximation = false;
  // Caching text measurer built over the tick label container in _setup().
  private _measurer: Typesettable.CacheMeasurer;
  // Single-line wrapper; created in _setup(). NOTE(review): not referenced elsewhere in this class — confirm it is used by the base class or callers.
  private _wrapper: Typesettable.Wrapper;

  /**
   * Constructs a Numeric Axis.
   *
   * A Numeric Axis is a visual representation of a QuantitativeScale.
   *
   * @constructor
   * @param {QuantitativeScale} scale
   * @param {AxisOrientation} orientation Orientation of this Numeric Axis.
   */
  constructor(scale: QuantitativeScale<number>, orientation: AxisOrientation) {
    super(scale, orientation);
    // Default formatter for tick values; callers may override via formatter().
    this.formatter(Formatters.general());
  }

  /**
   * Creates the measurement context once the tick label container exists.
   * Must run after the base class has built this._tickLabelContainer.
   */
  protected _setup() {
    super._setup();
    const context = new Typesettable.SvgContext(this._tickLabelContainer.node() as SVGElement, Axis.TICK_LABEL_CLASS);
    // CacheMeasurer memoizes text measurements; reset via invalidateCache() when fonts/styles change.
    this._measurer = new Typesettable.CacheMeasurer(context);
    this._wrapper = new Typesettable.Wrapper().maxLines(1);
  }

  /**
   * Computes the width this (vertical) axis wants: tick mark length plus padding plus the
   * widest tick label (exact or approximated depending on usesTextWidthApproximation()).
   */
  protected _computeWidth() {
    const maxTextWidth = this._usesTextWidthApproximation ? this._computeApproximateTextWidth() : this._computeExactTextWidth();
    if (this._tickLabelPositioning === "center") {
      // Centered labels sit beyond the tick marks, so the widths stack.
      return this._maxLabelTickLength() + this.tickLabelPadding() + maxTextWidth;
    } else {
      // Top/bottom-positioned labels overlap the tick-mark region; take the larger of the two.
      return Math.max(this._maxLabelTickLength(), this.tickLabelPadding() + maxTextWidth);
    }
  }

  /**
   * Widest formatted tick label, measured exactly with the cached measurer.
   * Returns 0 when there are no tick values (Utils.Math.max default).
   */
  private _computeExactTextWidth(): number {
    const tickValues = this._getTickValues();
    const textLengths = tickValues.map((v: any) => {
      const formattedValue = this.formatter()(v);
      return this._measurer.measure(formattedValue).width;
    });
    return Utils.Math.max(textLengths, 0);
  }

  /**
   * Widest formatted tick label, approximated as (character count) * (width of "M").
   * Faster than exact measurement; may over-estimate for narrow glyphs.
   */
  private _computeApproximateTextWidth(): number {
    const tickValues = this._getTickValues();
    const mWidth = this._measurer.measure("M").width;
    const textLengths = tickValues.map((v: number): number => {
      const formattedValue = this.formatter()(v);
      return formattedValue.length * mWidth;
    });
    return Utils.Math.max(textLengths, 0);
  }

  /**
   * Computes the height this (horizontal) axis wants, using the measurer's default-text height.
   * Mirrors the center-vs-side logic of _computeWidth().
   */
  protected _computeHeight() {
    const textHeight = this._measurer.measure().height;
    if (this._tickLabelPositioning === "center") {
      return this._maxLabelTickLength() + this.tickLabelPadding() + textHeight;
    } else {
      return Math.max(this._maxLabelTickLength(), this.tickLabelPadding() + textHeight);
    }
  }

  /**
   * Returns the scale's ticks restricted to the current domain
   * (handles domains given in either order).
   */
  protected _getTickValues() {
    const scale = (<QuantitativeScale<number>> this._scale);
    const domain = scale.domain();
    const min = domain[0] <= domain[1] ? domain[0] : domain[1];
    const max = domain[0] >= domain[1] ? domain[0] : domain[1];
    return scale.ticks().filter((i: number) => i >= min && i <= max);
  }

  /**
   * Reacts to a scale change: if this is a vertical axis whose required width moved outside
   * the current width (plus margin slack), trigger a full redraw; otherwise just re-render.
   */
  protected _rescale() {
    if (!this._isSetup) {
      return;
    }
    if (!this.isHorizontal()) {
      const reComputedWidth = this._computeWidth();
      // Redraw only when the needed width grew past the allotted width or shrank below it
      // by more than the margin — avoids redraw thrash for small changes.
      if (reComputedWidth > this.width() || reComputedWidth < (this.width() - this.margin())) {
        this.redraw();
        return;
      }
    }
    this.render();
  }

  /**
   * Renders tick labels: computes anchor/offsets from the label positioning and axis
   * orientation, joins label <text> elements to the tick values, translates the label
   * group, then runs the hide passes (end labels, overflow, overlap, unlabeled marks).
   * Order of the hide passes matters — overlap hiding only considers still-visible labels.
   */
  public renderImmediately() {
    super.renderImmediately();
    // Attributes applied to every tick label; x/y are filled in per orientation below.
    const tickLabelAttrHash: { [key: string]: number | string | ((d: any) => number) } = {
      x: <any> 0,
      y: <any> 0,
      dx: "0em",
      dy: "0.3em",
    };
    const tickMarkLength = this._maxLabelTickLength();
    const tickLabelPadding = this.tickLabelPadding();
    let tickLabelTextAnchor = "middle";
    let labelGroupTransformX = 0;
    let labelGroupTransformY = 0;
    let labelGroupShiftX = 0;
    let labelGroupShiftY = 0;
    // First pass: offsets determined by the label position relative to the marks.
    if (this.isHorizontal()) {
      switch (this._tickLabelPositioning) {
        case "left":
          tickLabelTextAnchor = "end";
          labelGroupTransformX = -tickLabelPadding;
          labelGroupShiftY = tickLabelPadding;
          break;
        case "center":
          labelGroupShiftY = tickMarkLength + tickLabelPadding;
          break;
        case "right":
          tickLabelTextAnchor = "start";
          labelGroupTransformX = tickLabelPadding;
          labelGroupShiftY = tickLabelPadding;
          break;
      }
    } else {
      switch (this._tickLabelPositioning) {
        case "top":
          tickLabelAttrHash["dy"] = "-0.3em";
          labelGroupShiftX = tickLabelPadding;
          labelGroupTransformY = -tickLabelPadding;
          break;
        case "center":
          labelGroupShiftX = tickMarkLength + tickLabelPadding;
          break;
        case "bottom":
          tickLabelAttrHash["dy"] = "1em";
          labelGroupShiftX = tickLabelPadding;
          labelGroupTransformY = tickLabelPadding;
          break;
      }
    }
    // Second pass: combine with the axis orientation to anchor labels on the tick-mark line.
    const tickMarkAttrHash = this._generateTickMarkAttrHash();
    switch (this.orientation()) {
      case "bottom":
        tickLabelAttrHash["x"] = tickMarkAttrHash["x1"];
        tickLabelAttrHash["dy"] = "0.95em";
        labelGroupTransformY = <number> tickMarkAttrHash["y1"] + labelGroupShiftY;
        break;
      case "top":
        tickLabelAttrHash["x"] = tickMarkAttrHash["x1"];
        tickLabelAttrHash["dy"] = "-.25em";
        labelGroupTransformY = <number> tickMarkAttrHash["y1"] - labelGroupShiftY;
        break;
      case "left":
        tickLabelTextAnchor = "end";
        labelGroupTransformX = <number> tickMarkAttrHash["x1"] - labelGroupShiftX;
        tickLabelAttrHash["y"] = tickMarkAttrHash["y1"];
        break;
      case "right":
        tickLabelTextAnchor = "start";
        labelGroupTransformX = <number> tickMarkAttrHash["x1"] + labelGroupShiftX;
        tickLabelAttrHash["y"] = tickMarkAttrHash["y1"];
        break;
    }
    // D3 data join: one <text> per tick value; enter + merge keeps existing nodes updated.
    const tickLabelValues = this._getTickValues();
    const tickLabelsUpdate = this._tickLabelContainer.selectAll("." + Axis.TICK_LABEL_CLASS).data(tickLabelValues);
    tickLabelsUpdate.exit().remove();
    const tickLabels = tickLabelsUpdate
      .enter()
      .append("text")
      .classed(Axis.TICK_LABEL_CLASS, true)
      .merge(tickLabelsUpdate);
    tickLabels.style("text-anchor", tickLabelTextAnchor)
      .style("visibility", "inherit")
      .attrs(tickLabelAttrHash)
      .text((s: any) => this.formatter()(s));
    const labelGroupTransform = "translate(" + labelGroupTransformX + ", " + labelGroupTransformY + ")";
    this._tickLabelContainer.attr("transform", labelGroupTransform);
    // Start from all marks visible, then selectively hide.
    this._showAllTickMarks();
    if (!this.showEndTickLabels()) {
      this._hideEndTickLabels();
    }
    this._hideOverflowingTickLabels();
    this._hideOverlappingTickLabels();
    if (this._tickLabelPositioning !== "center") {
      this._hideTickMarksWithoutLabel();
    }
    return this;
  }

  /**
   * Gets the tick label position relative to the tick marks.
   *
   * @returns {string} The current tick label position.
   */
  public tickLabelPosition(): string;
  /**
   * Sets the tick label position relative to the tick marks.
   *
   * @param {string} position "top"/"center"/"bottom" for a vertical Numeric Axis,
   * "left"/"center"/"right" for a horizontal Numeric Axis.
   * @returns {Numeric} The calling Numeric Axis.
   */
  public tickLabelPosition(position: string): this;
  public tickLabelPosition(position?: string): any {
    if (position == null) {
      return this._tickLabelPositioning;
    } else {
      const positionLC = position.toLowerCase();
      // Validate against the orientation-appropriate set of positions.
      if (this.isHorizontal()) {
        if (!(positionLC === "left" || positionLC === "center" || positionLC === "right")) {
          throw new Error(positionLC + " is not a valid tick label position for a horizontal NumericAxis");
        }
      } else {
        if (!(positionLC === "top" || positionLC === "center" || positionLC === "bottom")) {
          throw new Error(positionLC + " is not a valid tick label position for a vertical NumericAxis");
        }
      }
      this._tickLabelPositioning = positionLC;
      this.redraw();
      return this;
    }
  }

  /**
   * Gets the approximate text width setting.
   *
   * @returns {boolean} The current text width approximation setting.
   */
  public usesTextWidthApproximation(): boolean;
  /**
   * Sets the approximate text width setting. Approximating text width
   * measurements can drastically speed up plot rendering, but the plot may
   * have extra white space that would be eliminated by exact measurements.
   * Additionally, very abnormal fonts may not approximate reasonably.
   *
   * @param {boolean} The new text width approximation setting.
   * @returns {Axes.Numeric} The calling Axes.Numeric.
   */
  public usesTextWidthApproximation(enable: boolean): this;
  public usesTextWidthApproximation(enable?: boolean): any {
    if (enable == null) {
      return this._usesTextWidthApproximation;
    } else {
      this._usesTextWidthApproximation = enable;
      return this;
    }
  }

  /**
   * Hides the first and/or last tick label when its bounding rect falls outside
   * the axis element's bounding box.
   */
  private _hideEndTickLabels() {
    const boundingBox = this.element().node().getBoundingClientRect();
    const tickLabels = this._tickLabelContainer.selectAll("." + Axis.TICK_LABEL_CLASS);
    if (tickLabels.size() === 0) {
      return;
    }
    const firstTickLabel = <Element> tickLabels.nodes()[0];
    if (!Utils.DOM.clientRectInside(firstTickLabel.getBoundingClientRect(), boundingBox)) {
      d3.select(firstTickLabel).style("visibility", "hidden");
    }
    const lastTickLabel = <Element> tickLabels.nodes()[tickLabels.size() - 1];
    if (!Utils.DOM.clientRectInside(lastTickLabel.getBoundingClientRect(), boundingBox)) {
      d3.select(lastTickLabel).style("visibility", "hidden");
    }
  }

  /**
   * Thins out tick labels so none overlap: finds the smallest interval such that showing
   * every interval-th label is overlap-free, then hides all labels not on that interval.
   */
  private _hideOverlappingTickLabels() {
    // Only consider labels that earlier passes left visible.
    const visibleTickLabels = this._tickLabelContainer
      .selectAll("." + Axis.TICK_LABEL_CLASS)
      .filter(function (d: any, i: number) {
        const visibility = d3.select(this).style("visibility");
        return (visibility === "inherit") || (visibility === "visible");
      });
    // NOTE(review): HTMLScriptElement looks like a typo — only getBoundingClientRect() is used,
    // so Element would be the accurate type; cannot confirm intent from here.
    const visibleTickLabelRects = visibleTickLabels.nodes().map((label: HTMLScriptElement) => label.getBoundingClientRect());
    let interval = 1;
    // _hasOverlapWithInterval returns false while overlaps remain; grow the interval until clear
    // (or until the interval spans all labels).
    while (!this._hasOverlapWithInterval(interval, visibleTickLabelRects) && interval < visibleTickLabelRects.length) {
      interval += 1;
    }
    visibleTickLabels.each(function (d: string, i: number) {
      const tickLabel = d3.select(this);
      if (i % interval !== 0) {
        tickLabel.style("visibility", "hidden");
      }
    });
  }

  /**
   * The method is responsible for evenly spacing the labels on the axis.
   * @return test to see if taking every `interval` rectangle from `rects`
   * will result in labels not overlapping — i.e. returns true when no pair of
   * sampled rectangles overlaps, false as soon as one overlapping pair is found.
   * NOTE(review): despite the "has overlap" name, true means NO overlap; the caller
   * relies on this inverted sense.
   *
   * For top, bottom, left, right positioning of the ticks, we want the padding
   * between the labels to be 3x, such that the label will be `padding` distance
   * from the tick and 2 * `padding` distance (or more) from the next tick:
   * see https://github.com/palantir/plottable/pull/1812
   */
  private _hasOverlapWithInterval(interval: number, rects: ClientRect[]): boolean {
    const padding = (this._tickLabelPositioning === "center") ? this.tickLabelPadding() : this.tickLabelPadding() * 3;
    const rectsWithPadding = rects.map((rect) => Utils.DOM.expandRect(rect, padding));
    for (let i = 0; i < rectsWithPadding.length - interval; i += interval) {
      const currRect = rectsWithPadding[i];
      const nextRect = rectsWithPadding[i + interval];
      if (Utils.DOM.clientRectsOverlap(currRect, nextRect)) {
        return false;
      }
    }
    return true;
  }

  /**
   * Clears the cached text measurements (e.g. after a font or style change)
   * in addition to the base class's cache invalidation.
   */
  public invalidateCache() {
    super.invalidateCache();
    (this._measurer as Typesettable.CacheMeasurer).reset();
  }
}
the_stack
declare var multiplex: multiplex.MultiplexStatic; // Support AMD require declare module 'multiplexjs' { export = multiplex; } // Collapse multiplex into mx import mx = multiplex; // ES6 compatibility interface Array<T> extends multiplex.Iterable<T> { } interface String extends multiplex.Iterable<string> { } declare namespace multiplex { /** * ES6 Iterable */ interface Iterable<T> { "@@iterator"(): Iterator<T> } interface Iterator<T> { next(): IteratorResult<T>; return?(value?: any): IteratorResult<T>; throw?(e?: any): IteratorResult<T>; } interface IteratorResult<T> { done: boolean; value?: T; } /** * Supports a simple iteration over a collection. */ interface Enumerator<T> { /** * Advances the enumerator to the next element of the collection. */ next(): boolean; /** * Gets the element in the collection at the current position of the enumerator. */ current: T; } interface EnumeratorConstructor { new <T>(generator: (yielder: (value: T) => T) => any): Enumerator<T>; } /** * Exposes the enumerator, which supports a simple iteration over a collection of a specified type. * Enumerable uses ES6 Iteration protocol. */ interface Enumerable<T> extends Iterable<T> { /** * Returns an enumerator that iterates through the collection. */ getEnumerator(): Enumerator<T>; } interface EnumerableConstructor { /** * Exposes the enumerator, which supports an iteration over the specified Enumerable object. * @param obj An Iterable object. eg. Enumerable, Array, String, Set, Map, Iterable & Generators */ new <T>(obj: Iterable<T>): Enumerable<T>; /** * Defines an enumerator, which supports an iteration over the specified Generator function. * @param factory An Enumerator factory function. */ new <T>(factory: () => Enumerator<T>): Enumerable<T>; /** * Defines an enumerator, which supports an iteration over the items of the specified Array-like object. * An Array-like object is an object which has the "length" property and indexed properties access, eg. 
jQuery * @param obj An Array-like object. */ new <T>(obj: ArrayLike<T>): Enumerable<T>; /** * Defines an enumerator, which supports an iteration over the arguments local variable available within all functions. * @param obj arguments local variable available within all functions. */ new (obj: IArguments): Enumerable<any>; /** * Defines an enumerator, which supports an iteration over the properties of the specified object. * @param obj A regular Object. */ new (obj: Object): Enumerable<KeyValuePair<string, any>>; /** * Returns an empty Enumerable. */ empty<T>(): Enumerable<T>; /** * Detects if an object is Enumerable. * @param obj An object to check its Enumerability. */ is(obj: any): boolean; /** * Generates a sequence of integral numbers within a specified range. * @param start The value of the first integer in the sequence. * @param count The number of sequential integers to generate. */ range(start: number, count: number): Enumerable<number>; /** * Generates a sequence that contains one repeated value. * @param element The value to be repeated. * @param count The number of times to repeat the value in the generated sequence. */ repeat<T>(element: T, count: number): Enumerable<T>; } /** * Defines a method that a type implements to compare two objects. */ interface Comparer<T> { /** * Compares two objects and returns a value indicating whether one is less than, equal to, or greater than the other. * returns An integer that indicates the relative values of x and y, as shown in the following table: * Less than zero x is less than y. * Zero x equals y. * Greater than zero x is greater than y.. * @param x The first object to compare. * @param y The second object to compare. */ compare(x: T, y: T): number; } interface ComparerConstructor { /** * Returns a default sort order comparer for the type specified by the generic argument. */ defaultComparer: Comparer<any>; /** * Creates a comparer by using the specified comparison. * @param comparison The comparison to use. 
*/ create<T>(comparison: (x: T, y: T) => number): Comparer<T>; } /** * Provides a base class for implementations of the EqualityComparer. */ interface EqualityComparer<T> { /** * Determines whether the specified objects are equal. * @param x The first object of type Object to compare. * @param y The second object of type Object to compare. */ equals(x: T, y: T): boolean; /** * Returns a hash code for the specified object. * @param obj The Object for which a hash code is to be returned. */ hash(obj: T): number } interface EqualityComparerConstructor { /** * Gets a default equality comparer for the type specified by the generic argument. */ defaultComparer: EqualityComparer<any>; /** * Creates an EqualityComparer by using the specified equality and hashCodeProvider. * @param hashCodeProvider The hashCodeProvider to use for a hash code is to be returned. * @param equality The equality function. */ create<T>(hashCodeProvider: (obj: T) => number, equality: (x: T, y: T) => boolean): EqualityComparer<T>; } /** * Initializes a new instance of the abstract Collection class. */ interface Collection<T> extends Enumerable<T> { /** * Gets the number of elements contained in the Collection. */ count(): number; /** * Copies the Collection to an existing one-dimensional Array, starting at the specified array index. * @param array The one-dimensional Array that is the destination of the elements copied from Dictionary keys. * @param arrayIndex The zero-based index in array at which copying begins. */ copyTo(array: T[], arrayIndex: number): void } interface CollectionConstructor { /** * Initializes a new instance of the Collection class that is empty. */ new <T>(): Collection<T> /** * Initializes a new instance of the Collection class that is wrapper around the specified Enumerable. * @param value The Iterable to wrap. */ new <T>(value: Iterable<T>): Collection<T> } /** * Initializes a new instance of the abstract Collection class. 
*/ interface ReadOnlyCollection<T> extends Collection<T> { /** * Gets the element at the specified index. * @param index The zero-based index of the element to get. */ [index: number]: T; /** * Gets the element at the specified index. * @param index The zero-based index of the element to get. */ get(index: number): T /** * Determines whether the ReadOnlyCollection contains a specific value. * @param item The object to locate in the ReadOnlyCollection. */ contains(item: T): boolean /** * Searches for the specified object and returns the zero-based index of the first occurrence within the entire ReadOnlyCollection. * @param item The object to locate in the ReadOnlyCollection. */ indexOf(item: T): number } interface ReadOnlyCollectionConstructor { /** * Initializes a new instance of the ReadOnlyCollection class that is a read-only wrapper around the specified list. * @param list The list to wrap. */ new <T>(list: List<T>): ReadOnlyCollection<T> } /** * Represents a strongly typed list of objects that can be accessed by index. */ interface List<T> extends Collection<T> { /** * Gets the element at the specified index. * @param index The zero-based index of the element to get. */ [index: number]: T; /** * Adds an object to the end of the List. * @param item The object to be added to the end of the List. */ add(item: T): void /** * Adds the elements of the specified collection to the end of the List. * @param collection The collection whose elements should be added to the end of the List. */ addRange(collection: Iterable<T>): void /** * Returns a read-only wrapper for the current list. */ asReadOnly(): ReadOnlyCollection<T> /** * Searches the entire sorted List for an element using the default comparer and returns the zero-based index of the element. 
* Returns The zero-based index of item in the sorted List, if item is found; otherwise, a negative number * that is the bitwise complement of the index of the next element that is larger than item or, if there is no larger element, * the bitwise complement of List.count(). * @param item The object to locate. The value can be null for reference types. */ binarySearch(item: T): number /** * Searches the entire sorted List for an element using the specified comparer and returns the zero-based index of the element. * returns The zero-based index of item in the sorted List, if item is found; otherwise, a negative number * that is the bitwise complement of the index of the next element that is larger than item or, if there is no larger element, * the bitwise complement of List.count(). * @param item The object to locate. The value can be null for reference types. * @param comparer The Comparer implementation to use when comparing elements. */ binarySearch(item: T, comparer: Comparer<T>): number /** * Searches a range of elements in the sorted List for an element using the specified comparer and returns the zero-based index of the element. * returns The zero-based index of item in the sorted List, if item is found; otherwise, a negative number * that is the bitwise complement of the index of the next element that is larger than item or, if there is no larger element, * the bitwise complement of List.count(). * @param item The object to locate. The value can be null for reference types. * @param index The zero-based starting index of the range to search. * @param count The length of the range to search. * @param comparer The Comparer implementation to use when comparing elements. */ binarySearch(item: T, index: number, count: number, comparer: Comparer<T>): number /** * Removes all items from the List. */ clear(): void /** * Determines whether the List contains elements that match the conditions defined by the specified predicate. 
* @param match The predicate function that defines the conditions of the elements to search for. */ exists(match: (item: T) => boolean): boolean /** * Searches for an element that matches the conditions defined by the specified predicate, and returns the first occurrence within the entire List. * @param match The predicate function that defines the conditions of the elements to search for. */ find(match: (item: T) => boolean): T /** * Retrieves all the elements that match the conditions defined by the specified predicate. * @param match The predicate function that defines the conditions of the elements to search for. */ findAll(match: (item: T) => boolean): List<T> /** * Searches for an element that matches the conditions defined by the specified predicate, * and returns the zero-based index of the first occurrence within the entire List, if found; otherwise, –1. * @param match The predicate function that defines the conditions of the elements to search for. */ findIndex(match: (item: T) => boolean): number /** * Searches for an element that matches the conditions defined by the specified predicate, * and returns the zero-based index of the first occurrence within the range of elements * in the List that extends from the specified index to the last element, if found; otherwise, –1. * @param startIndex The zero-based starting index of the search. * @param match The predicate function that defines the conditions of the elements to search for. */ findIndex(startIndex: number, match: (item: T) => boolean): number /** * Searches for an element that matches the conditions defined by the specified predicate, * and returns the zero-based index of the first occurrence within the range of elements * in the List that starts at the specified index and contains the specified number of elements, if found; otherwise, –1. * @param startIndex The zero-based starting index of the search. * @param count The number of elements in the section to search. 
* @param match The predicate function that defines the conditions of the elements to search for. */ findIndex(startIndex: number, count: number, match: (item: T) => boolean): number /** * Searches for an element that matches the conditions defined by the specified predicate, * and returns the last occurrence within the entire List. * @param match The predicate function that defines the conditions of the elements to search for. */ findLast(match: (item: T) => boolean): T /** * Searches for an element that matches the conditions defined by the specified predicate, * and returns the zero-based index of the last occurrence within the entire List, if found; otherwise, –1. * @param match The predicate function that defines the conditions of the elements to search for. */ findLastIndex(match: (item: T) => boolean): number /** * Searches for an element that matches the conditions defined by the specified predicate, * and returns the zero-based index of the last occurrence within the range of elements * in the List that extends from the first element to the specified index, if found; otherwise, –1. * @param startIndex The zero-based starting index of the search. * @param match The predicate function that defines the conditions of the elements to search for. */ findLastIndex(startIndex: number, match: (item: T) => boolean): number /** * Searches for an element that matches the conditions defined by the specified predicate, * and returns the zero-based index of the last occurrence within the range of elements * in the List that contains the specified number of elements and ends at the specified index, if found; otherwise, –1. * @param startIndex The zero-based starting index of the search. * @param count The number of elements in the section to search. * @param match The predicate function that defines the conditions of the elements to search for. 
*/ findLastIndex(startIndex: number, count: number, match: (item: T) => boolean): number /** * Performs the specified action on each element of the List. * @param action The action function to perform on each element of an List; the second parameter of the function represents the index of the source element. */ forEach(action: (item: T, index: number) => void): void /** * Gets the element at the specified index. * @param index The zero-based index of the element to get. */ get(index: number): T /** * Creates a shallow copy of a range of elements in the source List. * @param index The zero-based List index at which the range starts. * @param count The number of elements in the range. */ getRange(index: number, count: number): List<T> /** * Searches for the specified object and returns the zero-based index of the first occurrence within the entire List, if found; otherwise, –1. * @param item The object to locate in the List. */ indexOf(item: T): number /** * Searches for the specified object and returns the zero-based index of the first occurrence within * the range of elements in the List that extends from the specified index to the last element, if found; otherwise, –1. * @param item The object to locate in the List. * @param index The zero-based starting index of the search. 0 (zero) is valid in an empty list. */ indexOf(item: T, index: number): number /** * Inserts an element into the List at the specified index. * @param index The zero-based index at which item should be inserted. * @param item The object to insert. The value can be null for reference types. */ insert(index: number, item: T): void /** * Inserts the elements of a collection into the List at the specified index. * @param index The zero-based index at which item should be inserted. * @param collection The collection whose elements should be inserted into the List. */ insertRange(index: number, collection: Iterable<T>): void /** * Gets an Array wrapper around the List. 
*/ items(): T[] /** * Searches for the specified object and returns the zero-based index of the last occurrence within the entire List, if found; otherwise, –1. * @param item The object to locate in the List. */ lastIndexOf(item: T): number /** * Searches for the specified object and returns the zero-based index of the last occurrence * within the range of elements in the List that extends from the specified index to the last element if found; otherwise, –1. * @param item The object to locate in the List. * @param index The zero-based starting index of the search. 0 (zero) is valid in an empty list. */ lastIndexOf(item: T, index: number): number /** * Removes the first occurrence of a specific object from the List. * @param item The object to remove from the List. */ remove(item: T): boolean /** * Removes all the elements that match the conditions defined by the specified predicate. * @param match The predicate function that defines the conditions of the elements to remove. */ removeAll(match: (item: T) => boolean): number /** * Removes the element at the specified index of the List. * @param index The zero-based index of the element to remove. */ removeAt(index: number): void /** * Removes a range of elements from the List. * @param index The zero-based index of the element to remove. * @param count The number of elements to remove. */ removeRange(index: number, count: number): void /** * Reverses the order of the elements in the entire List */ reverse(): any /** * Reverses the order of the elements in the entire List * @param index The zero-based starting index of the range to reverse. * @param count The number of elements in the range to reverse. */ reverse(index: number, count: number): void /** * Sets the element at the specified index. * @param index The zero-based index of the element to set. * @param item The object to be added at the specified index. 
*/ set(index: number, value: T): void /** * Sorts the elements in the entire List using the default comparer. */ sort(): void /** * Sorts the elements in the entire List using the specified Comparison. * @param comparison The comparison function to use when comparing elements. */ sort(comparison: (x: T, y: T) => number): void /** * Sorts the elements in the entire List using the specified comparer. * @param comparer The Comparer implementation to use when comparing elements. */ sort(comparer: Comparer<T>): void /** * Sorts the elements in a range of elements in List using the specified comparer. * @param index The zero-based starting index of the range to sort. * @param count The length of the range to sort. * @param comparer The Comparer implementation to use when comparing elements. */ sort(index: number, count: number, comparer: Comparer<T>): void /** * Copies the elements of the List to a new array. */ toArray(): T[] /** * Determines whether every element in the List matches the conditions defined by the specified predicate. * @param match The Predicate function that defines the conditions to check against the elements. */ trueForAll(match: (item: T) => boolean): boolean } interface ListConstructor { /** * Initializes a new instance of the List class that is empty. */ new <T>(): List<T> /** * Initializes a new instance of the List class that is empty and has the specified initial capacity. * @param capacity The number of elements that the new list can initially store. */ new <T>(capacity: number): List<T> /** * Initializes a new instance of the List class that contains elements copied from the specified arguments * @param args Arbitrary number of arguments to copy to the new list. */ new <T>(...args: T[]): List<T> /** * Initializes a new instance of the List class that contains elements copied from the specified collection * and has sufficient capacity to accommodate the number of elements copied. 
* @param collection The collection whose elements are copied to the new list. */ new <T>(collection: Iterable<T>): List<T> } /** * Represents a collection of key/value pairs that are sorted by key based on the associated Comparer implementation. */ interface SortedList<TKey, TValue> extends Collection<KeyValuePair<TKey, TValue>> { /** * Adds an element with the specified key and value into the SortedList. * @param key The key of the element to add. * @param value The value of the element to add. The value can be null for reference types. */ add(key: TKey, value: TValue): void /** * Gets the value associated with the specified key. * @param key The key whose value to get. */ get(key: TKey): TValue /** * Gets or sets the number of elements that the SortedList can contain. * @param value The number of elements that the SortedList can contain. */ capacity(value?: number): number /** * Removes all elements from the SortedList. */ clear(): void /** * Gets the Comparer for the sorted list. */ comparer(): Comparer<TKey> /** * Determines whether the SortedList contains a specific key. * @param key The key to locate in the SortedList. */ containsKey(key: TKey): boolean /** * Determines whether the SortedList contains a specific value. * @param value The value to locate in the SortedList. */ containsValue(value: TValue): boolean /** * Gets a collection containing the keys in the SortedList, in sorted order. */ keys(): Collection<TKey> /** * Gets a collection containing the values in the SortedLis. */ values(): Collection<TValue> /** * Searches for the specified key and returns the zero-based index within the entire SortedList. * @param key The key to locate in the SortedList. */ indexOfKey(key: TKey): number /** * Searches for the specified value and returns the zero-based index of the first occurrence within the entire SortedList. * @param value The value to locate in the SortedList. 
*/ indexOfValue(value: TValue): number /** * Removes the element with the specified key from the SortedList. * Returns true if the element is successfully removed; otherwise, false. This method also returns false if key was not found in the original SortedList. * @param key The key of the element to remove. */ remove(key: TKey): boolean /** * Removes the element at the specified index of the SortedList. * @param index The zero-based index of the element to remove. */ removeAt(index: number): void /** * Sets the value associated with the specified key. * @param key The key whose value to get or set. * @param value The value associated with the specified key. */ set(key: TKey, value: TValue): void /** * Sets the capacity to the actual number of elements in the SortedList, if that number is less than 90 percent of current capacity. */ trimExcess(): void /** * Gets the value associated with the specified key. * @param key The key whose value to get. * @param callback When this method returns, callback method is called with the value * associated with the specified key, if the key is found; otherwise, null for the type of the value parameter. */ tryGetValue(key: TKey, callback: (value: TValue) => void): boolean } interface SortedListConstructor { /** * Initializes a new instance of the SortedList class that is empty, * has the default initial capacity, and uses the default Comparer. */ new <TKey, TValue>(): SortedList<TKey, TValue> /** * Initializes a new instance of the SortedList class that contains elements copied from the specified Dictionary, * has sufficient capacity to accommodate the number of elements copied, and uses the default Comparer. * @param dictionary The Dictionary whose elements are copied to the new SortedList. */ new <TKey, TValue>(dictionary: Dictionary<TKey, TValue>): SortedList<TKey, TValue> /** * Initializes a new instance of the SortedList class that is empty, * has the default initial capacity, and uses the specified Comparer. 
* @param comparer The Comparer implementation to use when comparing keys.-or-null to use the default Comparer for the type of the key. */ new <TKey, TValue>(comparer: Comparer<TKey>): SortedList<TKey, TValue> /** * Initializes a new instance of the SortedList class that is empty, * has the specified initial capacity, and uses the default Comparer. * @param capacity The initial number of elements that the SortedList can contain. */ new <TKey, TValue>(capacity: number): SortedList<TKey, TValue> /** * Initializes a new instance of the SortedList class that contains elements copied from the specified Dictionary, * has sufficient capacity to accommodate the number of elements copied, and uses the specified Comparer. * @param dictionary The Dictionary whose elements are copied to the new SortedList. * @param comparer The Comparer implementation to use when comparing keys.-or-null to use the default Comparer for the type of the key. */ new <TKey, TValue>(dictionary: Dictionary<TKey, TValue>, comparer: Comparer<TKey>): SortedList<TKey, TValue> /** * Initializes a new instance of the SortedList class that is empty, * has the specified initial capacity, and uses the specified Comparer. * @param capacity The initial number of elements that the SortedList can contain. * @param comparer The Comparer implementation to use when comparing keys.-or-null to use the default Comparer for the type of the key. */ new <TKey, TValue>(capacity: number, comparer: Comparer<TKey>): SortedList<TKey, TValue> } /** * Defines a key/value pair that can be set or retrieved. */ interface KeyValuePair<TKey, TValue> { /** * Gets the key in the key/value pair. */ key: TKey; /** * Gets the value in the key/value pair. */ value: TValue; } interface KeyValuePairConstructor { /** * Initializes a new instance of the KeyValuePair with the specified key and value. * @param key The object defined in each key/value pair. * @param value The definition associated with key. 
*/ new <TKey, TValue>(key: TKey, value: TValue): KeyValuePair<TKey, TValue> } /** * Represents a collection of keys and values. */ interface Dictionary<TKey, TValue> extends Collection<KeyValuePair<TKey, TValue>> { /** * Adds an element with the provided key and value to the Dictionary. * @param key The object to use as the key of the element to add. * @param value The object to use as the value of the element to add. */ add(key: TKey, value: TValue): void /** * Removes all keys and values from the Dictionary. */ clear(): void /** * Determines whether the Dictionary contains the specified key. * @param key The key to locate in the Dictionary. */ containsKey(key: TKey): boolean /** * Determines whether the Dictionary contains a specific value. * @param value The value to locate in the Dictionary. */ containsValue(value: TValue): boolean /** * Copies the Dictionary keys to an existing one-dimensional Array, starting at the specified array index. * @param array The one-dimensional Array that is the destination of the elements copied from Dictionary keys. * @param arrayIndex The zero-based index in array at which copying begins. */ copyTo(array: TKey[], arrayIndex: number): void copyTo(array: KeyValuePair<TKey, TValue>[], arrayIndex: number): void /** * Gets a Collection containing the keys of the Dictionary. */ keys(): Collection<TKey> /** * Gets a Collection containing the values in the Dictionary. */ values(): Collection<TValue> /** * Gets element with the specified key. * @param key The key of the element to get. */ get(key: TKey): TValue /** * Sets the element with the specified key. * @param key The key of the element to set. * @param value The object to use as the value of the element to set. */ set(key: TKey, value: TValue): void /** * Gets the value associated with the specified key. * @param key The key whose value to get. 
* @param callback When this method returns, callback method is called with the value * associated with the specified key, if the key is found; otherwise, null for the type of the value parameter. */ tryGetValue(key: TKey, callback: (value: TValue) => void): boolean /** * Removes the element with the specified key from the Dictionary. * @param key The key of the element to remove. */ remove(key: TKey): boolean } interface DictionaryConstructor { /** * Initializes a new instance of the Dictionary class that is empty, */ new <TKey, TValue>(): Dictionary<TKey, TValue> /** * Initializes a new instance of the Dictionary class that contains elements copied * from the specified Dictionary and uses the default equality comparer for the key type. * @param dictionary The Dictionary whose elements are copied to the new Dictionary. */ new <TKey, TValue>(dictionary: Dictionary<TKey, TValue>): Dictionary<TKey, TValue> /** * Initializes a new instance of the Dictionary class that is empty, and uses the specified EqualityComparer. * @param comparer The EqualityComparer implementation to use when comparing keys. */ new <TKey, TValue>(comparer: EqualityComparer<TKey>): Dictionary<TKey, TValue> /** * Initializes a new instance of the Dictionary class that is empty, has the specified initial capacity, and uses the default equality comparer for the key type. * @param capacity The initial number of elements that the Dictionary can contain. */ new <TKey, TValue>(capacity: number): Dictionary<TKey, TValue> /** * Initializes a new instance of the Dictionary that is empty, has the specified initial capacity, and uses the specified EqualityComparer. * @param capacity The initial number of elements that the Dictionary can contain. * @param comparer The EqualityComparer implementation to use when comparing keys. 
*/ new <TKey, TValue>(capacity: number, comparer: EqualityComparer<TKey>): Dictionary<TKey, TValue> /** * Initializes a new instance of the Dictionary class that contains elements copied * from the specified Dictionary and uses the specified EqualityComparer. * @param dictionary The Dictionary whose elements are copied to the new Dictionary. * @param comparer The EqualityComparer implementation to use when comparing keys. */ new <TKey, TValue>(dictionary: Dictionary<TKey, TValue>, comparer: EqualityComparer<TKey>): Dictionary<TKey, TValue> } /** * Represents a set of values. */ interface HashSet<T> extends Collection<T> { /** * Adds an element to the current set. * @param item The element to add to the set. */ add(item: T): boolean /** * Removes all elements from a HashSet object. */ clear(): void /** * Copies the elements of a HashSet object to an array. * @param array The one-dimensional array that is the destination of the elements copied from the HashSet object. */ copyTo(array: T[]): void /** * Copies the elements of a HashSet object to an array. starting at the specified array index. * @param array The one-dimensional array that is the destination of the elements copied from the HashSet object. * @param arrayIndex The zero-based index in array at which copying begins. */ copyTo(array: T[], arrayIndex: number): void /** * Copies the elements of a HashSet object to an array. * @param array The one-dimensional array that is the destination of the elements copied from the HashSet object. * @param arrayIndex The zero-based index in array at which copying begins. * @param count The number of elements to copy to array. */ copyTo(array: T[], arrayIndex: number, count: number): void /** * Gets the EqualityComparer object that is used to determine equality for the values in the set. */ comparer(): EqualityComparer<T> /** * Removes the specified element from a HashSet object. * @param item The element to remove. 
*/ remove(item: T): boolean /** * Removes all elements that match the conditions defined by the specified predicate from a HashSet collection. * @param match The predicate function that defines the conditions of the elements to remove. */ removeWhere(match: (item: T) => boolean): number /** * Removes all elements in the specified collection from the current set. * @param other The collection of items to remove from the set. */ exceptWith(other: Iterable<T>): void /** * Modifies the current set so that it contains only elements that are also in a specified collection. * @param other The collection to compare to the current set. */ intersectWith(other: Iterable<T>): void /** * Determines whether the current set is a proper (strict) subset of a specified collection. * @param other The collection to compare to the current set. */ isProperSubsetOf(other: Iterable<T>): boolean /** * Determines whether the current set is a proper (strict) superset of a specified collection. * @param other The collection to compare to the current set. */ isProperSupersetOf(other: Iterable<T>): boolean /** * Determines whether a set is a subset of a specified collection. * @param other The collection to compare to the current set. */ isSubsetOf(other: Iterable<T>): boolean /** * Determines whether the current set is a superset of a specified collection. * @param other The collection to compare to the current set. */ isSupersetOf(other: Iterable<T>): boolean /** * Determines whether the current set overlaps with the specified collection. * @param other The collection to compare to the current set. */ overlaps(other: Iterable<T>): boolean /** * Determines whether the current set and the specified collection contain the same elements. * @param other The collection to compare to the current set. */ setEquals(other: Iterable<T>): boolean /** * Modifies the current set so that it contains only elements that are present * either in the current set or in the specified collection, but not both. 
* @param other The collection to compare to the current set. */ symmetricExceptWith(other: Iterable<T>): void /** * Modifies the current set so that it contains all elements that are present * in either the current set or the specified collection. * @param other The collection to compare to the current set. */ unionWith(other: Iterable<T>): void } interface HashSetConstructor { /** * Initializes a new instance of the HashSet class that is empty and uses the default equality comparer for the set type. */ new <T>(): HashSet<T> /** * Initializes a new instance of the HashSet class that uses the default equality comparer for the set type, * and contains elements copied from the specified collection. * @param collection The collection whose elements are copied to the new set. */ new <T>(collection: Iterable<T>): HashSet<T> /** * Initializes a new instance of the HashSet class that is empty and uses the specified equality comparer for the set type. * @param comparer The EqualityComparer implementation to use when comparing values in the set. */ new <T>(comparer: EqualityComparer<T>): HashSet<T> /** * Initializes a new instance of the HashSet class that uses the specified equality comparer for the set type, * contains elements copied from the specified collection, and uses the specified equality comparer for the set type. * @param collection The collection whose elements are copied to the new set. * @param comparer The EqualityComparer implementation to use when comparing values in the set. */ new <T>(collection: Iterable<T>, comparer: EqualityComparer<T>): HashSet<T> } /** * Represents a node in a LinkedList. */ interface LinkedListNode<T> { /** * Gets the value contained in the node. */ value(): T /** * Gets the LinkedList that the LinkedListNode belongs to. */ list(): LinkedList<T> /** * Gets the next node in the LinkedList. */ next(): LinkedListNode<T> /** * Gets the previous node in the LinkedList. 
*/ previous(): LinkedListNode<T> } interface LinkedListNodeConstructor { /** * Initializes a new instance of the LinkedListNode class, containing the specified value. * @param value The value to contain in the LinkedListNode */ new <T>(value: T): LinkedListNode<T> } /** * Represents a doubly linked list. */ interface LinkedList<T> extends Collection<T> { /** * Adds an item to the LinkedList. * @param item The object to add to the LinkedList. */ add(item: T): void /** * Removes all nodes from the LinkedList. */ clear(): void /** * Determines whether a value is in the LinkedList. * @param value The value to locate in the LinkedList. The value can be null for reference types. */ contains(item: T): boolean /** * Gets the first node of the LinkedList. */ getFirst(): LinkedListNode<T> /** * Gets the last node of the LinkedList. */ getLast(): LinkedListNode<T> /** * Adds the specified new node after the specified existing node in the LinkedList and returns the new LinkedListNode. * @param node The LinkedListNode after which to insert newNode. * @param newNode The new LinkedListNode to add to the LinkedList. */ addAfter(node: LinkedListNode<T>, newNode: LinkedListNode<T>): LinkedListNode<T> /** * Adds the specified new node after the specified existing node in the LinkedList. * returns The new LinkedListNode containing value. * @param node The LinkedListNode after which to insert newNode. * @param value The value to add to the LinkedList. */ addAfter(node: LinkedListNode<T>, value: T): LinkedListNode<T> /** * Adds the specified new node before the specified existing node in the LinkedList. * returns The new LinkedListNode. * @param node The LinkedListNode before which to insert newNode. * @param newNode The new LinkedListNode to add to the LinkedList. */ addBefore(node: LinkedListNode<T>, newNode: LinkedListNode<T>): LinkedListNode<T> /** * Adds the specified new node before the specified existing node in the LinkedList. * returns The new LinkedListNode containing value. 
* @param node The LinkedListNode before which to insert newNode. * @param value The value to add to the LinkedList. */ addBefore(node: LinkedListNode<T>, value: T): LinkedListNode<T> /** * Adds the specified new node at the start of the LinkedList. * returns The new LinkedListNode. * @param node The new LinkedListNode to add at the start of the LinkedList. */ addFirst(node: LinkedListNode<T>): LinkedListNode<T> /** * Adds the specified new node at the start of the LinkedList. * returns The new LinkedListNode containing value. * @param value The value to add at the start of the LinkedList. */ addFirst(value: T): LinkedListNode<T> /** * Adds the specified new node at the end of the LinkedList. * returns The new LinkedListNode. * @param node The new LinkedListNode to add at the end of the LinkedList. */ addLast(node: LinkedListNode<T>): LinkedListNode<T> /** * Adds the specified new node at the end of the LinkedList. * returns The new LinkedListNode containing value. * @param value The value to add at the end of the LinkedList. */ addLast(value: T): LinkedListNode<T> /** * Finds the first node that contains the specified value. * @param value The value to locate in the LinkedList. */ find(value: T): LinkedListNode<T> /** * Finds the last node that contains the specified value. * @param value The value to locate in the LinkedList. */ findLast(value: T): LinkedListNode<T> /** * Removes the node at the start of the LinkedList. * returns true if the node is successfully removed; otherwise, false. * This method also returns false if value was not found in the original LinkedList. * @param node */ remove(node: LinkedListNode<T>): boolean /** * Removes the first occurrence of the specified value from the LinkedList. * returns true if the element containing value is successfully removed; otherwise, false. * This method also returns false if value was not found in the original LinkedList. * @param value The value to remove from the LinkedList. 
*/ remove(value: T): boolean /** * Removes the node at the start of the LinkedList. */ removeFirst(): void /** * Removes the node at the end of the LinkedList. */ removeLast(): void } interface LinkedListConstructor { /** * Initializes a new instance of the LinkedList class that is empty. */ new <T>(): LinkedList<T> /** * Initializes a new instance of the LinkedList class that contains elements copied from the specified Enumerable. * @param collection The collection to copy elements from. */ new <T>(collection: Iterable<T>): LinkedList<T> } /** * Represents a first-in, first-out collection of objects. */ interface Queue<T> extends Collection<T> { /** * Removes all objects from the Queue. */ clear(): void /** * Determines whether an element is in the Queue. * @param item The object to locate in the Queue. */ contains(item: T): boolean /** * Removes and returns the object at the beginning of the Queue. */ dequeue(): T /** * Adds an object to the end of the Queue. * @param item The object to add to the Queue. */ enqueue(item: T): void /** * Returns the object at the beginning of the Queue without removing it. */ peek(): T /** * Copies the Queue to a new array. */ toArray(): T[] } interface QueueConstructor { /** * Initializes a new instance of the Queue class that is empty. */ new <T>(): Queue<T> /** * Initializes a new instance of the Queue class that contains elements copied from the specified collection. * @param collection The collection to copy elements from. */ new <T>(collection: Iterable<T>): Queue<T> } /** * Represents a variable size last-in-first-out (LIFO) collection of instances of the same arbitrary type. */ interface Stack<T> extends Collection<T> { /** * Removes all objects from the Stack. */ clear(): void /** * Determines whether an element is in the Stack. * @param item The object to locate in the Stack. */ contains(item: T): boolean /** * Returns the object at the top of the Stack without removing it. 
*/ peek(): T /** * Removes and returns the object at the top of the Stack. */ pop(): T /** * Inserts an object at the top of the Stack. * @param item The object to push onto the Stack. */ push(item: T): void /** * Copies the Stack to a new array. */ toArray(): T[] } interface StackConstructor { /** * Initializes a new instance of the Stack class that is empty. */ new <T>(): Stack<T> /** * Initializes a new instance of the Stack class that contains elements copied from the specified collection. * @param collection The collection to copy elements from. */ new <T>(collection: Iterable<T>): Stack<T> } /** * Defines a data structures that map keys to Enumerable sequences of values. */ interface Lookup<TKey, TElement> extends Collection<Grouping<TKey, TElement>> { /** * Determines whether a specified key exists in the Lookup. * @param key The key to search for in the Lookup. */ contains(key: TKey): boolean contains(item: Grouping<TKey, TElement>): boolean /** * Gets the value associated with the specified key. * @param key The key of the element to add. */ get(key: TKey): Enumerable<TElement> } /** * Represents a collection of objects that have a common key. */ interface Grouping<TKey, TElement> extends Collection<TElement> { /** * Gets the key of the Grouping. */ key: TKey } /** * Exposes the enumerator, which supports a simple iteration over a collection of a specified type. */ interface OrderedEnumerable<TElement> extends Enumerable<TElement> { /** * Performs a subsequent ordering on the elements of an OrderedEnumerable<TElement> according to a key. * @param keySelector The selector used to extract the key for each element. * @param comparer The Comparer used to compare keys for placement in the returned sequence. * @param descending true to sort the elements in descending order; false to sort the elements in ascending order. 
*/ createOrderedEnumerable<TKey>(keySelector: (item: TElement) => TKey, comparer: Comparer<TKey>, descending: boolean): OrderedEnumerable<TElement> /** * Performs a subsequent ordering of the elements in a sequence in descending order, according to a key. * Returns an OrderedEnumerable whose elements are sorted in descending order according to a key. * @param keySelector A function to extract a key from each element. */ thenBy<TKey>(keySelector: (item: TElement) => TKey): OrderedEnumerable<TElement> /** * Performs a subsequent ordering of the elements in a sequence in ascending order by using a specified comparer. * Returns an OrderedEnumerable whose elements are sorted according to a key. * @param keySelector A function to extract a key from each element. * @param comparer A Comparer to compare keys. */ thenBy<TKey>(keySelector: (item: TElement) => TKey, comparer: Comparer<TKey>): OrderedEnumerable<TElement> /** * Performs a subsequent ordering of the elements in a sequence in descending order, according to a key. * Returns an OrderedEnumerable whose elements are sorted in descending order according to a key. * @param keySelector A function to extract a key from each element. */ thenByDescending<TKey>(keySelector: (item: TElement) => TKey): OrderedEnumerable<TElement> /** * Performs a subsequent ordering of the elements in a sequence in descending order, according to a key. * Returns an OrderedEnumerable whose elements are sorted in descending order according to a key. * @param keySelector A function to extract a key from each element. * @param comparer A Comparer to compare keys. */ thenByDescending<TKey>(keySelector: (item: TElement) => TKey, comparer: Comparer<TKey>): OrderedEnumerable<TElement> } /** * Defines Enumerable extention methods applied on Enumerable */ interface Enumerable<T> { /** * Applies an accumulator function over a sequence. * Returns the final accumulator value. * @param func An accumulator function to be invoked on each element. 
*/ aggregate(func: (accumulate: T, item: T) => T): T /** * Applies an accumulator function over a sequence. The specified seed value is used as the initial accumulator value. * Returns the final accumulator value. * @param seed The initial accumulator value. * @param func An accumulator function to be invoked on each element. */ aggregate<TAccumulate>(seed: TAccumulate, func: (accumulate: TAccumulate, item: T) => TAccumulate): TAccumulate /** * Applies an accumulator function over a sequence. The specified seed value is used as the initial accumulator value, * and the specified function is used to select the result value. * Returns the final accumulator value. * @param seed The initial accumulator value. * @param func An accumulator function to be invoked on each element. * @param resultSelector A function to transform the final accumulator value into the result value. */ aggregate<TAccumulate, TResult>(seed: TAccumulate, func: (accumulate: TAccumulate, item: T) => TAccumulate, resultSelector: (accumulate: TAccumulate) => TResult): TResult; /** * Determines whether all elements of a sequence satisfy a condition. * Returns true if every element of the source sequence passes the test in the specified predicate, or if the sequence is empty; otherwise, false. * @param predicate A function to test each element for a condition. */ all(predicate: (item: T) => boolean): boolean /** * Determines whether a sequence contains any elements. * Returns true if the source sequence contains any elements; otherwise, false. */ any(): boolean /** * Determines whether any element of a sequence satisfies a condition. * Returns true if any elements in the source sequence pass the test in the specified predicate; otherwise, false. * @param predicate A function to test each element for a condition. */ any(predicate: (item: T) => boolean): boolean /** * Returns the input typed as Enumerable. */ asEnumerable(): Enumerable<T> /** * Computes the average of a sequence of numeric values. 
*/ average(): number /** * Computes the average of a sequence of numeric values that are obtained by invoking a transform function on each element of the input sequence. * @param selector A transform function to apply to each element. */ average(selector: (item: number) => number): number /** * Concatenates two sequences. * @param second The sequence to concatenate to the first sequence. */ concat(second: Iterable<T>): Enumerable<T> /** * Determines whether a sequence contains a specified element by using the default equality comparer. * @param value The value to locate in the sequence. */ contains(value: T): boolean /** * Returns the last element of a sequence that satisfies a specified condition. * @param value The value to locate in the sequence. * @param comparer An equality comparer to compare values. */ contains(value: T, comparer: EqualityComparer<T>): boolean /** * Returns the number of elements in a sequence. */ count(): number /** * Returns a number that represents how many elements in the specified sequence satisfy a condition. * @param predicate A function to test each element for a condition. */ count(predicate: (item: T) => boolean): number /** * Returns the elements of the specified sequence or null if the sequence is empty. */ defaultIfEmpty(): Enumerable<T> /** * Returns the elements of the specified sequence or the specified value in a singleton collection if the sequence is empty. * @param defaultValue The value to return if the sequence is empty. */ defaultIfEmpty(defaultValue: T): Enumerable<T> /** * Returns distinct elements from a sequence by using the default equality comparer to compare values. */ distinct(): Enumerable<T> /** * Produces the set difference of two sequences by using the EqualityComparer to compare values. * @param comparer An EqualityComparer to compare values. */ distinct(comparer: EqualityComparer<T>): Enumerable<T> /** * Produces the set difference of two sequences by using the default equality comparer to compare values. 
* @param second An Iterable whose elements that also occur in the first sequence will cause those elements to be removed from the returned sequence. */ except(second: Iterable<T>): Enumerable<T> /** * Produces the set difference of two sequences by using the specified EqualityComparer to compare values. * @param second An Iterable whose elements that also occur in the first sequence will cause those elements to be removed from the returned sequence. * @param comparer An EqualityComparer to compare values. */ except(second: Iterable<T>, comparer: EqualityComparer<T>): Enumerable<T> /** * Returns the element at a specified index in a sequence. Throws an error if the index is less than 0 or greater than or equal to the number of elements in source. * @param index The zero-based index of the element to retrieve. */ elementAt(index: number): T /** * Returns the first element of a sequence. this method throws an exception if there is no element in the sequence. */ first(): T /** * Returns the first element in a sequence that satisfies a specified condition. this method throws an exception if there is no element in the sequence. * @param predicate A function to test each source element for a condition; the second parameter of the function represents the index of the source element. */ first(predicate: (item: T) => boolean): T /** * Returns the first element of a sequence, or null if the sequence contains no elements. */ firstOrDefault(): T /** * Returns the first element of the sequence that satisfies a condition or null if no such element is found. * @param predicate A function to test each source element for a condition; the second parameter of the function represents the index of the source element. */ firstOrDefault(predicate: (item: T) => boolean): T /** * Returns the first element of the sequence that satisfies a condition or a default value if no such element is found. 
* @param predicate A function to test each source element for a condition; the second parameter of the function represents the index of the source element. * @param defaultValue The value to return if no element exists with specified condition. */ firstOrDefault(predicate: (item: T) => boolean, defaultValue: T): T /** * Performs the specified action on each element of an Enumerable. * @param action The action function to perform on each element of an Enumerable; the second parameter of the function represents the index of the source element. */ forEach(action: (item: T, index: number) => void): void /** * Groups the elements of a sequence according to a specified key selector function. * @param keySelector A function to extract the key for each element. */ groupBy<TKey>(keySelector: (item: T) => TKey): Enumerable<Grouping<TKey, T>>; /** * Groups the elements of a sequence according to a specified key selector function. * @param keySelector A function to extract the key for each element. * @param comparer An equality comparer to compare values. */ groupBy<TKey>(keySelector: (item: T) => TKey, comparer: EqualityComparer<TKey>): Enumerable<Grouping<TKey, T>>; /** * Groups the elements of a sequence according to a specified key selector function and projects the elements for each group by using a specified function. * @param keySelector A function to extract the key for each element. * @param elementSelector A function to map each source element to an element in the Grouping. */ groupBy<TKey, TElement>(keySelector: (item: T) => TKey, elementSelector: (item: T) => TElement): Enumerable<Grouping<TKey, TElement>>; /** * Groups the elements of a sequence according to a key selector function. * The keys are compared by using a comparer and each group's elements are projected by using a specified function. * @param keySelector A function to extract the key for each element. * @param elementSelector A function to map each source element to an element in the Grouping. 
* @param comparer An equality comparer to compare values. */ groupBy<TKey, TElement>(keySelector: (item: T) => TKey, elementSelector: (item: T) => TElement, comparer: EqualityComparer<TKey>): Enumerable<Grouping<TKey, TElement>>; /** * Groups the elements of a sequence according to a specified key selector function and projects the elements for each group by using a specified function. * @param keySelector A function to extract the key for each element. * @param elementSelector A function to map each source element to an element in the Grouping. * @param resultSelector A function to extract the key for each element. */ groupBy<TKey, TElement, TResult>(keySelector: (item: T) => TKey, elementSelector: (item: T) => TElement, resultSelector: (key: TKey, elements: Iterable<TElement>) => TResult): Enumerable<TResult>; /** * Groups the elements of a sequence according to a key selector function. * The keys are compared by using a comparer and each group's elements are projected by using a specified function. * @param keySelector A function to extract the key for each element. * @param elementSelector A function to map each source element to an element in the Grouping. * @param resultSelector A function to extract the key for each element. * @param comparer An equality comparer to compare values. */ groupBy<TKey, TElement, TResult>(keySelector: (item: T) => TKey, elementSelector: (item: T) => TElement, resultSelector: (key: TKey, elements: Iterable<TElement>) => TResult, comparer: EqualityComparer<TKey>): Enumerable<TResult>; /** * Correlates the elements of two sequences based on equality of keys and groups the results. The default equality comparer is used to compare keys. * @param inner The sequence to join to the current sequence. * @param outerKeySelector A function to extract the join key from each element of the first sequence. * @param innerKeySelector A function to extract the join key from each element of the second sequence. 
* @param resultSelector A function to create a result element from an element from the first sequence and a collection of matching elements from the second sequence. */ groupJoin<TInner, TKey, TResult>(inner: Iterable<TInner>, outerKeySelector: (item: T) => TKey, innerKeySelector: (item: TInner) => TKey, resultSelector: (outer: T, inner: Enumerable<TInner>) => TResult): Enumerable<TResult>; /** * Correlates the elements of two sequences based on key equality and groups the results. A specified EqualityComparer is used to compare keys. * @param inner The sequence to join to the current sequence. * @param outerKeySelector A function to extract the join key from each element of the first sequence. * @param innerKeySelector A function to extract the join key from each element of the second sequence. * @param resultSelector A function to create a result element from an element from the first sequence and a collection of matching elements from the second sequence. * @param comparer An equality comparer to compare values. */ groupJoin<TInner, TKey, TResult>(inner: Iterable<TInner>, outerKeySelector: (item: T) => TKey, innerKeySelector: (item: TInner) => TKey, resultSelector: (outer: T, inner: Enumerable<TInner>) => TResult, comparer: EqualityComparer<TKey>): Enumerable<TResult>; /** * Produces the set intersection of two sequences by using the default equality comparer to compare values. * @param second An Iterable whose distinct elements that also appear in the first sequence will be returned. */ intersect(second: Iterable<T>): Enumerable<T>; /** * Produces the set intersection of two sequences by using the default equality comparer to compare values. * @param second An Iterable whose distinct elements that also appear in the first sequence will be returned. * @param comparer An EqualityComparer to compare values. */ intersect(second: Iterable<T>, comparer: EqualityComparer<T>): Enumerable<T>; /** * Correlates the elements of two sequences based on matching keys. 
The default equality comparer is used to compare keys. * @param inner The sequence to join to the current sequence. * @param outerKeySelector A function to extract the join key from each element of the first sequence. * @param innerKeySelector A function to extract the join key from each element of the second sequence. * @param resultSelector A function to create a result element from two matching elements. */ join<TInner, TKey, TResult>(inner: Iterable<TInner>, outerKeySelector: (item: T) => TKey, innerKeySelector: (item: TInner) => TKey, resultSelector: (outer: T, inner: TInner) => TResult): Enumerable<TResult>; /** * Correlates the elements of two sequences based on matching keys. A specified EqualityComparer is used to compare keys. * @param inner The sequence to join to the current sequence. * @param outerKeySelector A function to extract the join key from each element of the first sequence. * @param innerKeySelector A function to extract the join key from each element of the second sequence. * @param resultSelector A function to create a result element from two matching elements. * @param comparer An equality comparer to compare values. */ join<TInner, TKey, TResult>(inner: Iterable<TInner>, outerKeySelector: (item: T) => TKey, innerKeySelector: (item: TInner) => TKey, resultSelector: (outer: T, inner: TInner) => TResult, comparer: EqualityComparer<TKey>): Enumerable<TResult>; /** * Returns the last element of a sequence. */ last(): T /** * Returns the last element of a sequence that satisfies a specified condition. * @param predicate A function to test each source element for a condition. */ last(predicate: (item: T) => boolean): T /** * Returns the last element of a sequence, or null if the sequence contains no elements. 
*/ lastOrDefault(): T /** * Returns the last element of a sequence, or null if the sequence contains no elements. * @param predicate A function to test each source element for a condition. */ lastOrDefault(predicate: (item: T) => boolean): T /** * Returns the last element of a sequence that satisfies a condition or null if no such element is found. * @param predicate A function to test each source element for a condition. * @param defaultValue The value to return if no element exists with specified condition. */ lastOrDefault(predicate: (item: T) => boolean, defaultValue: T): T /** * Returns the maximum value in a sequence of values. */ max(): T /** * Invokes a transform function on each element of a sequence and returns the maximum value. * @param selector A transform function to apply to each element. */ max<TResult>(selector: (item: T) => TResult): TResult /** * Returns the minimum value in a sequence of values. */ min(): T /** * Invokes a transform function on each element of a sequence and returns the minimum value. * @param selector A transform function to apply to each element. */ min<TResult>(selector: (item: T) => TResult): TResult /** * Filters the elements of an Enumerable based on a specified type. * @param type The type to filter the elements of the sequence on. */ ofType<TResult>(type: { new (...args: any[]): TResult }): Enumerable<TResult> /** * Sorts the elements of a sequence in ascending order by using a specified comparer. * @param keySelector A function to extract a key from each element. */ orderBy<TKey>(keySelector: (item: T) => TKey): OrderedEnumerable<T> /** * Sorts the elements of a sequence in ascending order by using a specified comparer. * Returns an OrderedEnumerable whose elements are sorted according to a key. * @param keySelector A function to extract a key from each element. * @param comparer A Comparer to compare keys. 
*/ orderBy<TKey>(keySelector: (item: T) => TKey, comparer: EqualityComparer<TKey>): OrderedEnumerable<T> /** * Sorts the elements of a sequence in descending order according to a key. * Returns an OrderedEnumerable whose elements are sorted in descending order according to a key. * @param keySelector A function to extract a key from each element. */ orderByDescending<TKey>(keySelector: (item: T) => TKey): OrderedEnumerable<T> /** * Sorts the elements of a sequence in descending order by using a specified comparer. * Returns an OrderedEnumerable whose elements are sorted in descending order according to a key. * @param keySelector A function to extract a key from each element. * @param comparer A Comparer to compare keys. */ orderByDescending<TKey>(keySelector: (item: T) => TKey, comparer: EqualityComparer<TKey>): OrderedEnumerable<T> /** * Inverts the order of the elements in a sequence. */ reverse(): Enumerable<T> /** * Determines whether two sequences are equal by comparing the elements by using the default equality comparer for their type. * @param second An Iterable to compare to the first sequence. */ sequenceEqual(second: Iterable<T>): boolean /** * Determines whether two sequences are equal by comparing their elements by using a specified EqualityComparer. * @param second An Iterable to compare to the first sequence. * @param comparer The EqualityComparer to compare values. */ sequenceEqual(second: Iterable<T>, comparer: EqualityComparer<T>): boolean /** * Projects each element of a sequence into a new form. May incorporate the element's index. * @param selector A transform function to apply to each source element; the second parameter of the function represents the index of the source element. 
*/ select<TResult>(selector: (item: T, index: number) => TResult): Enumerable<TResult>; /** * Projects each element of a sequence to an Enumerable and flattens the resulting sequences into one sequence. The index of each source element is used in the projected form of that element. * @param collectionSelector A transform function to apply to each source element; the second parameter of the function represents the index of the source element. * @param resultSelector A transform function to apply to each element of the intermediate sequence. */ selectMany<TCollection, TResult>(collectionSelector: (item: T, index: number) => Iterable<TCollection>, resultSelector?: (item: T, collection: TCollection) => TResult): Enumerable<TResult>; /** * Returns the only element of a sequence, and throws an exception if there is not exactly one element in the sequence. */ single(): T /** * Returns the only element of a sequence that satisfies a specified condition, and throws an exception if more than one such element exists. * @param predicate A function to test each source element for a condition. */ single(predicate: (item: T) => boolean): T /** * Returns the only element of a sequence, or a null if the sequence is empty; this method throws an exception if there is more than one element in the sequence. */ singleOrDefault(): T /** * Returns the only element of a sequence that satisfies a specified condition or a null if no such element exists; this method throws an exception if more than one element satisfies the condition. * @param predicate A function to test each source element for a condition. */ singleOrDefault(predicate: (item: T) => boolean): T /** * Returns the only element of a sequence that satisfies a specified condition or a default value if no such element exists; this method throws an exception if more than one element satisfies the condition. * @param predicate A function to test each source element for a condition. 
* @param defaultValue The value to return if no element exists with specified condition. */ singleOrDefault(predicate: (item: T) => boolean, defaultValue: T): T /** * Bypasses a specified number of elements in a sequence and then returns the remaining elements. * @param count The number of elements to skip before returning the remaining elements. */ skip(count: number): Enumerable<T> /** * Bypasses elements in a sequence as long as a specified condition is true and then returns the remaining elements. * @param predicate A function to test each source element for a condition. */ skipWhile(predicate: (item: T) => boolean): Enumerable<T> /** * Bypasses elements in a sequence as long as a specified condition is true and then returns the remaining elements. The element's index is used in the logic of the predicate function. * @param predicate A function to test each source element for a condition; the second parameter of the function represents the index of the source element. */ skipWhile(predicate: (item: T, index: number) => boolean): Enumerable<T> /** * Computes the sum of a sequence of values. */ sum(): number /** * Computes the sum of the sequence of values that are obtained by invoking a transform function on each element of the input sequence. * @param selector A transform function to apply to each element. */ sum(selector: (item: T) => number): number /** * Returns a specified number of contiguous elements from the start of a sequence. * @param count The number of elements to return. */ take(count: number): Enumerable<T> /** * Returns elements from a sequence as long as a specified condition is true. * @param predicate A function to test each source element for a condition. */ takeWhile(predicate: (item: T) => boolean): Enumerable<T> /** * Returns elements from a sequence as long as a specified condition is true. The element's index is used in the logic of the predicate function. 
* @param predicate A function to test each source element for a condition; the second parameter of the function represents the index of the source element. */ takeWhile(predicate: (item: T, index: number) => boolean): Enumerable<T> /** * Creates an array from an Enumerable. */ toArray(): T[] /** * Creates a Dictionary from an Enumerable according to a specified key selector function. * @param keySelector A function to extract a key from each element. */ toDictionary<TKey>(keySelector: (item: T) => TKey): Dictionary<TKey, T>; /** * Creates a Dictionary from an Enumerable according to specified key selector and comparer. * @param keySelector A function to extract a key from each element. * @param comparer An equality comparer to compare values. */ toDictionary<TKey>(keySelector: (item: T) => TKey, comparer: EqualityComparer<TKey>): Dictionary<TKey, T>; /** * Creates a Dictionary from an Enumerable according to specified key selector and element selector functions. * @param keySelector A function to extract a key from each element. * @param elementSelector A transform function to produce a result element value from each element. */ toDictionary<TKey, TElement>(keySelector: (item: T) => TKey, elementSelector: (item: T) => TElement): Dictionary<TKey, TElement>; /** * Creates a Dictionary from an Enumerable according to a specified key selector function, a comparer, and an element selector function. * @param keySelector A function to extract a key from each element. * @param elementSelector A transform function to produce a result element value from each element. * @param comparer An equality comparer to compare values. */ toDictionary<TKey, TElement>(keySelector: (item: T) => TKey, elementSelector: (item: T) => TElement, comparer: EqualityComparer<TKey>): Dictionary<TKey, TElement>; /** * Creates a List from an Enumerable. */ toList(): List<T> /** * Creates a Lookup from an Enumerable according to a specified key selector function. 
* @param keySelector A function to extract a key from each element. */ toLookup<TKey>(keySelector: (item: T) => TKey): Lookup<TKey, T>; /** * Creates a Lookup from an Enumerable according to a specified key selector function and comparer. * @param keySelector A function to extract a key from each element. * @param comparer An equality comparer to compare values. */ toLookup<TKey>(keySelector: (item: T) => TKey, comparer: EqualityComparer<TKey>): Lookup<TKey, T>; /** * Creates a Lookup from an Enumerable according to specified key selector and element selector functions. * @param keySelector A function to extract a key from each element. * @param elementSelector A transform function to produce a result element value from each element. */ toLookup<TKey, TElement>(keySelector: (item: T) => TKey, elementSelector: (item: T) => TElement): Lookup<TKey, TElement>; /** * Creates a Lookup from an Enumerable according to a specified key selector function, a comparer and an element selector function. * @param keySelector A function to extract a key from each element. * @param elementSelector A transform function to produce a result element value from each element. * @param comparer An equality comparer to compare values. */ toLookup<TKey, TElement>(keySelector: (item: T) => TKey, elementSelector: (item: T) => TElement, comparer: EqualityComparer<TKey>): Lookup<TKey, TElement>; /** * Produces the set union of two sequences by using the default equality comparer. * @param second An Iterable whose distinct elements form the second set for the union. */ union(second: Iterable<T>): Enumerable<T> /** * Produces the set union of two sequences by using a specified EqualityComparer. * @param second An Iterable whose distinct elements form the second set for the union. * @param comparer The EqualityComparer to compare values. */ union(second: Iterable<T>, comparer: EqualityComparer<T>): Enumerable<T> /** * Filters a sequence of values based on a predicate. 
* @param predicate A function to test each source element for a condition. */ where(predicate: (item: T) => boolean): Enumerable<T>; /** * Filters a sequence of values based on a predicate. Each element's index is used in the logic of the predicate function. * @param predicate A function to test each source element for a condition; the second parameter of the function represents the index of the source element. */ where(predicate: (item: T, index: number) => boolean): Enumerable<T>; /** * Merges two sequences by using the specified predicate function. * @param second The second sequence to merge. * @param resultSelector A function that specifies how to merge the elements from the two sequences. */ zip<TSecond, TResult>(second: Iterable<TSecond>, resultSelector: (first: T, second: TSecond) => TResult): Enumerable<TResult>; } /** * Represents Array-like objects which has the "length" property and indexed properties access, eg. jQuery */ interface ArrayLike<T> { length: number; [n: number]: T; } /** * Provides 'hash' and 'equals' functions for a particular type, suitable for use in hashing algorithms and data structures such as a hash table. */ interface RuntimeComparer { /** * Serves as a hash function for a particular type. */ __hash__(): number; /** * Determines whether the specified Object is equal to the current Object. */ __equals__(obj: any): boolean; } /** * Provides a set of static methods that provide support for internal operations. */ interface MultiplexRuntime { /** * Serves as a hash function for a particular type, suitable for use in hashing algorithms and data structures such as a hash table. * @param obj An object to retrieve the hash code for. */ hash(obj: any): number; /** * Determines whether the specified object instances are considered equal. * @param objA The first object to compare. * @param objB The second object to compare. 
*/ equals(objA: any, objB: any): boolean; /** * Performs a comparison of two objects of the same type and returns a value indicating whether one object is less than, equal to, or greater than the other. * @param objA The first object to compare. * @param objB The second object to compare. */ compare<T>(objA: T, objB: T): number; /** * Creates a function expression from the specified string lambda expression * @param exp String lambda expression. */ lambda<T, TResult>(exp: string): (obj: T) => TResult; /** * Creates a function expression from the specified string lambda expression * @param exp String lambda expression. */ lambda<T1, T2, TResult>(exp: string): (obj1: T1, obj2: T2) => TResult; /** * Creates a function expression from the specified string lambda expression * @param exp String lambda expression. */ lambda<T1, T2, T3, TResult>(exp: string): (obj1: T1, obj2: T2, obj3: T3) => TResult; /** * Creates a function expression from the specified string lambda expression * @param exp String lambda expression. */ lambda<TResult>(exp: string): (...args: any[]) => TResult; /** * Defines new or modifies existing properties directly on the specified object, returning the object. * @param obj The object on which to define or modify properties. * @param prop The name of the property to be defined or modified. * @param attributes The descriptor for the property being defined or modified. */ define<T>(obj: T, prop: String, attributes: PropertyDescriptor): T; /** * Extends the given object by implementing supplied members. * @param obj The object on which to define or modify properties. * @param properties Represents the mixin source object * @param attributes The descriptor for the property being defined or modified. 
*/ mixin<T>(obj: T, properties: Object, attributes?: PropertyDescriptor): T; } /** * Defines MultiplexStatic module members */ interface MultiplexStatic { /* Factory Methods --------------------------------------------------------------------------*/ /** * Exposes the enumerator, which supports an iteration over the specified Enumerable object. * @param obj An Iterable object. eg. Enumerable, Array, String, Set, Map, Iterable & Generators */ <T>(obj: Iterable<T>): Enumerable<T> /** * Defines an enumerator, which supports an iteration over the specified Generator function. * @param factory An Enumerator factory function. */ <T>(factory: () => Enumerator<T>): Enumerable<T> /** * Defines an enumerator, which supports an iteration over the items of the specified Array-like object. * An Array-like object is an object which has the "length" property and indexed properties access, eg. jQuery * @param obj An Array-like object. */ <T>(obj: ArrayLike<T>): Enumerable<T> /** * Defines an enumerator, which supports an iteration over the arguments local variable available within all functions. * @param obj arguments local variable available within all functions. */ (obj: IArguments): Enumerable<any> /** * Defines an enumerator, which supports an iteration over the properties of the specified object. * @param obj A regular Object. */ (obj: Object): Enumerable<KeyValuePair<string, any>> /* Static Methods --------------------------------------------------------------------------*/ /** * Gets and combines hash code for the given parameters, calls the overridden "hash" method when available. * @param objs Optional number of objects to combine their hash codes. */ hash(...obj: any[]): number; /** * Determines whether the specified object instances are considered equal. calls the overridden "equals" method when available. * @param objA The first object to compare. * @param objB The second object to compare. 
*/ equals(objA: any, objB: any): boolean; /** * Determines whether the specified object instances are considered equal. calls the overridden "equals" method when available. * @param objA The first object to compare. * @param objB The second object to compare. * @param comparer An equality comparer to compare values. */ equals(objA: any, objB: any, comparer: EqualityComparer<any>): boolean; /** * Performs a comparison of two objects of the same type and returns a value indicating whether one object is less than, equal to, or greater than the other. * @param objA The first object to compare. * @param objB The second object to compare. */ compare<T>(objA: T, objB: T): number; /** * Extends Enumerable extension methods to the given type * @param type The type to extend. */ enumerableExtend(type: Function): void; /** * Returns an empty Enumerable. */ empty<T>(): Enumerable<T>; /** * Detects if an object is Enumerable. * @param obj An object to check its Enumerability. */ is(obj: any): boolean; /** * Generates a sequence of integral numbers within a specified range. * @param start The value of the first integer in the sequence. * @param count The number of sequential integers to generate. */ range(start: number, count: number): Enumerable<number>; /** * Generates a sequence that contains one repeated value. * @param element The value to be repeated. * @param count The number of times to repeat the value in the generated sequence. */ repeat<T>(element: T, count: number): Enumerable<T>; /* Mutiplex Types --------------------------------------------------------------------------*/ /** * Provides a set of static methods that provide support for internal operations. */ runtime: MultiplexRuntime /** * Supports a simple iteration over a collection. */ Enumerator: EnumeratorConstructor /** * Exposes the enumerator, which supports a simple iteration over a collection of a specified type. 
*/ Enumerable: EnumerableConstructor /** * Provides a base class for implementations of Comparer<T> generic interface. */ Comparer: ComparerConstructor /** * Provides a base class for implementations of the EqualityComparer. */ EqualityComparer: EqualityComparerConstructor /** * Initializes a new instance of the abstract Collection class. */ Collection: CollectionConstructor /** * Initializes a new instance of the abstract Collection class. */ ReadOnlyCollection: ReadOnlyCollectionConstructor /** * Represents a strongly typed list of objects that can be accessed by index. */ List: ListConstructor /** * Represents a collection of key/value pairs that are sorted by key based on the associated Comparer implementation. */ SortedList: SortedListConstructor /** * Defines a key/value pair that can be set or retrieved. */ KeyValuePair: KeyValuePairConstructor /** * Represents a collection of keys and values. */ Dictionary: DictionaryConstructor /** * Represents a set of values. */ HashSet: HashSetConstructor /** * Represents a node in a LinkedList. */ LinkedListNode: LinkedListNodeConstructor /** * Represents a doubly linked list. */ LinkedList: LinkedListConstructor /** * Represents a first-in, first-out collection of objects. */ Queue: QueueConstructor /** * Represents a variable size last-in-first-out (LIFO) collection of instances of the same arbitrary type. */ Stack: StackConstructor } }
the_stack
import { ActionTypes, AnyAction, AttachAction, BatchAction, CreateAction, DeleteAction, DerivedAction, DetachAction, EntitiesByType, Entity, Id, IdsByType, InvalidAction, MoveAction, MoveAttachedAction, SetStateAction, SingularAction, SortAction, SortAttachedAction, State, UpdateAction, Reducer, } from './interfaces'; import { ModelSchemaReader } from './schema'; import Derivator from './derivator'; import { ActionUtils } from './actions'; import { Cardinalities, UpdateActionMethod } from './enums'; import { arrayMove, arrayPut } from './util'; export const makeReducer = <S extends State>( schema: ModelSchemaReader, derivator: Derivator<S>, actionTypes: ActionTypes, actionUtils: ActionUtils ) => { const rootReducer: Reducer<S> = (state: S = schema.getEmptyState(), action: AnyAction): S => { // if not handleable, then return state without changes if (!actionUtils.isHandleable(action)) { return state; } if (actionUtils.isStateSetter(action)) { if (action.type === actionTypes.SET_STATE) { return (action as SetStateAction<S>).state; } } if (actionUtils.isBatch(action)) { // with a batch action, reduce iteratively const batchAction = action as BatchAction; return batchAction.actions.reduce((prevState: S, action: SingularAction | InvalidAction) => { return singularReducer(prevState, action); }, state); } else { // with a singular action, reduce once return singularReducer(state, action as SingularAction); } }; function singularReducer(state: S, action: SingularAction | InvalidAction): S { const singularAction = action as SingularAction; let actions: SingularAction[]; if (actionUtils.isDerivable(singularAction)) { const derivedAction = derivator.deriveAction(state, singularAction) as DerivedAction; actions = derivedAction.derived; } else { actions = [singularAction]; } // reduce [action] return actions.reduce((prevState: S, action: SingularAction) => { // sort has to be handled here because it needs both slices if (action.type === actionTypes.SORT) { const { entityType, 
compare } = action as SortAction; const ids = prevState.ids[entityType]; const entities = prevState.entities[entityType]; const sortedIds = [...ids].sort((idA, idB) => { const entityA = entities[idA]; const entityB = entities[idB]; // comparison error will need to be handled in the future // ... return compare(entityA, entityB); }); return { entities: prevState.entities, ids: { ...prevState.ids, [entityType]: sortedIds, }, } as S; } // all other actions handled here return { entities: entitiesReducer(prevState.entities, action), ids: idsReducer(prevState.ids, action), } as S; }, state); } const defaultEntitiesState = schema.getEmptyEntitiesByTypeState(); function entitiesReducer( state: EntitiesByType = defaultEntitiesState, action: SingularAction ): EntitiesByType { if (action.type === actionTypes.INVALID) { return state; } if (!schema.typeExists(action.entityType)) { return state; // if no such entityType, then no change } if (action.type === actionTypes.DETACH) { const { entityType, id, detachableId, relation } = action as DetachAction; const entity = state[entityType][id] as Entity; if (!entity) { return state; // if entity not found, then no change } const relationKey = schema.type(entityType).resolveRelationKey(relation); if (!relationKey) { return state; // if entity relation key not found, then no change } let newEntity = entity; // to contain the change immutably const cardinality = schema.type(entityType).resolveRelationCardinality(relation); if (cardinality === Cardinalities.ONE) { const attachedId = entity[relationKey] as Id; if (detachableId !== attachedId) { return state; // if detachableId is not the attached id, then no change } // detach it: set the relation value to undefined newEntity = { ...entity, [relationKey]: undefined }; } if (cardinality === Cardinalities.MANY) { const attachedIds = (entity[relationKey] || []) as Id[]; // detach it: filter out the detachableId newEntity = { ...entity, [relationKey]: attachedIds.filter(attachedId => 
attachedId !== detachableId), }; } return { ...state, [entityType]: { ...state[entityType], [id]: newEntity, }, }; } if (action.type === actionTypes.ATTACH) { const { entityType, id, attachableId, relation, index } = action as AttachAction; const entity = state[entityType][id] as Entity; if (!entity) { return state; // if entity not found, then no change } const relationKey = schema.type(entityType).resolveRelationKey(relation); if (!relationKey) { return state; // if entity relation key not found, then no change } let newEntity = entity; // to contain the change immutably const cardinality = schema.type(entityType).resolveRelationCardinality(relation); if (cardinality === Cardinalities.ONE) { newEntity = { ...newEntity, [relationKey]: attachableId, }; } if (cardinality === Cardinalities.MANY) { if (!entity[relationKey] || !entity[relationKey].includes(attachableId)) { newEntity = { ...newEntity, [relationKey]: arrayPut(attachableId, newEntity[relationKey], index), }; } } return { ...state, [entityType]: { ...state[entityType], [id]: newEntity, }, }; } if (action.type === actionTypes.DELETE) { const { entityType, id } = action as DeleteAction; const entity = state[entityType][id] as Entity; if (!entity) { return state; // if entity not found, then no change } const entitiesOfType = { ...state[entityType] }; delete entitiesOfType[id]; return { ...state, [entityType]: entitiesOfType, }; } if (action.type === actionTypes.CREATE) { const { entityType, id, data } = action as CreateAction; const entity = state[entityType][id] as Entity; if (entity) { return state; // if entity exists, then no change } return { ...state, [entityType]: { ...state[entityType], [id]: data || {}, }, }; } if (action.type === actionTypes.UPDATE) { const { entityType, id, data, method } = action as UpdateAction; const entity = state[entityType][id] as Entity; if (!entity) { return state; // if entity not found, then no change } let newEntity = { ...entity }; if (method === 
UpdateActionMethod.PUT) { // extract the current relational data, so we have a copy of it and it won't get overwritten const relationKeys = schema.type(entityType).getRelationKeys(); const relationalData = relationKeys.reduce((relationalData, relationKey) => { if (entity[relationKey]) { relationalData[relationKey] = entity[relationKey]; } return relationalData; }, {} as { [k: string]: Id | Id[] }); // replace the current entity with the update data and the relational data newEntity = { ...data, ...relationalData }; } if (method === UpdateActionMethod.PATCH) { // merge the update data with the current data newEntity = { ...entity, ...data }; } return { ...state, [entityType]: { ...state[entityType], [id]: newEntity, }, }; } if (action.type === actionTypes.MOVE_ATTACHED) { const { entityType, id, relation, src, dest } = action as MoveAttachedAction; const entity = state[entityType][id] as Entity; if (!entity) { return state; // if entity not found, then no change } const relationKey = schema.type(entityType).resolveRelationKey(relation); if (!relationKey) { return state; // if entity relation key not found, then no change } const cardinality = schema.type(entityType).resolveRelationCardinality(relation); if (cardinality === Cardinalities.ONE) { return state; // if cardinality is one, then no change } const attachedIds = entity[relationKey]; if (!Array.isArray(attachedIds)) { return state; // if attached ids is not an array, then no change } const newEntity = { ...entity, [relationKey]: arrayMove(attachedIds, src, dest), }; return { ...state, [entityType]: { ...state[entityType], [id]: newEntity, }, }; } if (action.type === actionTypes.SORT_ATTACHED) { const { entityType, id, relation, compare } = action as SortAttachedAction; const entity = state[entityType][id] as Entity; if (!entity) { return state; // if entity not found, then no change } const relationKey = schema.type(entityType).resolveRelationKey(relation); const relationType = 
schema.type(entityType).resolveRelationType(relation); if (!relationKey || !relationType) { return state; // if entity relation key or relation type not found, then no change } const cardinality = schema.type(entityType).resolveRelationCardinality(relation); if (cardinality === Cardinalities.ONE) { return state; // if cardinality is one, then no change } const attachedIds = entity[relationKey]; if (!Array.isArray(attachedIds)) { return state; // if attached ids is not an array, then no change } const relatedEntities = state[relationType]; const sortedIds = [...attachedIds].sort((idA, idB) => { const entityA = relatedEntities[idA]; const entityB = relatedEntities[idB]; // comparison error will need to be handled in the future // ... return compare(entityA, entityB); }); const newEntity = { ...entity, [relationKey]: sortedIds, }; return { ...state, [entityType]: { ...state[entityType], [id]: newEntity, }, }; } return state; } const defaultIdsState = schema.getEmptyIdsByTypeState(); function idsReducer(state: IdsByType = defaultIdsState, action: SingularAction): IdsByType { if (action.type === actionTypes.INVALID) { return state; } if (!schema.typeExists(action.entityType)) { return state; // if no such entityType, then no change } if (action.type === actionTypes.DELETE) { const { entityType, id } = action as DeleteAction; const idsOfEntity = state[entityType].filter(existingId => existingId !== id); return { ...state, [entityType]: idsOfEntity, }; } if (action.type === actionTypes.CREATE) { const { entityType, id, index } = action as CreateAction; // this O(n) operation can be improved if existence is checked // in an O(c) lookup against the entities slice from one level up, // and then set the existence boolean on the action if (state[entityType].includes(id)) { return state; // if entity exists, then no change } return { ...state, [entityType]: arrayPut(id, state[entityType], index), }; } if (action.type === actionTypes.MOVE) { const { entityType, src, dest } = 
action as MoveAction; return { ...state, [entityType]: arrayMove(state[entityType], src, dest), }; } return state; } return rootReducer; };
the_stack
declare class OnAckCallback extends NSObject { static alloc(): OnAckCallback; // inherited from NSObject static new(): OnAckCallback; // inherited from NSObject timingOutAfterCallback(seconds: number, callback: (p1: NSArray<any>) => void): void; } declare class SSLSecurity extends NSObject { static alloc(): SSLSecurity; // inherited from NSObject static new(): SSLSecurity; // inherited from NSObject constructor(o: { usePublicKeys: boolean; }); initWithUsePublicKeys(usePublicKeys: boolean): this; } declare class SocketAckEmitter extends NSObject { static alloc(): SocketAckEmitter; // inherited from NSObject static new(): SocketAckEmitter; // inherited from NSObject readonly rawEmitView: SocketRawAckView; with(items: NSArray<any>): void; } declare class SocketAnyEvent extends NSObject { static alloc(): SocketAnyEvent; // inherited from NSObject static new(): SocketAnyEvent; // inherited from NSObject readonly event: string; readonly items: NSArray<any>; } declare class SocketEngine extends NSObject implements NSURLSessionDelegate, SocketEngineSpec { static alloc(): SocketEngine; // inherited from NSObject static new(): SocketEngine; // inherited from NSObject client: SocketEngineClient; // inherited from SocketEngineSpec readonly closed: boolean; // inherited from SocketEngineSpec readonly compress: boolean; // inherited from SocketEngineSpec connectParams: NSDictionary<string, any>; // inherited from SocketEngineSpec readonly connected: boolean; // inherited from SocketEngineSpec readonly cookies: NSArray<NSHTTPCookie>; // inherited from SocketEngineSpec readonly debugDescription: string; // inherited from NSObjectProtocol readonly description: string; // inherited from NSObjectProtocol readonly engineQueue: NSObject; // inherited from SocketEngineSpec extraHeaders: NSDictionary<string, string>; // inherited from SocketEngineSpec readonly fastUpgrade: boolean; // inherited from SocketEngineSpec readonly forcePolling: boolean; // inherited from SocketEngineSpec 
readonly forceWebsockets: boolean; // inherited from SocketEngineSpec readonly hash: number; // inherited from NSObjectProtocol readonly isProxy: boolean; // inherited from NSObjectProtocol readonly polling: boolean; // inherited from SocketEngineSpec readonly probing: boolean; // inherited from SocketEngineSpec readonly sid: string; // inherited from SocketEngineSpec readonly socketPath: string; // inherited from SocketEngineSpec readonly superclass: typeof NSObject; // inherited from NSObjectProtocol readonly urlPolling: NSURL; // inherited from SocketEngineSpec readonly urlWebSocket: NSURL; // inherited from SocketEngineSpec readonly websocket: boolean; // inherited from SocketEngineSpec readonly ws: WebSocket; // inherited from SocketEngineSpec readonly // inherited from NSObjectProtocol constructor(o: { client: SocketEngineClient; url: NSURL; options: NSDictionary<string, any>; }); // inherited from SocketEngineSpec URLSessionDidBecomeInvalidWithError(session: NSURLSession, error: NSError): void; URLSessionDidFinishEventsForBackgroundURLSession(session: NSURLSession): void; URLSessionDidReceiveChallengeCompletionHandler(session: NSURLSession, challenge: NSURLAuthenticationChallenge, completionHandler: (p1: NSURLSessionAuthChallengeDisposition, p2: NSURLCredential) => void): void; class(): typeof NSObject; conformsToProtocol(aProtocol: any /* Protocol */): boolean; connect(): void; didErrorWithReason(reason: string): void; disconnectWithReason(reason: string): void; doFastUpgrade(): void; flushWaitingForPostToWebSocket(): void; initWithClientUrlOptions(client: SocketEngineClient, url: NSURL, options: NSDictionary<string, any>): this; isEqual(object: any): boolean; isKindOfClass(aClass: typeof NSObject): boolean; isMemberOfClass(aClass: typeof NSObject): boolean; parseEngineData(data: NSData): void; parseEngineMessage(message: string): void; performSelector(aSelector: string): any; performSelectorWithObject(aSelector: string, object: any): any; 
performSelectorWithObjectWithObject(aSelector: string, object1: any, object2: any): any; respondsToSelector(aSelector: string): boolean; retainCount(): number; self(): this; writeWithTypeWithData(msg: string, type: SocketEnginePacketType, data: NSArray<NSData>): void; } interface SocketEngineClient { engineDidCloseWithReason(reason: string): void; engineDidErrorWithReason(reason: string): void; engineDidOpenWithReason(reason: string): void; engineDidReceivePong(): void; engineDidSendPing(): void; parseEngineBinaryData(data: NSData): void; parseEngineMessage(msg: string): void; } declare var SocketEngineClient: { prototype: SocketEngineClient; }; declare const enum SocketEnginePacketType { Open = 0, Close = 1, Ping = 2, Pong = 3, Message = 4, Upgrade = 5, Noop = 6 } interface SocketEngineSpec { client: SocketEngineClient; closed: boolean; compress: boolean; connectParams: NSDictionary<string, any>; connected: boolean; cookies: NSArray<NSHTTPCookie>; engineQueue: NSObject; extraHeaders: NSDictionary<string, string>; fastUpgrade: boolean; forcePolling: boolean; forceWebsockets: boolean; polling: boolean; probing: boolean; sid: string; socketPath: string; urlPolling: NSURL; urlWebSocket: NSURL; websocket: boolean; ws: WebSocket; connect(): void; didErrorWithReason(reason: string): void; disconnectWithReason(reason: string): void; doFastUpgrade(): void; flushWaitingForPostToWebSocket(): void; initWithClientUrlOptions?(client: SocketEngineClient, url: NSURL, options: NSDictionary<string, any>): SocketEngineSpec; parseEngineData(data: NSData): void; parseEngineMessage(message: string): void; writeWithTypeWithData(msg: string, type: SocketEnginePacketType, data: NSArray<NSData>): void; } declare var SocketEngineSpec: { prototype: SocketEngineSpec; }; declare class SocketIOClient extends NSObject { static alloc(): SocketIOClient; // inherited from NSObject static new(): SocketIOClient; // inherited from NSObject readonly manager: SocketManagerSpec; readonly nsp: string; 
readonly rawEmitView: SocketRawView; readonly sid: string; readonly status: SocketIOStatus; constructor(o: { manager: SocketManagerSpec; nsp: string; }); connect(): void; connectWithTimeoutAfterWithHandler(timeoutAfter: number, handler: () => void): void; disconnect(): void; emitWith(event: string, items: NSArray<any>): void; emitWithAckWith(event: string, items: NSArray<any>): OnAckCallback; handleAckData(ack: number, data: NSArray<any>): void; handleEventDataIsInternalMessageWithAck(event: string, data: NSArray<any>, isInternalMessage: boolean, ack: number): void; initWithManagerNsp(manager: SocketManagerSpec, nsp: string): this; joinNamespace(): void; leaveNamespace(): void; off(event: string): void; offWithId(id: NSUUID): void; onAny(handler: (p1: SocketAnyEvent) => void): void; onCallback(event: string, callback: (p1: NSArray<any>, p2: SocketAckEmitter) => void): NSUUID; onceCallback(event: string, callback: (p1: NSArray<any>, p2: SocketAckEmitter) => void): NSUUID; removeAllHandlers(): void; setReconnectingWithReason(reason: string): void; } declare const enum SocketIOStatus { NotConnected = 0, Disconnected = 1, Connecting = 2, Connected = 3 } declare var SocketIOVersionNumber: number; declare var SocketIOVersionString: interop.Reference<number>; declare class SocketManager extends NSObject implements SocketManagerSpec { static alloc(): SocketManager; // inherited from NSObject static new(): SocketManager; // inherited from NSObject readonly defaultSocket: SocketIOClient; // inherited from SocketManagerSpec engine: SocketEngineSpec; // inherited from SocketManagerSpec forceNew: boolean; // inherited from SocketManagerSpec handleQueue: NSObject; // inherited from SocketManagerSpec nsps: NSDictionary<string, SocketIOClient>; // inherited from SocketManagerSpec reconnectWait: number; // inherited from SocketManagerSpec reconnects: boolean; // inherited from SocketManagerSpec readonly socketURL: NSURL; // inherited from SocketManagerSpec readonly status: 
SocketIOStatus; // inherited from SocketManagerSpec constructor(o: { socketURL: NSURL; config: NSDictionary<string, any>; }); connect(): void; connectSocket(socket: SocketIOClient): void; didDisconnectWithReason(reason: string): void; disconnect(): void; disconnectSocket(socket: SocketIOClient): void; disconnectSocketForNamespace(nsp: string): void; emitAllWithItems(event: string, items: NSArray<any>): void; engineDidCloseWithReason(reason: string): void; engineDidErrorWithReason(reason: string): void; engineDidOpenWithReason(reason: string): void; engineDidReceivePong(): void; engineDidSendPing(): void; initWithSocketURLConfig(socketURL: NSURL, config: NSDictionary<string, any>): this; parseEngineBinaryData(data: NSData): void; parseEngineMessage(msg: string): void; reconnect(): void; removeSocket(socket: SocketIOClient): SocketIOClient; socketForNamespace(nsp: string): SocketIOClient; } interface SocketManagerSpec extends SocketEngineClient { defaultSocket: SocketIOClient; engine: SocketEngineSpec; forceNew: boolean; handleQueue: NSObject; nsps: NSDictionary<string, SocketIOClient>; reconnectWait: number; reconnects: boolean; socketURL: NSURL; status: SocketIOStatus; connect(): void; connectSocket(socket: SocketIOClient): void; didDisconnectWithReason(reason: string): void; disconnect(): void; disconnectSocket(socket: SocketIOClient): void; disconnectSocketForNamespace(nsp: string): void; emitAllWithItems(event: string, items: NSArray<any>): void; reconnect(): void; removeSocket(socket: SocketIOClient): SocketIOClient; socketForNamespace(nsp: string): SocketIOClient; } declare var SocketManagerSpec: { prototype: SocketManagerSpec; }; declare class SocketRawAckView extends NSObject { static alloc(): SocketRawAckView; // inherited from NSObject static new(): SocketRawAckView; // inherited from NSObject with(items: NSArray<any>): void; } declare class SocketRawView extends NSObject { static alloc(): SocketRawView; // inherited from NSObject static new(): 
SocketRawView; // inherited from NSObject emitWith(event: string, items: NSArray<any>): void; emitWithAckWith(event: string, items: NSArray<any>): OnAckCallback; }
the_stack
// // Copyright (c) Microsoft. // Licensed under the MIT license. See LICENSE file in the project root for full license information. // import fs from 'fs'; import path from 'path'; import recursiveReadDirectory from 'recursive-readdir'; import { wrapError, sleep } from '../../utils'; import { Organization } from '../../business'; import { RepositoryMetadataEntity, GitHubRepositoryPermission, GitHubRepositoryPermissions, GitHubRepositoryVisibility } from '../../entities/repositoryMetadata/repositoryMetadata'; import { Repository } from '../../business'; import { CreateRepositoryEntrypoint, ICreateRepositoryApiResult } from '../../api/createRepo'; import { CoreCapability, IAlternateTokenOption, IOperationsProviders, IOperationsRepositoryMetadataProvider, IProviders, throwIfNotCapable } from '../../interfaces'; import { ErrorHelper } from '../../transitional'; import { setupRepositoryReadmeSubstring, setupRepositorySubstring } from '../../features/newRepositoryLockdown'; export interface IApprovalPackage { id: string; // requestingUser: string; repositoryMetadata: RepositoryMetadataEntity; createResponse: unknown; isUnlockingExistingRepository: number | string | boolean | null | undefined; isFork: boolean; isTransfer: boolean; createEntrypoint: CreateRepositoryEntrypoint, repoCreateResponse: ICreateRepositoryApiResult, } interface IFileContents { path: string; content: string; // base 64 content } export enum RepoWorkflowDecision { Approve = 'approve', Deny = 'deny', } export interface IRepositoryWorkflowOutput { error?: any; message?: string; } interface ICommitterOptions { isUsingApp: boolean; alternateTokenOptions: IAlternateTokenOption; alternateToken: string; login: string; } export class RepoWorkflowEngine { organization: Organization; request: RepositoryMetadataEntity; user: string; id: string; typeName: string; private createResponse?: unknown; private isUnlockingExistingRepository: boolean; private isFork: boolean; private isTransfer: boolean; private 
githubResponse: ICreateRepositoryApiResult; private createEntrypoint: CreateRepositoryEntrypoint; private _hasAuthorizedTemplateCommitter = false; private _contentCommitter: ICommitterOptions; private log: IRepositoryWorkflowOutput[] = []; private repository: Repository; constructor(private providers: IProviders, organization: Organization, approvalPackage: IApprovalPackage) { this.request = approvalPackage.repositoryMetadata; // this.user = approvalPackage.requestingUser; this.id = approvalPackage.id; this.organization = organization; this.typeName = 'Repository Create'; this.githubResponse = approvalPackage?.repoCreateResponse; this.createResponse = approvalPackage.createResponse; this.isUnlockingExistingRepository = !!approvalPackage.isUnlockingExistingRepository; this.isFork = approvalPackage.isFork; this.isTransfer = approvalPackage.isTransfer; this.createEntrypoint = approvalPackage.createEntrypoint; } private async getTemplateCommitter() { if (this._contentCommitter) { return this._contentCommitter; } const { config } = this.providers; this._contentCommitter = { isUsingApp: true, login: null, alternateTokenOptions: null, alternateToken: null, }; if (config?.github?.user?.initialCommit?.username && config.github.user.initialCommit.token) { const login = config.github.user.initialCommit.username; const alternateToken = config.github.user.initialCommit.token; const alternateTokenOptions = { alternateToken, }; if (!this._hasAuthorizedTemplateCommitter) { try { await this.authorizeTemplateCommitter({ login, alternateToken, alternateTokenOptions, isUsingApp: false }); } catch (error) { this.log.push({ error: new Error(`Error trying to authorize template committer ${login}: ${error}`) }); } } } return this._contentCommitter; } private async finalizeCommitter() { if (!this._hasAuthorizedTemplateCommitter) { return; } const { login } = await this.getTemplateCommitter(); if (login && this.repository) { try { await this.repository.removeCollaborator(login); 
this.log.push({ message: `Temporary committer ${login} removed` }); } catch (error) { this.log.push({ error: new Error(`Error removing committer ${login}: ${error}`) }); } } this._hasAuthorizedTemplateCommitter = false; } private async authorizeTemplateCommitter(options: ICommitterOptions) { if (!options.login) { return; } const invitation = await this.repository.addCollaborator(options.login, GitHubRepositoryPermission.Push); let hadError = false; if (invitation?.id) { try { await this.repository.acceptCollaborationInvite(invitation.id, options.alternateTokenOptions); } catch (error) { hadError = true; this.log.push({ error: new Error(`The collaboration invitation could not be accepted for ${options.login}: ${error}`) }); } } if (!hadError) { this.log.push({ message: `Temporarily invited ${options.login} to commit to the repository` }); this._contentCommitter = options; this._hasAuthorizedTemplateCommitter = true; } } editGet(req, res) { req.individualContext.webContext.render({ view: 'org/team/approvals/editRepo', title: 'Edit Repo Request', state: { entry: this.request, teamUrl: req.teamUrl, team: req.team, }, }); } editPost(req, res, next) { const { operations } = this.providers; const ops = throwIfNotCapable<IOperationsRepositoryMetadataProvider>(operations, CoreCapability.RepositoryMetadataProvider); const repositoryMetadataProvider = ops.repositoryMetadataProvider; const visibility = req.body.repoVisibility; if (!(visibility === 'public' || visibility === 'private' || visibility === 'internal')) { return next(new Error('Visibility for the repo request must be provided.')); } this.request.repositoryName = req.body.repoName; this.request.initialRepositoryVisibility = visibility; // visibility === 'public' ? GitHubRepositoryVisibility.Public : GitHubRepositoryVisibility.Private; this.request.initialRepositoryDescription = req.body.repoDescription; // this ... 
repoUrl = req.body.repoUrl repositoryMetadataProvider.updateRepositoryMetadata(this.request).then(ok => { return res.redirect(req.teamUrl + 'approvals/' + this.id); }).catch(error => { return next(wrapError(error, 'There was a problem updating the request.')); }); } getApprovedViewName() { return 'org/team/repos/repoCreated'; } getDecisionEmailViewName() { return 'repoApprovals/decision'; } async executeNewRepositoryChores(): Promise<IRepositoryWorkflowOutput[] /* output */> { const request = this.request; const repositoryName = request.repositoryName; this.repository = this.organization.repository(repositoryName); for (let i = 0; i < request.initialTeamPermissions.length; i++) { let { teamId, permission, teamName } = request.initialTeamPermissions[i]; if (teamId && !teamName) { try { const team = this.organization.team(Number(teamId)); await team.getDetails(); if (team.name) { teamName = team.name; } } catch (noFail) { /* ignore */ } } if (teamId && permission) { await this.addTeamPermission(Number(teamId), teamName, permission); } } const patchUpdates: any = {}; if (request.initialRepositoryVisibility === GitHubRepositoryVisibility.Public && this.githubResponse?.github?.private === true) { // Time to make it public again. Though this is debatable. 
patchUpdates.private = false; } if (request.initialRepositoryDescription && this.githubResponse?.github?.description !== request.initialRepositoryDescription) { patchUpdates.description = request.initialRepositoryDescription; } else if (this.githubResponse?.github?.description?.includes(setupRepositorySubstring)) { patchUpdates.description = ''; } const setupUrlSubstring = this.organization.absoluteBaseUrl; if (request.initialRepositoryHomepage && this.githubResponse?.github?.homepage !== request.initialRepositoryHomepage) { patchUpdates.homepage = request.initialRepositoryHomepage; } else if (this.githubResponse?.github?.homepage?.includes(setupUrlSubstring)) { patchUpdates.homepage = ''; } if (Object.getOwnPropertyNames(patchUpdates).length > 0) { await this.resetOriginalProperties(patchUpdates); } if (request.initialTemplate) { try { await this.addTemplateCollaborators(request.initialTemplate); await this.createAddTemplateFilesTask(request.initialTemplate, this.isUnlockingExistingRepository, this.isFork, this.isTransfer); await this.addTemplateWebHook(request.initialTemplate); } catch (outerError) { // ignored console.dir(outerError); } } else { try { await this.tryResetReadme(request?.initialRepositoryDescription); } catch (outerError) { console.dir(outerError); } } // GitHub adds the creator of a repo (when using a PAT) as an admin directly now, but we don't need that... await this.removeOrganizationCollaboratorTask(); // Add any administrator logins as invited, if present if (request.initialAdministrators && request.initialAdministrators.length > 0) { await this.addAdministratorCollaboratorsTask(request.initialAdministrators); } await this.finalizeCommitter(); return this.log.filter(real => real); } async addTeamPermission(id: number, teamName: string, permission: GitHubRepositoryPermission): Promise<void> { let attempts = 0; const calculateDelay = (retryCount: number) => 500 * Math.pow(2, retryCount); let error = null; const teamIdentity = teamName ? 
`${teamName} (${id})` : `with the ID ${id}`; while (attempts < 3) { try { await this.repository.setTeamPermission(id, permission); this.log.push({ message: `Successfully added the ${this.repository.name} repo to GitHub team ${teamIdentity} with ${permission.toUpperCase()} permissions.` }); return; } catch (iterationError) { error = iterationError; } const nextInterval = calculateDelay(attempts++); await sleep(nextInterval); }; const message = `The addition of the repo ${this.repository.name} to GitHub team ${teamIdentity} failed. GitHub returned an error: ${error.message}.`; this.log.push({ error, message }); } async getFileContents(templateRoot: string, templatePath: string, templateName: string, absoluteFileNames: string[]): Promise<IFileContents[]> { const contents = []; for (let i = 0; i < absoluteFileNames.length; i++) { const absoluteFileName = absoluteFileNames[i]; const fileName = path.relative(templateRoot, absoluteFileName); const fileContents = await this.readFileToBase64(templatePath, templateName, fileName); contents.push(fileContents); } return contents; } async getTemplateFilenames(templateRoot: string): Promise<string[]> { return new Promise((resolve, reject) => { recursiveReadDirectory(templateRoot, (error, fileNames: string[]) => { return error ? reject(error) : resolve(fileNames); }); }); } async readFileToBase64(templatePath: string, templateName: string, fileName: string): Promise<IFileContents> { return new Promise((resolve, reject) => { fs.readFile(path.join(templatePath, templateName, fileName), (error, file) => { if (error) { return reject(error); } const base64content = file.toString('base64'); return resolve({ path: fileName, content: base64content, }); }); }); } async addTemplateWebHook(templateName: string): Promise<void> { const { config } = this.providers; const definitions = config.github.templates.definitions; const templateData = definitions ? 
definitions[templateName] : null; if (!templateData || !templateData.webhook) { return null; } const webhook = templateData.webhook; const webhookSharedSecret = templateData.webhookSharedSecret; const webhookEvents = templateData.webhookEvents; const webhookFriendlyName = templateData.webhookFriendlyName; let error = null; let message = null; const friendlyName = webhookFriendlyName || webhook; try { await this.repository.createWebhook({ config: { url: webhook, content_type: 'json', secret: webhookSharedSecret, insecure_ssl: '0', }, events: webhookEvents || ['push'], }); message = `${friendlyName} webhook added to the repository.`; } catch (webhookCreateError) { error = new Error(`The template ${templateName} defines a webhook ${friendlyName}. Adding the webhook failed. ${webhookCreateError.message()}`); error.inner = webhookCreateError; } this.log.push({ error, message }); } async removeOrganizationCollaboratorTask(): Promise<void> { const result = null; if (this.organization.usesApp) { // If a GitHub App created the repo, it is not present as a collaborator. 
return; } try { const createAccount = await this.organization.getAuthorizedOperationsAccount(); await this.repository.removeCollaborator(createAccount.login); } catch (ignoredError) { if (ErrorHelper.GetStatus(ignoredError) === 400) { // GitHub App in use } else { console.warn(`removeOrganizationCollaboratorTask ignored error: ${ignoredError}`); } } return result; } async createAddTemplateFilesTask(templateName: string, isUnlockingExistingRepository: boolean, isFork: boolean, isTransfer: boolean): Promise<void> { const { config } = this.providers; const templatePath = config.github.templates.directory; const { alternateTokenOptions } = await this.getTemplateCommitter(); try { const templateRoot = path.join(templatePath, templateName); const fileNames = await this.getTemplateFilenames(templateRoot); const fileContents = await this.getFileContents(templateRoot, templatePath, templateName, fileNames); const uploadedFiles = []; if (isFork || isTransfer) { const subMessage = isFork ? 'is a fork' : 'was transferred'; this.log.push({ message: `Repository ${subMessage}, template files will not be committed. Please check the LICENSE and other files to understand existing obligations.` }); return; } try { for (let i = 0; i < fileContents.length; i++) { const item = fileContents[i]; let sha = null; // if (isUnlockingExistingRepository) { try { const fileDescription = await this.repository.getFile(item.path); if (fileDescription && fileDescription.sha) { sha = fileDescription.sha; } } catch (getFileError) { if (getFileError.status === 404) { // often the file will not exist, that's great. } else { throw getFileError; } } // } const fileOptions = sha ? { ...alternateTokenOptions, sha } : alternateTokenOptions; const message = sha ? 
`${item.path} updated to template` : `${item.path} committed`; await this.repository.createFile(item.path, item.content, message, fileOptions); uploadedFiles.push(item.path); } } catch (error) { const notUploaded = fileContents.map(fc => fc.path).filter(f => !uploadedFiles.includes(f)); if (uploadedFiles.length) { this.log.push({ error, message: `Initial commit of ${uploadedFiles.join(', ')} template files to the ${this.repository.name} repo partially succeeded. Not uploaded: ${notUploaded.join(', ')}. Error: ${error.message}` }); } else { this.log.push({ error, message: `Initial commit of template file(s) to the ${this.repository.name} repo failed. Not uploaded: ${notUploaded.join(', ')}. Error: ${error.message}.` }); } } } catch (error) { this.log.push({ error }); } } async addAdministratorCollaboratorsTask(administratorLogins: string[]): Promise<void> { if (!administratorLogins || !administratorLogins.length) { return null; } const errors = []; const messages = []; for (const login of administratorLogins) { try { await this.repository.addCollaborator(login, GitHubRepositoryPermission.Admin); messages.push(`Added collaborator ${login} with admin permission`); } catch (error) { errors.push(error.message); } } let error = null; let message = null; if (errors.length) { error = errors.join(', '); } else { message = messages.join(', '); } this.log.push({ error, message }); } async resetOriginalProperties(patch: any): Promise<void> { let error: Error = null; let message: string = null; try { const description = 'Patching original values for ' + Object.getOwnPropertyNames(patch).join(', '); await this.repository.update(patch); message = description; } catch (err) { error = new Error(`Error patching: ${err}`); } this.log.push({ error, message }); } async tryResetReadme(initialDescription: string): Promise<void> { let error: Error = null; let message: string = null; try { const readmeFile = await this.repository.getReadme(); const sha = readmeFile.sha; if 
(readmeFile.content?.includes(setupRepositoryReadmeSubstring)) { message = `Updating ${readmeFile.path}`; const descriptionSection = initialDescription ? `\n\n${initialDescription}` : ''; const newReadmeFile = `# ${this.repository.name}${descriptionSection}`; const asBuffer = Buffer.from(newReadmeFile, 'utf-8'); const asBase64 = asBuffer.toString('base64'); await this.repository.createFile(readmeFile.path, asBase64, 'Initial README', { sha }); } } catch (err) { if (ErrorHelper.IsNotFound(err)) { message = 'No README.md file to update.'; } else { error = new Error(`Could not reset README content: ${err}`); } } this.log.push({ error, message }); } async addTemplateCollaborators(templateName: string): Promise<void> { const { config } = this.providers; const definitions = config.github.templates.definitions; const templateData = definitions ? definitions[templateName] : null; if (!templateData || !templateData.collaborators) { return null; } const collaborators = templateData.collaborators; const errors = []; const messages = []; for (const permission of GitHubRepositoryPermissions) { const users = collaborators[permission]; if (users && Array.isArray(users)) { for (const { username, acceptInvitationToken } of users) { try { const invitation = await this.repository.addCollaborator(username, permission); messages.push(`Added collaborator ${username} with ${permission} permission`); if (acceptInvitationToken) { const invitationId = invitation.id; await this.repository.acceptCollaborationInvite(invitationId, acceptInvitationToken); } } catch (error) { errors.push(error.message); } } } } let error = null; let message = null; if (errors.length) { error = errors.join(', '); } else { message = messages.join(', '); } this.log.push({ error, message }); } }
the_stack
import { Messages, SendCallback, AntPlusSensor, AntPlusScanner } from './ant'; class FitnessEquipmentSensorState { constructor(deviceID: number) { this.DeviceID = deviceID; } _EventCount0x19?: number; _EventCount0x1A?: number; DeviceID: number; Temperature?: number; ZeroOffset?: number; SpinDownTime?: number; EquipmentType?: 'Treadmill' | 'Elliptical' | 'Reserved' | 'Rower' | 'Climber' | 'NordicSkier' | 'Trainer/StationaryBike' | 'General'; ElapsedTime?: number; Distance?: number; RealSpeed?: number; VirtualSpeed?: number; HeartRate?: number; HeartRateSource?: 'HandContact' | 'EM' | 'ANT+'; State?: 'OFF' | 'READY' | 'IN_USE' | 'FINISHED'; CycleLength?: number; Incline?: number; Resistance?: number; METs?: number; CaloricBurnRate?: number; Calories?: number; AscendedDistance?: number; DescendedDistance?: number; Strides?: number; Strokes?: number; Cadence?: number; AccumulatedPower?: number; InstantaneousPower?: number; AveragePower?: number; TrainerStatus?: number; TargetStatus?: 'OnTarget' | 'LowSpeed' | 'HighSpeed'; WheelTicks?: number; WheelPeriod?: number; Torque?: number; HwVersion?: number; ManId?: number; ModelNum?: number; SwVersion?: number; SerialNumber?: number; PairedDevices: any[] = []; } class FitnessEquipmentScanState extends FitnessEquipmentSensorState { Rssi: number; Threshold: number; } export class FitnessEquipmentSensor extends AntPlusSensor { static deviceType = 0x11; public attach(channel, deviceID): void { super.attach(channel, 'receive', deviceID, FitnessEquipmentSensor.deviceType, 0, 255, 8192); this.state = new FitnessEquipmentSensorState(deviceID); } private state: FitnessEquipmentSensorState; protected updateState(deviceId, data) { this.state.DeviceID = deviceId; updateState(this, this.state, data); } private _setUserConfiguration(userWeight?: number, bikeWeight?: number, wheelDiameter?: number, gearRatio?: number, cbk?: SendCallback) { const m = userWeight === undefined ? 
0xFFFF : Math.max(0, Math.min(65534, Math.round(userWeight * 100))); const df = wheelDiameter === undefined ? 0xFF : Math.round(wheelDiameter * 10) % 10; const mb = bikeWeight === undefined ? 0xFFF : Math.max(0, Math.min(1000, Math.round(bikeWeight * 20))); const d = wheelDiameter === undefined ? 0xFF : Math.max(0, Math.min(254, Math.round(wheelDiameter))); const gr = gearRatio === undefined ? 0x00 : Math.max(1, Math.min(255, Math.round(gearRatio / .03))); const payload = [0x37, m & 0xFF, (m >> 8) & 0xFF, 0xFF, (df & 0xF) | ((mb & 0xF) << 4), (mb >> 4) & 0xF, d & 0xFF, gr & 0xFF]; const msg = Messages.acknowledgedData(this.channel, payload); this.send(msg, cbk); } public setUserConfiguration(cbk: SendCallback); public setUserConfiguration(userWeight: number, cbk?: SendCallback); public setUserConfiguration(userWeight: number, bikeWeight: number, cbk?: SendCallback); public setUserConfiguration(userWeight: number, bikeWeight: number, wheelDiameter: number, cbk?: SendCallback); public setUserConfiguration(userWeight: number, bikeWeight: number, wheelDiameter: number, gearRatio: number, cbk?: SendCallback); public setUserConfiguration(userWeight?: number | SendCallback, bikeWeight?: number | SendCallback, wheelDiameter?: number | SendCallback, gearRatio?: number | SendCallback, cbk?: SendCallback) { if (typeof (userWeight) === 'function') { return this._setUserConfiguration(undefined, undefined, undefined, undefined, userWeight); } else if (typeof (bikeWeight) === 'function') { return this._setUserConfiguration(userWeight, undefined, undefined, undefined, bikeWeight); } else if (typeof (wheelDiameter) === 'function') { return this._setUserConfiguration(userWeight, bikeWeight, undefined, undefined, wheelDiameter); } else if (typeof (gearRatio) === 'function') { return this._setUserConfiguration(userWeight, bikeWeight, wheelDiameter, undefined, gearRatio); } else { return this._setUserConfiguration(userWeight, bikeWeight, wheelDiameter, gearRatio, cbk); } } public 
setBasicResistance(resistance: number, cbk?: SendCallback) { const res = Math.max(0, Math.min(200, Math.round(resistance * 2))); const payload = [0x30, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, res & 0xFF]; const msg = Messages.acknowledgedData(this.channel, payload); this.send(msg, cbk); } public setTargetPower(power: number, cbk?: SendCallback) { const p = Math.max(0, Math.min(4000, Math.round(power * 4))); const payload = [0x31, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, p & 0xFF, (p >> 8) & 0xFF]; const msg = Messages.acknowledgedData(this.channel, payload); this.send(msg, cbk); } private _setWindResistance(windCoeff?: number, windSpeed?: number, draftFactor?: number, cbk?: SendCallback) { const wc = windCoeff === undefined ? 0xFF : Math.max(0, Math.min(186, Math.round(windCoeff * 100))); const ws = windSpeed === undefined ? 0xFF : Math.max(0, Math.min(254, Math.round(windSpeed + 127))); const df = draftFactor === undefined ? 0xFF : Math.max(0, Math.min(100, Math.round(draftFactor * 100))); const payload = [0x32, 0xFF, 0xFF, 0xFF, 0xFF, wc & 0xFF, ws & 0xFF, df & 0xFF]; const msg = Messages.acknowledgedData(this.channel, payload); this.send(msg, cbk); } public setWindResistance(cbk: SendCallback); public setWindResistance(windCoeff: number, cbk?: SendCallback); public setWindResistance(windCoeff: number, windSpeed: number, cbk?: SendCallback); public setWindResistance(windCoeff: number, windSpeed: number, draftFactor: number, cbk?: SendCallback); public setWindResistance(windCoeff?: number | SendCallback, windSpeed?: number | SendCallback, draftFactor?: number | SendCallback, cbk?: SendCallback) { if (typeof (windCoeff) === 'function') { return this._setWindResistance(undefined, undefined, undefined, windCoeff); } else if (typeof (windSpeed) === 'function') { return this._setWindResistance(windCoeff, undefined, undefined, windSpeed); } else if (typeof (draftFactor) === 'function') { return this._setWindResistance(windCoeff, windSpeed, undefined, draftFactor); } else { return 
this._setWindResistance(windCoeff, windSpeed, draftFactor, cbk); } } private _setTrackResistance(slope?: number, rollingResistanceCoeff?: number, cbk?: SendCallback) { const s = slope === undefined ? 0xFFFF : Math.max(0, Math.min(40000, Math.round((slope + 200) * 100))); const rr = rollingResistanceCoeff === undefined ? 0xFF : Math.max(0, Math.min(254, Math.round(rollingResistanceCoeff * 20000))); const payload = [0x33, 0xFF, 0xFF, 0xFF, 0xFF, s & 0xFF, (s >> 8) & 0xFF, rr & 0xFF]; const msg = Messages.acknowledgedData(this.channel, payload); this.send(msg, cbk); } public setTrackResistance(cbk: SendCallback); public setTrackResistance(slope: number, cbk?: SendCallback); public setTrackResistance(slope: number, rollingResistanceCoeff: number, cbk?: SendCallback); public setTrackResistance(slope?: number | SendCallback, rollingResistanceCoeff?: number | SendCallback, cbk?: SendCallback) { if (typeof (slope) === 'function') { return this._setTrackResistance(undefined, undefined, slope); } else if (typeof (rollingResistanceCoeff) === 'function') { return this._setTrackResistance(slope, undefined, rollingResistanceCoeff); } else { return this._setTrackResistance(slope, rollingResistanceCoeff, cbk); } } } export class FitnessEquipmentScanner extends AntPlusScanner { protected deviceType() { return FitnessEquipmentSensor.deviceType; } private states: { [id: number]: FitnessEquipmentScanState } = {}; protected createStateIfNew(deviceId) { if (!this.states[deviceId]) { this.states[deviceId] = new FitnessEquipmentScanState(deviceId); } } protected updateRssiAndThreshold(deviceId, rssi, threshold) { this.states[deviceId].Rssi = rssi; this.states[deviceId].Threshold = threshold; } protected updateState(deviceId, data) { updateState(this, this.states[deviceId], data); } } function resetState(state: FitnessEquipmentSensorState | FitnessEquipmentScanState) { delete state.ElapsedTime; delete state.Distance; delete state.RealSpeed; delete state.VirtualSpeed; delete 
state.HeartRate; delete state.HeartRateSource; delete state.CycleLength; delete state.Incline; delete state.Resistance; delete state.METs; delete state.CaloricBurnRate; delete state.Calories; delete state._EventCount0x19; delete state._EventCount0x1A; delete state.Cadence; delete state.AccumulatedPower; delete state.InstantaneousPower; delete state.AveragePower; delete state.TrainerStatus; delete state.TargetStatus; delete state.AscendedDistance; delete state.DescendedDistance; delete state.Strides; delete state.Strokes; delete state.WheelTicks; delete state.WheelPeriod; delete state.Torque; } function updateState( sensor: FitnessEquipmentSensor | FitnessEquipmentScanner, state: FitnessEquipmentSensorState | FitnessEquipmentScanState, data: Buffer) { const page = data.readUInt8(Messages.BUFFER_INDEX_MSG_DATA); switch (page) { case 0x01: { const temperature = data.readUInt8(Messages.BUFFER_INDEX_MSG_DATA + 3); if (temperature !== 0xFF) { state.Temperature = -25 + temperature * 0.5; } const calBF = data.readUInt8(Messages.BUFFER_INDEX_MSG_DATA + 1); if (calBF & 0x40) { state.ZeroOffset = data.readUInt16LE(Messages.BUFFER_INDEX_MSG_DATA + 4); } if (calBF & 0x80) { state.SpinDownTime = data.readUInt16LE(Messages.BUFFER_INDEX_MSG_DATA + 6); } break; } case 0x10: { const equipmentTypeBF = data.readUInt8(Messages.BUFFER_INDEX_MSG_DATA + 1); switch (equipmentTypeBF & 0x1F) { case 19: state.EquipmentType = 'Treadmill'; break; case 20: state.EquipmentType = 'Elliptical'; break; case 21: state.EquipmentType = 'Reserved'; break; case 22: state.EquipmentType = 'Rower'; break; case 23: state.EquipmentType = 'Climber'; break; case 24: state.EquipmentType = 'NordicSkier'; break; case 25: state.EquipmentType = 'Trainer/StationaryBike'; break; default: state.EquipmentType = 'General'; break; } let elapsedTime = data.readUInt8(Messages.BUFFER_INDEX_MSG_DATA + 2); let distance = data.readUInt8(Messages.BUFFER_INDEX_MSG_DATA + 3); const speed = 
data.readUInt16LE(Messages.BUFFER_INDEX_MSG_DATA + 4); const heartRate = data.readUInt8(Messages.BUFFER_INDEX_MSG_DATA + 6); const capStateBF = data.readUInt8(Messages.BUFFER_INDEX_MSG_DATA + 7); if (heartRate !== 0xFF) { switch (capStateBF & 0x03) { case 3: { state.HeartRate = heartRate; state.HeartRateSource = 'HandContact'; break; } case 2: { state.HeartRate = heartRate; state.HeartRateSource = 'EM'; break; } case 1: { state.HeartRate = heartRate; state.HeartRateSource = 'ANT+'; break; } default: { delete state.HeartRate; delete state.HeartRateSource; break; } } } elapsedTime /= 4; const oldElapsedTime = (state.ElapsedTime || 0) % 64; if (elapsedTime !== oldElapsedTime) { if (oldElapsedTime > elapsedTime) { //Hit rollover value elapsedTime += 64; } } state.ElapsedTime = (state.ElapsedTime || 0) + elapsedTime - oldElapsedTime; if (capStateBF & 0x04) { const oldDistance = (state.Distance || 0) % 256; if (distance !== oldDistance) { if (oldDistance > distance) { //Hit rollover value distance += 256; } } state.Distance = (state.Distance || 0) + distance - oldDistance; } else { delete state.Distance; } if (capStateBF & 0x08) { state.VirtualSpeed = speed / 1000; delete state.RealSpeed; } else { delete state.VirtualSpeed; state.RealSpeed = speed / 1000; } switch ((capStateBF & 0x70) >> 4) { case 1: state.State = 'OFF'; break; case 2: state.State = 'READY'; resetState(state); break; case 3: state.State = 'IN_USE'; break; case 4: state.State = 'FINISHED'; break; default: delete state.State; break; } if (capStateBF & 0x80) { // lap } break; } case 0x11: { const cycleLen = data.readUInt8(Messages.BUFFER_INDEX_MSG_DATA + 3); const incline = data.readInt16LE(Messages.BUFFER_INDEX_MSG_DATA + 4); const resistance = data.readUInt8(Messages.BUFFER_INDEX_MSG_DATA + 6); const capStateBF = data.readUInt8(Messages.BUFFER_INDEX_MSG_DATA + 7); if (cycleLen !== 0xFF) { state.CycleLength = cycleLen / 100; } if (incline >= -10000 && incline <= 10000) { state.Incline = incline / 100; } if 
(resistance !== 0xFF) { state.Resistance = resistance; } switch ((capStateBF & 0x70) >> 4) { case 1: state.State = 'OFF'; break; case 2: state.State = 'READY'; resetState(state); break; case 3: state.State = 'IN_USE'; break; case 4: state.State = 'FINISHED'; break; default: delete state.State; break; } if (capStateBF & 0x80) { // lap } break; } case 0x12: { const mets = data.readUInt16LE(Messages.BUFFER_INDEX_MSG_DATA + 2); const caloricbr = data.readUInt16LE(Messages.BUFFER_INDEX_MSG_DATA + 4); const calories = data.readUInt8(Messages.BUFFER_INDEX_MSG_DATA + 6); const capStateBF = data.readUInt8(Messages.BUFFER_INDEX_MSG_DATA + 7); if (mets !== 0xFFFF) { state.METs = mets / 100; } if (caloricbr !== 0xFFFF) { state.CaloricBurnRate = caloricbr / 10; } if (capStateBF & 0x01) { state.Calories = calories; } switch ((capStateBF & 0x70) >> 4) { case 1: state.State = 'OFF'; break; case 2: state.State = 'READY'; resetState(state); break; case 3: state.State = 'IN_USE'; break; case 4: state.State = 'FINISHED'; break; default: delete state.State; break; } if (capStateBF & 0x80) { // lap } break; } case 0x13: { const cadence = data.readUInt8(Messages.BUFFER_INDEX_MSG_DATA + 4); let negDistance = data.readUInt8(Messages.BUFFER_INDEX_MSG_DATA + 5); let posDistance = data.readUInt8(Messages.BUFFER_INDEX_MSG_DATA + 6); const flagStateBF = data.readUInt8(Messages.BUFFER_INDEX_MSG_DATA + 7); if (cadence !== 0xFF) { state.Cadence = cadence; } if (flagStateBF & 0x02) { const oldNegDistance = (state.DescendedDistance || 0) % 256; if (negDistance !== oldNegDistance) { if (oldNegDistance > negDistance) { negDistance += 256; } } state.DescendedDistance = (state.DescendedDistance || 0) + negDistance - oldNegDistance; } if (flagStateBF & 0x01) { const oldPosDistance = (state.AscendedDistance || 0) % 256; if (posDistance !== oldPosDistance) { if (oldPosDistance > posDistance) { posDistance += 256; } } state.AscendedDistance = (state.AscendedDistance || 0) + posDistance - oldPosDistance; } 
switch ((flagStateBF & 0x70) >> 4) { case 1: state.State = 'OFF'; break; case 2: state.State = 'READY'; resetState(state); break; case 3: state.State = 'IN_USE'; break; case 4: state.State = 'FINISHED'; break; default: delete state.State; break; } if (flagStateBF & 0x80) { // lap } break; } case 0x14: { let posDistance = data.readUInt8(Messages.BUFFER_INDEX_MSG_DATA + 2); let strides = data.readUInt8(Messages.BUFFER_INDEX_MSG_DATA + 3); const cadence = data.readUInt8(Messages.BUFFER_INDEX_MSG_DATA + 4); const power = data.readUInt16LE(Messages.BUFFER_INDEX_MSG_DATA + 5); const flagStateBF = data.readUInt8(Messages.BUFFER_INDEX_MSG_DATA + 7); if (cadence !== 0xFF) { state.Cadence = cadence; } if (power !== 0xFFFF) { state.InstantaneousPower = power; } if (flagStateBF & 0x02) { const oldPosDistance = (state.AscendedDistance || 0) % 256; if (posDistance !== oldPosDistance) { if (oldPosDistance > posDistance) { posDistance += 256; } } state.AscendedDistance = (state.AscendedDistance || 0) + posDistance - oldPosDistance; } if (flagStateBF & 0x01) { const oldStrides = (state.Strides || 0) % 256; if (strides !== oldStrides) { if (oldStrides > strides) { strides += 256; } } state.Strides = (state.Strides || 0) + strides - oldStrides; } switch ((flagStateBF & 0x70) >> 4) { case 1: state.State = 'OFF'; break; case 2: state.State = 'READY'; resetState(state); break; case 3: state.State = 'IN_USE'; break; case 4: state.State = 'FINISHED'; break; default: delete state.State; break; } if (flagStateBF & 0x80) { // lap } break; } case 0x16: { let strokes = data.readUInt8(Messages.BUFFER_INDEX_MSG_DATA + 3); const cadence = data.readUInt8(Messages.BUFFER_INDEX_MSG_DATA + 4); const power = data.readUInt16LE(Messages.BUFFER_INDEX_MSG_DATA + 5); const flagStateBF = data.readUInt8(Messages.BUFFER_INDEX_MSG_DATA + 7); if (cadence !== 0xFF) { state.Cadence = cadence; } if (power !== 0xFFFF) { state.InstantaneousPower = power; } if (flagStateBF & 0x01) { const oldStrokes = (state.Strokes 
|| 0) % 256; if (strokes !== oldStrokes) { if (oldStrokes > strokes) { strokes += 256; } } state.Strokes = (state.Strokes || 0) + strokes - oldStrokes; } switch ((flagStateBF & 0x70) >> 4) { case 1: state.State = 'OFF'; break; case 2: state.State = 'READY'; resetState(state); break; case 3: state.State = 'IN_USE'; break; case 4: state.State = 'FINISHED'; break; default: delete state.State; break; } if (flagStateBF & 0x80) { // lap } break; } case 0x17: { let strides = data.readUInt8(Messages.BUFFER_INDEX_MSG_DATA + 3); const cadence = data.readUInt8(Messages.BUFFER_INDEX_MSG_DATA + 4); const power = data.readUInt16LE(Messages.BUFFER_INDEX_MSG_DATA + 5); const flagStateBF = data.readUInt8(Messages.BUFFER_INDEX_MSG_DATA + 7); if (cadence !== 0xFF) { state.Cadence = cadence; } if (power !== 0xFFFF) { state.InstantaneousPower = power; } if (flagStateBF & 0x01) { const oldStrides = (state.Strides || 0) % 256; if (strides !== oldStrides) { if (oldStrides > strides) { strides += 256; } } state.Strides = (state.Strides || 0) + strides - oldStrides; } switch ((flagStateBF & 0x70) >> 4) { case 1: state.State = 'OFF'; break; case 2: state.State = 'READY'; resetState(state); break; case 3: state.State = 'IN_USE'; break; case 4: state.State = 'FINISHED'; break; default: delete state.State; break; } if (flagStateBF & 0x80) { // lap } break; } case 0x18: { let strides = data.readUInt8(Messages.BUFFER_INDEX_MSG_DATA + 3); const cadence = data.readUInt8(Messages.BUFFER_INDEX_MSG_DATA + 4); const power = data.readUInt16LE(Messages.BUFFER_INDEX_MSG_DATA + 5); const flagStateBF = data.readUInt8(Messages.BUFFER_INDEX_MSG_DATA + 7); if (cadence !== 0xFF) { state.Cadence = cadence; } if (power !== 0xFFFF) { state.InstantaneousPower = power; } if (flagStateBF & 0x01) { const oldStrides = (state.Strides || 0) % 256; if (strides !== oldStrides) { if (oldStrides > strides) { strides += 256; } } state.Strides = (state.Strides || 0) + strides - oldStrides; } switch ((flagStateBF & 0x70) >> 4) 
{ case 1: state.State = 'OFF'; break; case 2: state.State = 'READY'; resetState(state); break; case 3: state.State = 'IN_USE'; break; case 4: state.State = 'FINISHED'; break; default: delete state.State; break; } if (flagStateBF & 0x80) { // lap } break; } case 0x19: { const oldEventCount = state._EventCount0x19 || 0; let eventCount = data.readUInt8(Messages.BUFFER_INDEX_MSG_DATA + 1); const cadence = data.readUInt8(Messages.BUFFER_INDEX_MSG_DATA + 2); let accPower = data.readUInt16LE(Messages.BUFFER_INDEX_MSG_DATA + 3); const power = data.readUInt16LE(Messages.BUFFER_INDEX_MSG_DATA + 5) & 0xFFF; const trainerStatus = data.readUInt8(Messages.BUFFER_INDEX_MSG_DATA + 6) >> 4; const flagStateBF = data.readUInt8(Messages.BUFFER_INDEX_MSG_DATA + 7); if (eventCount !== oldEventCount) { state._EventCount0x19 = eventCount; if (oldEventCount > eventCount) { //Hit rollover value eventCount += 255; } } if (cadence !== 0xFF) { state.Cadence = cadence; } if (power !== 0xFFF) { state.InstantaneousPower = power; const oldAccPower = (state.AccumulatedPower || 0) % 65536; if (accPower !== oldAccPower) { if (oldAccPower > accPower) { accPower += 65536; } } state.AccumulatedPower = (state.AccumulatedPower || 0) + accPower - oldAccPower; state.AveragePower = (accPower - oldAccPower) / (eventCount - oldEventCount); } state.TrainerStatus = trainerStatus; switch (flagStateBF & 0x03) { case 0: state.TargetStatus = 'OnTarget'; break; case 1: state.TargetStatus = 'LowSpeed'; break; case 2: state.TargetStatus = 'HighSpeed'; break; default: delete state.TargetStatus; break; } switch ((flagStateBF & 0x70) >> 4) { case 1: state.State = 'OFF'; break; case 2: state.State = 'READY'; resetState(state); break; case 3: state.State = 'IN_USE'; break; case 4: state.State = 'FINISHED'; break; default: delete state.State; break; } if (flagStateBF & 0x80) { // lap } break; } case 0x1A: { const oldEventCount = state._EventCount0x1A || 0; let eventCount = data.readUInt8(Messages.BUFFER_INDEX_MSG_DATA + 1); 
let wheelTicks = data.readUInt8(Messages.BUFFER_INDEX_MSG_DATA + 2); let accWheelPeriod = data.readUInt16LE(Messages.BUFFER_INDEX_MSG_DATA + 3); let accTorque = data.readUInt16LE(Messages.BUFFER_INDEX_MSG_DATA + 5); const flagStateBF = data.readUInt8(Messages.BUFFER_INDEX_MSG_DATA + 7); if (eventCount !== oldEventCount) { state._EventCount0x1A = eventCount; if (oldEventCount > eventCount) { //Hit rollover value eventCount += 255; } } const oldWheelTicks = (state.WheelTicks || 0) % 256; if (wheelTicks !== oldWheelTicks) { if (oldWheelTicks > wheelTicks) { wheelTicks += 65536; } } state.WheelTicks = (state.WheelTicks || 0) + wheelTicks - oldWheelTicks; const oldWheelPeriod = (state.WheelPeriod || 0) % 256; if (accWheelPeriod !== oldWheelPeriod) { if (oldWheelPeriod > accWheelPeriod) { accWheelPeriod += 65536; } } state.WheelPeriod = (state.WheelPeriod || 0) + accWheelPeriod - oldWheelPeriod; const oldTorque = (state.Torque || 0) % 256; if (accTorque !== oldTorque) { if (oldTorque > accTorque) { accTorque += 65536; } } state.Torque = (state.Torque || 0) + accTorque - oldTorque; switch ((flagStateBF & 0x70) >> 4) { case 1: state.State = 'OFF'; break; case 2: state.State = 'READY'; resetState(state); break; case 3: state.State = 'IN_USE'; break; case 4: state.State = 'FINISHED'; break; default: delete state.State; break; } if (flagStateBF & 0x80) { // lap } break; } case 0x50: { state.HwVersion = data.readUInt8(Messages.BUFFER_INDEX_MSG_DATA + 3); state.ManId = data.readUInt16LE(Messages.BUFFER_INDEX_MSG_DATA + 4); state.ModelNum = data.readUInt16LE(Messages.BUFFER_INDEX_MSG_DATA + 6); break; } case 0x51: { const swRevSup = data.readUInt8(Messages.BUFFER_INDEX_MSG_DATA + 2); const swRevMain = data.readUInt8(Messages.BUFFER_INDEX_MSG_DATA + 3); const serial = data.readInt32LE(Messages.BUFFER_INDEX_MSG_DATA + 4); state.SwVersion = swRevMain; if (swRevSup !== 0xFF) { state.SwVersion += swRevSup / 1000; } if (serial !== 0xFFFFFFFF) { state.SerialNumber = serial; } break; } 
case 0x56: { const idx = data.readUInt8(Messages.BUFFER_INDEX_MSG_DATA + 1); const tot = data.readUInt8(Messages.BUFFER_INDEX_MSG_DATA + 2); const chState = data.readUInt8(Messages.BUFFER_INDEX_MSG_DATA + 3); const devId = data.readUInt16LE(Messages.BUFFER_INDEX_MSG_DATA + 4); const trType = data.readUInt8(Messages.BUFFER_INDEX_MSG_DATA + 6); const devType = data.readUInt8(Messages.BUFFER_INDEX_MSG_DATA + 7); if (idx === 0) { state.PairedDevices = []; } if (tot > 0) { state.PairedDevices.push({ id: devId, type: devType, paired: (chState & 0x80) ? true : false }); } break; } default: return; } sensor.emit('fitnessData', state); }
the_stack
import { DEFAULT_KEY_LENGTH, EMPTY_HASH, EMPTY_VALUE, NodeSide } from './constants';
import { Leaf } from './leaf';
import { Database, Proof, Query } from './types';
import {
	parseBranchData,
	parseLeafData,
	isLeaf,
	binaryExpansion,
	sortByBitmapAndKey,
	binaryStringToBuffer,
	bufferToBinaryString,
	treeSort,
} from './utils';
import { binarySearch } from '../utils';
import { Branch } from './branch';
import { Empty } from './empty';

type TreeNode = Branch | Leaf | Empty;

type SingleProof = {
	key: Buffer;
	value: Buffer;
	binaryBitmap: string;
	ancestorHashes: Buffer[];
	siblingHashes: Buffer[];
};

type QueryWithHeight = {
	key: Buffer;
	value: Buffer;
	binaryBitmap: string;
	siblingHashes: Buffer[];
	height: number;
};

/**
 * Sparse Merkle tree backed by a key-value database, following LIP-0039.
 * One leaf per non-empty subtree; keys are fixed-length and addressed by
 * their binary expansion from the root.
 */
export class SparseMerkleTree {
	private readonly _db: Database;
	private readonly _keyLength: number;
	private _rootHash: Buffer;

	public constructor(options: { db: Database; rootHash?: Buffer; keyLength?: number }) {
		this._db = options.db;
		this._keyLength = options.keyLength ?? DEFAULT_KEY_LENGTH;
		// Make sure to always set rootHash explicitly whenever updating the tree
		this._rootHash = options.rootHash ?? EMPTY_HASH;
	}

	public get rootHash(): Buffer {
		return this._rootHash;
	}

	public get keyLength(): number {
		return this._keyLength;
	}

	/**
	 * Loads and parses the node stored under `nodeHash`.
	 * @throws when the hash is not present in the database.
	 */
	public async getNode(nodeHash: Buffer): Promise<TreeNode> {
		if (nodeHash.equals(EMPTY_HASH)) {
			return new Empty();
		}
		const data = await this._db.get(nodeHash);
		if (!data) {
			throw new Error(
				`Node with input hash: ${nodeHash.toString('hex')} does not exist in the tree`,
			);
		}
		if (isLeaf(data)) {
			const { key, value } = parseLeafData(data, this.keyLength);
			return new Leaf(key, value);
		}
		const { leftHash, rightHash } = parseBranchData(data);
		return new Branch(leftHash, rightHash);
	}

	// As specified in https://github.com/LiskHQ/lips/blob/master/proposals/lip-0039.md
	/**
	 * Inserts or replaces the leaf for `key`, rebuilding the branch hashes on
	 * the path from the leaf up to (and including) the new root.
	 */
	public async update(key: Buffer, value: Buffer): Promise<TreeNode> {
		if (value.length === 0) {
			throw new Error('Value cannot be empty');
		}
		if (key.byteLength !== this.keyLength) {
			throw new Error(`Key is not equal to defined key length of ${this.keyLength}`);
		}
		let rootNode = await this.getNode(this._rootHash);
		let currentNode = rootNode;
		const newLeaf = new Leaf(key, value);
		await this._db.set(newLeaf.hash, newLeaf.data);
		const binaryKey = binaryExpansion(key, this.keyLength);
		// if the currentNode is EMPTY node then assign it to leafNode and return
		if (currentNode instanceof Empty) {
			rootNode = newLeaf;
			this._rootHash = rootNode.hash;
			return rootNode;
		}
		// Walk down the key's bit path, recording every branch for the
		// bottom-up rehash later.
		let h = 0;
		const ancestorNodes: TreeNode[] = [];
		while (currentNode instanceof Branch) {
			const d = binaryKey.charAt(h);
			// Append currentNode to ancestorNodes
			ancestorNodes.push(currentNode);
			if (d === '0') {
				currentNode = await this.getNode(currentNode.leftHash);
			} else if (d === '1') {
				currentNode = await this.getNode(currentNode.rightHash);
			}
			h += 1;
		}
		// The currentNode is an empty node, newLeaf will replace the default
		// empty node or currentNode will be updated to newLeaf
		let bottomNode: TreeNode = new Empty();
		if (currentNode instanceof Empty) {
			// delete the empty node and update the tree, the new leaf will
			// substitute the empty node
			bottomNode = newLeaf;
		} else if (currentNode.key.equals(key)) {
			// Fix: key equality must use Buffer#equals — the previous `===`
			// reference comparison never matched, sending same-key updates into
			// the divergence loop below, which then never terminates because
			// both binary expansions are identical.
			bottomNode = newLeaf;
		} else {
			// We need to create new branches in the tree to fulfill the
			// Condition of one leaf per empty subtree
			// Note: h is set to the last value from the previous loop
			const currentNodeBinaryKey = binaryExpansion(currentNode.key, this.keyLength);
			while (binaryKey.charAt(h) === currentNodeBinaryKey.charAt(h)) {
				// Create branch node with empty value
				const newBranch = new Branch(EMPTY_HASH, EMPTY_HASH);
				// Append defaultBranch to ancestorNodes
				ancestorNodes.push(newBranch);
				h += 1;
			}
			// Create last branch node, parent of node and newLeaf
			const d = binaryKey.charAt(h);
			if (d === '0') {
				bottomNode = new Branch(newLeaf.hash, currentNode.hash);
				await this._db.set(bottomNode.hash, bottomNode.data);
			} else if (d === '1') {
				bottomNode = new Branch(currentNode.hash, newLeaf.hash);
				await this._db.set(bottomNode.hash, bottomNode.data);
			}
		}
		// Finally update all branch nodes in ancestorNodes
		// Starting from the last
		while (h > 0) {
			const p = ancestorNodes[h - 1];
			const d = binaryKey.charAt(h - 1);
			if (d === '0') {
				(p as Branch).update(bottomNode.hash, NodeSide.LEFT);
			} else if (d === '1') {
				(p as Branch).update(bottomNode.hash, NodeSide.RIGHT);
			}
			await this._db.set(p.hash, (p as Branch).data);
			bottomNode = p;
			h -= 1;
		}
		this._rootHash = bottomNode.hash;
		return bottomNode;
	}

	/**
	 * Removes the leaf for `key` (no-op when absent), collapsing now-redundant
	 * single-child branches so a lone sibling leaf moves up the tree.
	 */
	public async remove(key: Buffer): Promise<TreeNode> {
		if (key.length !== this.keyLength) {
			throw new Error(`Key is not equal to defined key length of ${this.keyLength}`);
		}
		const ancestorNodes: TreeNode[] = [];
		const binaryKey = binaryExpansion(key, this.keyLength);
		let currentNode = await this.getNode(this._rootHash);
		let h = 0;
		let currentNodeSibling: TreeNode = new Empty();
		// Collect all ancestor nodes through traversing the binary expansion by height
		// End of the loop ancestorNodes has all the branch nodes
		// currentNode will be the leaf/node we are looking to remove
		while (currentNode instanceof Branch) {
			ancestorNodes.push(currentNode);
			const d = binaryKey[h];
			if (d === '0') {
				currentNodeSibling = await this.getNode(currentNode.rightHash);
				currentNode = await this.getNode(currentNode.leftHash);
			} else if (d === '1') {
				currentNodeSibling = await this.getNode(currentNode.leftHash);
				currentNode = await this.getNode(currentNode.rightHash);
			}
			h += 1;
		}
		// When currentNode is empty, nothing to remove
		if (currentNode instanceof Empty) {
			return currentNode;
		}
		// When the input key does not match node key, nothing to remove
		if (!currentNode.key.equals(key)) {
			return currentNode;
		}
		let bottomNode: TreeNode = new Empty();
		// currentNode has a branch sibling, delete currentNode
		if (currentNodeSibling instanceof Branch) {
			await this._db.del(currentNode.hash);
		} else if (currentNodeSibling instanceof Leaf) {
			// currentNode has a leaf sibling,
			// remove the leaf and move sibling up the tree
			await this._db.del(currentNode.hash);
			bottomNode = currentNodeSibling;
			h -= 1;
			// In order to move sibling up the tree
			// an exact emptyHash check is required
			// not using EMPTY_HASH here to make sure we use correct hash from Empty class
			const emptyHash = new Empty().hash;
			while (h > 0) {
				const p = ancestorNodes[h - 1] as Branch;
				// if one of the children is empty then break the condition
				if (
					p instanceof Branch &&
					!p.leftHash.equals(emptyHash) &&
					!p.rightHash.equals(emptyHash)
				) {
					break;
				}
				await this._db.del(p.hash);
				h -= 1;
			}
		}
		// finally update all branch nodes in ancestorNodes.
		// note that h now is set to the correct height from which
		// nodes have to be updated
		while (h > 0) {
			const p = ancestorNodes[h - 1];
			const d = binaryKey.charAt(h - 1);
			if (d === '0') {
				(p as Branch).update(bottomNode.hash, NodeSide.LEFT);
			} else if (d === '1') {
				(p as Branch).update(bottomNode.hash, NodeSide.RIGHT);
			}
			await this._db.set(p.hash, (p as Branch).data);
			bottomNode = p;
			h -= 1;
		}
		this._rootHash = bottomNode.hash;
		return bottomNode;
	}

	/**
	 * Builds an inclusion proof for `queryKey` when present, otherwise an
	 * exclusion proof (returning the leaf or empty node found on the path).
	 * The bitmap records, bottom-up, which siblings on the path are non-empty.
	 */
	public async generateSingleProof(queryKey: Buffer): Promise<SingleProof> {
		const rootNode = await this.getNode(this._rootHash);
		let currentNode = rootNode;
		if (currentNode instanceof Empty) {
			return {
				key: queryKey,
				value: EMPTY_VALUE,
				binaryBitmap: bufferToBinaryString(EMPTY_VALUE),
				siblingHashes: [],
				ancestorHashes: [],
			};
		}
		let h = 0;
		const siblingHashes = [];
		const ancestorHashes = [];
		let binaryBitmap = '';
		const binaryKey = binaryExpansion(queryKey, this.keyLength);
		while (currentNode instanceof Branch) {
			ancestorHashes.push(currentNode.hash);
			const d = binaryKey.charAt(h);
			let currentNodeSibling: TreeNode = new Empty();
			if (d === '0') {
				currentNodeSibling = await this.getNode(currentNode.rightHash);
				currentNode = await this.getNode(currentNode.leftHash);
			} else if (d === '1') {
				currentNodeSibling = await this.getNode(currentNode.leftHash);
				currentNode = await this.getNode(currentNode.rightHash);
			}
			if (currentNodeSibling instanceof Empty) {
				binaryBitmap = `0${binaryBitmap}`;
			} else {
				binaryBitmap = `1${binaryBitmap}`;
				siblingHashes.push(currentNodeSibling.hash);
			}
			h += 1;
		}
		if (currentNode instanceof Empty) {
			// exclusion proof
			return {
				siblingHashes,
				ancestorHashes,
				binaryBitmap,
				key: queryKey,
				value: EMPTY_VALUE,
			};
		}
		if (!currentNode.key.equals(queryKey)) {
			// exclusion proof
			ancestorHashes.push(currentNode.hash); // in case the leaf is sibling to another node
			return {
				siblingHashes,
				ancestorHashes,
				binaryBitmap,
				key: currentNode.key,
				value: currentNode.value,
			};
		}
		// inclusion proof
		ancestorHashes.push(currentNode.hash); // in case the leaf is sibling to another node
		return {
			siblingHashes,
			ancestorHashes,
			binaryBitmap,
			key: currentNode.key,
			value: currentNode.value,
		};
	}

	/**
	 * Builds a combined proof for several query keys, de-duplicating sibling
	 * hashes that are derivable from other queries' ancestors.
	 * NOTE(review): this method continues beyond the current chunk.
	 */
	public async generateMultiProof(queryKeys: Buffer[]): Promise<Proof> {
		const partialQueries: SingleProof[] = [];
		for (const queryKey of queryKeys) {
			const query = await this.generateSingleProof(queryKey);
			partialQueries.push(query);
		}
		const queries: Query[] = [...partialQueries].map(sp => ({
			bitmap: binaryStringToBuffer(sp.binaryBitmap),
			key: sp.key,
			value: sp.value,
		}));
		const siblingHashes: Buffer[] = [];
		const ancestorHashes = [...partialQueries].map(sp => sp.ancestorHashes).flat();
		let sortedQueries: QueryWithHeight[] = [...partialQueries].map(sp => ({
			binaryBitmap: sp.binaryBitmap,
			key: sp.key,
			value: sp.value,
			siblingHashes: sp.siblingHashes,
			height: sp.binaryBitmap.length,
		}));
		sortedQueries = sortByBitmapAndKey(sortedQueries);
		while (sortedQueries.length > 0) {
			const sp = sortedQueries.shift()!;
			if (sp.height === 0) {
				continue;
			}
			const b = sp.binaryBitmap.charAt(sp.binaryBitmap.length - sp.height);
			if (b === '1') {
				const nodeHash = sp.siblingHashes.pop()!;
				let isPresentInSiblingHashes = false;
				let isPresentInAncestorHashes = false;
				for (const i of siblingHashes) {
					if (i.equals(nodeHash)) {
						isPresentInSiblingHashes = true;
						break;
					}
				}
				for (const i of ancestorHashes) {
					if (i.equals(nodeHash)) {
						isPresentInAncestorHashes = true;
						break;
					}
				}
				if (!isPresentInSiblingHashes && !isPresentInAncestorHashes) {
					// TODO : optimize this
					siblingHashes.push(nodeHash);
				}
			}
			sp.height -= 1;
			if (sortedQueries.length > 0) {
				const sortedQueriesWithBinaryKey = sortedQueries.map(query => ({
					binaryKey: binaryExpansion(query.key, this.keyLength),
					binaryBitmap: query.binaryBitmap,
					value: query.value,
					siblingHashes: query.siblingHashes,
					height: query.height,
				}));
				const spWithBinaryKey = {
					binaryKey: binaryExpansion(sp.key, this.keyLength),
					binaryBitmap: sp.binaryBitmap,
					value: sp.value,
siblingHashes: sp.siblingHashes, height: sp.height, }; const insertIndex = binarySearch( sortedQueriesWithBinaryKey, callback => treeSort(spWithBinaryKey, callback) < 0, ); if (insertIndex === sortedQueries.length) { sortedQueries.push(sp); } else { const keyPrefix = binaryExpansion(sp.key, this.keyLength).substring(0, sp.height); const query = sortedQueries[insertIndex]; if (!binaryExpansion(query.key, this.keyLength).endsWith(keyPrefix, query.height)) { sortedQueries.splice(insertIndex, 0, sp); } } } else { sortedQueries.push(sp); } } return { siblingHashes, queries }; } }
the_stack
/** CreateFileSystem response structure. */
export interface CreateFileSystemResponse {
  /** The created file system. */
  FileSystem?: FileSystem;
  /** Unique request ID, returned with every request. Provide this RequestId when reporting an issue. */
  RequestId?: string;
}

/** An access (permission) rule. */
export interface AccessRule {
  /** Access rule ID. */
  AccessRuleId?: number;
  /** Access rule address (CIDR block or IP). */
  Address?: string;
  /** Access mode of the rule (1: read-only; 2: read-write). */
  AccessMode?: number;
  /** Priority (range 1–100; the smaller the value, the higher the priority). */
  Priority?: number;
  /** Creation time. */
  CreateTime?: string;
}

/** DescribeFileSystem response structure. */
export interface DescribeFileSystemResponse {
  /** The file system. */
  FileSystem?: FileSystem;
  /** Used capacity of the file system (deprecated). Note: this field may return null, indicating that no valid value could be obtained. */
  FileSystemCapacityUsed?: number;
  /** Used capacity in bytes, including both standard and archive storage. Note: this field may return null, indicating that no valid value could be obtained. */
  CapacityUsed?: number;
  /** Used archive-storage capacity in bytes. Note: this field may return null, indicating that no valid value could be obtained. */
  ArchiveCapacityUsed?: number;
  /** Unique request ID, returned with every request. Provide this RequestId when reporting an issue. */
  RequestId?: string;
}

/** An access (permission) group. */
export interface AccessGroup {
  /** Access group ID. */
  AccessGroupId: string;
  /** Access group name. */
  AccessGroupName: string;
  /** Access group description. */
  Description: string;
  /** Creation time. */
  CreateTime: string;
}

/** ModifyFileSystem response structure. */
export interface ModifyFileSystemResponse {
  /** Unique request ID, returned with every request. Provide this RequestId when reporting an issue. */
  RequestId?: string;
}

/** ModifyFileSystem request structure. */
export interface ModifyFileSystemRequest {
  /** File system ID. */
  FileSystemId: string;
  /** File system name. */
  FileSystemName?: string;
  /** File system description. */
  Description?: string;
  /** File system capacity quota in bytes; minimum 1 GB, maximum 1 PB, and must be an integer multiple of 1 GB. Note: the modified quota cannot be smaller than the current usage. */
  CapacityQuota?: number;
}

/** DescribeLifeCycleRules request structure. */
export interface DescribeLifeCycleRulesRequest {
  /** File system ID. */
  FileSystemId: string;
}

/** ModifyAccessGroup request structure. */
export interface ModifyAccessGroupRequest {
  /** Access group ID. */
  AccessGroupId: string;
  /** Access group name. */
  AccessGroupName?: string;
  /** Access group description. */
  Description?: string;
}

/** DescribeFileSystems response structure. */
export interface DescribeFileSystemsResponse {
  /** List of file systems. */
  FileSystems?: Array<FileSystem>;
  /** Unique request ID, returned with every request. Provide this RequestId when reporting an issue. */
  RequestId?: string;
}

/** DescribeFileSystem request structure. */
export interface DescribeFileSystemRequest {
  /** File system ID. */
  FileSystemId: string;
}

/** CreateMountPoint request structure. */
export interface CreateMountPointRequest {
  /** Mount point name. */
  MountPointName: string;
  /** File system ID. */
  FileSystemId: string;
  /** Access group ID. */
  AccessGroupId: string;
  /** VPC network ID. */
  VpcId: string;
  /** Mount point status (1: enabled; 2: disabled). */
  MountPointStatus: number;
  /** VPC network type (1: CVM; 2: Bare Metal 1.0; 3: Bare Metal 2.0). */
  VpcType: number;
}

/** DeleteAccessGroup request structure. */
export interface DeleteAccessGroupRequest {
  /** Access group ID. */
  AccessGroupId: string;
}

/** ModifyLifeCycleRules request structure. */
export interface ModifyLifeCycleRulesRequest {
  /** Multiple lifecycle rules; at most 10. */
  LifeCycleRules: Array<LifeCycleRule>;
}

/** CreateRestoreTasks response structure. */
export interface CreateRestoreTasksResponse {
  /** Unique request ID, returned with every request. Provide this RequestId when reporting an issue. */
  RequestId?: string;
}

/** DeleteFileSystem request structure. */
export interface DeleteFileSystemRequest {
  /** File system ID. */
  FileSystemId: string;
}

/** DescribeMountPoints request structure. */
export interface DescribeMountPointsRequest {
  /** File system ID. Note: when listing mount points by AccessGroupId, FileSystemId does not need to be set. */
  FileSystemId?: string;
  /** Access group ID. Note: when listing mount points by FileSystemId, AccessGroupId does not need to be set. */
  AccessGroupId?: string;
  /** Offset; defaults to 0. */
  Offset?: number;
  /** Number of results to return; defaults to all. */
  Limit?: number;
}

/** DescribeMountPoint request structure. */
export interface DescribeMountPointRequest {
  /** Mount point ID. */
  MountPointId: string;
}

/** DescribeAccessGroups request structure. */
export interface DescribeAccessGroupsRequest {
  /** Filter conditions. Name may be "AccessGroupId" or "AccessGroupName"; Values holds at most 10 entries. */
  Filters?: Array<Filter>;
  /** Offset; defaults to 0. */
  Offset?: number;
  /** Number of results to return; defaults to all. */
  Limit?: number;
}

/** DeleteAccessGroup response structure. */
export interface DeleteAccessGroupResponse {
  /** Unique request ID, returned with every request. Provide this RequestId when reporting an issue. */
  RequestId?: string;
}

/** ModifyLifeCycleRules response structure. */
export interface ModifyLifeCycleRulesResponse {
  /** Unique request ID, returned with every request. Provide this RequestId when reporting an issue. */
  RequestId?: string;
}

/** DeleteMountPoint response structure. */
export interface DeleteMountPointResponse {
  /** Unique request ID, returned with every request. Provide this RequestId when reporting an issue. */
  RequestId?: string;
}

/** CreateMountPoint response structure. */
export interface CreateMountPointResponse {
  /** The created mount point. */
  MountPoint?: MountPoint;
  /** Unique request ID, returned with every request. Provide this RequestId when reporting an issue. */
  RequestId?: string;
}

/** CreateFileSystem request structure. */
export interface CreateFileSystemRequest {
  /** File system name. */
  FileSystemName: string;
  /** File system capacity quota in bytes; minimum 1 GB, maximum 1 PB, and must be an integer multiple of 1 GB. */
  CapacityQuota: number;
  /** File system description. */
  Description?: string;
}

/** ModifyResourceTags request structure. */
export interface ModifyResourceTagsRequest {
  /** File system ID. */
  FileSystemId: string;
  /** Multiple resource tags; may be an empty array. */
  Tags?: Array<Tag>;
}

/** DescribeResourceTags response structure. */
export interface DescribeResourceTagsResponse {
  /** List of resource tags. */
  Tags?: Array<Tag>;
  /** Unique request ID, returned with every request. Provide this RequestId when reporting an issue. */
  RequestId?: string;
}

/** DescribeAccessGroups response structure. */
export interface DescribeAccessGroupsResponse {
  /** List of access groups. */
  AccessGroups?: Array<AccessGroup>;
  /** Unique request ID, returned with every request. Provide this RequestId when reporting an issue. */
  RequestId?: string;
}

/** DeleteMountPoint request structure. */
export interface DeleteMountPointRequest {
  /** Mount point ID. */
  MountPointId: string;
}

/** DeleteFileSystem response structure. */
export interface DeleteFileSystemResponse {
  /** Unique request ID, returned with every request. Provide this RequestId when reporting an issue. */
  RequestId?: string;
}

/** CreateLifeCycleRules request structure. */
export interface CreateLifeCycleRulesRequest {
  /** File system ID. */
  FileSystemId: string;
  /** Multiple lifecycle rules; at most 10. */
  LifeCycleRules: Array<LifeCycleRule>;
}

/** A filter condition. */
export interface Filter {
  /** Field to filter on. */
  Name: string;
  /** Values to filter by. */
  Values: Array<string>;
}

/** CreateAccessGroup response structure. */
export interface CreateAccessGroupResponse {
  /** The created access group. */
  AccessGroup?: AccessGroup;
  /** Unique request ID, returned with every request. Provide this RequestId when reporting an issue. */
  RequestId?: string;
}

/** DeleteLifeCycleRules request structure. */
export interface DeleteLifeCycleRulesRequest {
  /** Multiple lifecycle rule IDs; at most 10. */
  LifeCycleRuleIds: Array<number>;
}

/** A file system. */
export interface FileSystem {
  /** App ID. */
  AppId: number;
  /** File system name. */
  FileSystemName: string;
  /** File system description. */
  Description: string;
  /** Region. */
  Region: string;
  /** File system ID. */
  FileSystemId: string;
  /** Creation time. */
  CreateTime: string;
  /** File system block size in bytes. */
  BlockSize: number;
  /** File system capacity quota in bytes. */
  CapacityQuota: number;
  /** File system status (1: creating; 2: created successfully; 3: creation failed). */
  Status: number;
}

/** A restore (data rewarming) task. */
export interface RestoreTask {
  /** Restore task ID. */
  RestoreTaskId?: number;
  /** File path of the restore task. */
  FilePath?: string;
  /** Restore task type (1: standard; 2: expedited; 3: bulk). */
  Type?: number;
  /** Validity period, in days, of the temporary copy produced by the restore. */
  Days?: number;
  /** Restore task status (1: binding file; 2: file bound; 3: restoring file; 4: file restored). */
  Status?: number;
  /** Creation time. */
  CreateTime?: string;
}

/** ModifyMountPoint request structure. */
export interface ModifyMountPointRequest {
  /** Mount point ID. */
  MountPointId: string;
  /** Mount point name. */
  MountPointName?: string;
  /** Mount point status. */
  MountPointStatus?: number;
  /** Access group ID. */
  AccessGroupId?: string;
}

/** DeleteLifeCycleRules response structure. */
export interface DeleteLifeCycleRulesResponse {
  /** Unique request ID, returned with every request. Provide this RequestId when reporting an issue. */
  RequestId?: string;
}

/** CreateRestoreTasks request structure. */
export interface CreateRestoreTasksRequest {
  /** File system ID. */
  FileSystemId: string;
  /** Multiple restore tasks; at most 10. */
  RestoreTasks: Array<RestoreTask>;
}

/** Transition attribute of a lifecycle rule. */
export interface Transition {
  /** Trigger time in days. */
  Days: number;
  /** Transition type (1: archive; 2: delete). */
  Type: number;
}

/** DescribeRestoreTasks response structure. */
export interface DescribeRestoreTasksResponse {
  /** List of restore tasks. */
  RestoreTasks?: Array<RestoreTask>;
  /** Unique request ID, returned with every request. Provide this RequestId when reporting an issue. */
  RequestId?: string;
}

/** DescribeLifeCycleRules response structure. */
export interface DescribeLifeCycleRulesResponse {
  /** List of lifecycle rules. */
  LifeCycleRules?: Array<LifeCycleRule>;
  /** Unique request ID, returned with every request. Provide this RequestId when reporting an issue. */
  RequestId?: string;
}

/** CreateAccessRules response structure. */
export interface CreateAccessRulesResponse {
  /** Unique request ID, returned with every request. Provide this RequestId when reporting an issue. */
  RequestId?: string;
}

/** DeleteAccessRules request structure. */
export interface DeleteAccessRulesRequest {
  /** Multiple access rule IDs; at most 10. */
  AccessRuleIds: Array<number>;
}

/** DeleteAccessRules response structure. */
export interface DeleteAccessRulesResponse {
  /** Unique request ID, returned with every request. Provide this RequestId when reporting an issue. */
  RequestId?: string;
}

/** DescribeFileSystems request structure. */
export interface DescribeFileSystemsRequest {
  /** Offset; defaults to 0. */
  Offset?: number;
  /** Number of results to return; defaults to all. */
  Limit?: number;
}

/** DescribeResourceTags request structure. */
export interface DescribeResourceTagsRequest {
  /** File system ID. */
  FileSystemId: string;
}

/** ModifyResourceTags response structure. */
export interface ModifyResourceTagsResponse {
  /** Unique request ID, returned with every request. Provide this RequestId when reporting an issue. */
  RequestId?: string;
}

/** A lifecycle rule. */
export interface LifeCycleRule {
  /** Lifecycle rule ID. */
  LifeCycleRuleId?: number;
  /** Lifecycle rule name. */
  LifeCycleRuleName?: string;
  /** Path (directory or file) the lifecycle rule applies to. */
  Path?: string;
  /** List of transitions of the lifecycle rule. */
  Transitions?: Array<Transition>;
  /** Lifecycle rule status (1: enabled; 2: disabled). */
  Status?: number;
  /** Creation time. */
  CreateTime?: string;
}

/** CreateAccessRules request structure. */
export interface CreateAccessRulesRequest {
  /** Multiple access rules; at most 10. */
  AccessRules: Array<AccessRule>;
  /** Access group ID. */
  AccessGroupId: string;
}

/** DescribeAccessRules response structure. */
export interface DescribeAccessRulesResponse {
  /** List of access rules. */
  AccessRules?: Array<AccessRule>;
  /** Unique request ID, returned with every request. Provide this RequestId when reporting an issue. */
  RequestId?: string;
}

/** A mount point. */
export interface MountPoint {
  /** Mount point ID. */
  MountPointId: string;
  /** Mount point name. */
  MountPointName?: string;
  /** File system ID. */
  FileSystemId: string;
  /** Access group ID. */
  AccessGroupId: string;
  /** VPC network ID. */
  VpcId: string;
  /** Mount point status (1: enabled; 2: disabled). */
  Status: number;
  /** Creation time. */
  CreateTime: string;
  /** VPC network type. */
  VpcType: number;
}

/** DescribeMountPoints response structure. */
export interface DescribeMountPointsResponse {
  /** List of mount points. */
  MountPoints?: Array<MountPoint>;
  /** Unique request ID, returned with every request. Provide this RequestId when reporting an issue. */
  RequestId?: string;
}

/** DescribeRestoreTasks request structure. */
export interface DescribeRestoreTasksRequest {
  /** File system ID. */
  FileSystemId: string;
}

/** DescribeAccessRules request structure. */
export interface DescribeAccessRulesRequest {
  /** Access group ID. */
  AccessGroupId: string;
  /** Offset; defaults to 0. */
  Offset?: number;
  /** Number of results to return; defaults to all. */
  Limit?: number;
}

/** ModifyAccessRules response structure. */
export interface ModifyAccessRulesResponse {
  /** Unique request ID, returned with every request. Provide this RequestId when reporting an issue. */
  RequestId?: string;
}

/** CreateLifeCycleRules response structure. */
export interface CreateLifeCycleRulesResponse {
  /** Unique request ID, returned with every request. Provide this RequestId when reporting an issue. */
  RequestId?: string;
}

/** ModifyAccessGroup response structure. */
export interface ModifyAccessGroupResponse {
  /** Unique request ID, returned with every request. Provide this RequestId when reporting an issue. */
  RequestId?: string;
}

/** A resource tag. */
export interface Tag {
  /** Tag key. */
  Key: string;
  /** Tag value. */
  Value: string;
}

/** ModifyMountPoint response structure. */
export interface ModifyMountPointResponse {
  /** Unique request ID, returned with every request. Provide this RequestId when reporting an issue. */
  RequestId?: string;
}

/** ModifyAccessRules request structure. */
export interface ModifyAccessRulesRequest {
  /** Multiple access rules; at most 10. */
  AccessRules: Array<AccessRule>;
}

/** CreateAccessGroup request structure. */
export interface CreateAccessGroupRequest {
  /** Access group name. */
  AccessGroupName: string;
  /** Access group description. */
  Description?: string;
}

/** DescribeMountPoint response structure. */
export interface DescribeMountPointResponse {
  /** The mount point. */
  MountPoint?: MountPoint;
  /** Unique request ID, returned with every request. Provide this RequestId when reporting an issue. */
  RequestId?: string;
}
the_stack
import { PagedAsyncIterableIterator } from "@azure/core-paging";
import { StorageAccounts } from "../operationsInterfaces";
import * as coreClient from "@azure/core-client";
import * as Mappers from "../models/mappers";
import * as Parameters from "../models/parameters";
import { DataLakeAnalyticsAccountManagementClient } from "../dataLakeAnalyticsAccountManagementClient";
import {
  StorageAccountInformation,
  StorageAccountsListByAccountNextOptionalParams,
  StorageAccountsListByAccountOptionalParams,
  StorageContainer,
  StorageAccountsListStorageContainersNextOptionalParams,
  StorageAccountsListStorageContainersOptionalParams,
  SasTokenInformation,
  StorageAccountsListSasTokensNextOptionalParams,
  StorageAccountsListSasTokensOptionalParams,
  StorageAccountsListByAccountResponse,
  AddStorageAccountParameters,
  StorageAccountsAddOptionalParams,
  StorageAccountsGetOptionalParams,
  StorageAccountsGetResponse,
  StorageAccountsUpdateOptionalParams,
  StorageAccountsDeleteOptionalParams,
  StorageAccountsListStorageContainersResponse,
  StorageAccountsGetStorageContainerOptionalParams,
  StorageAccountsGetStorageContainerResponse,
  StorageAccountsListSasTokensResponse,
  StorageAccountsListByAccountNextResponse,
  StorageAccountsListStorageContainersNextResponse,
  StorageAccountsListSasTokensNextResponse
} from "../models";

/// <reference lib="esnext.asynciterable" />
/** Class containing StorageAccounts operations. */
// NOTE(review): this looks like auto-generated Azure SDK code (autorest); edits
// here would normally be made in the generator, not by hand.
export class StorageAccountsImpl implements StorageAccounts {
  private readonly client: DataLakeAnalyticsAccountManagementClient;

  /**
   * Initialize a new instance of the class StorageAccounts class.
   * @param client Reference to the service client
   */
  constructor(client: DataLakeAnalyticsAccountManagementClient) {
    this.client = client;
  }

  /**
   * Gets the first page of Azure Storage accounts, if any, linked to the specified Data Lake Analytics
   * account. The response includes a link to the next page, if any.
   * @param resourceGroupName The name of the Azure resource group.
   * @param accountName The name of the Data Lake Analytics account.
   * @param options The options parameters.
   */
  public listByAccount(
    resourceGroupName: string,
    accountName: string,
    options?: StorageAccountsListByAccountOptionalParams
  ): PagedAsyncIterableIterator<StorageAccountInformation> {
    // Delegates element iteration to the *PagingAll generator and page iteration
    // to the *PagingPage generator; `byPage` is an arrow fn so `this` is the impl.
    const iter = this.listByAccountPagingAll(
      resourceGroupName,
      accountName,
      options
    );
    return {
      next() {
        return iter.next();
      },
      [Symbol.asyncIterator]() {
        return this;
      },
      byPage: () => {
        return this.listByAccountPagingPage(
          resourceGroupName,
          accountName,
          options
        );
      }
    };
  }

  // Yields one page (array) per service round-trip, following nextLink until
  // the service stops returning a continuation token.
  private async *listByAccountPagingPage(
    resourceGroupName: string,
    accountName: string,
    options?: StorageAccountsListByAccountOptionalParams
  ): AsyncIterableIterator<StorageAccountInformation[]> {
    let result = await this._listByAccount(
      resourceGroupName,
      accountName,
      options
    );
    yield result.value || [];
    let continuationToken = result.nextLink;
    while (continuationToken) {
      result = await this._listByAccountNext(
        resourceGroupName,
        accountName,
        continuationToken,
        options
      );
      continuationToken = result.nextLink;
      yield result.value || [];
    }
  }

  // Flattens the page iterator into a per-element iterator.
  private async *listByAccountPagingAll(
    resourceGroupName: string,
    accountName: string,
    options?: StorageAccountsListByAccountOptionalParams
  ): AsyncIterableIterator<StorageAccountInformation> {
    for await (const page of this.listByAccountPagingPage(
      resourceGroupName,
      accountName,
      options
    )) {
      yield* page;
    }
  }

  /**
   * Lists the Azure Storage containers, if any, associated with the specified Data Lake Analytics and
   * Azure Storage account combination. The response includes a link to the next page of results, if any.
   * @param resourceGroupName The name of the Azure resource group.
   * @param accountName The name of the Data Lake Analytics account.
   * @param storageAccountName The name of the Azure storage account from which to list blob containers.
   * @param options The options parameters.
   */
  public listStorageContainers(
    resourceGroupName: string,
    accountName: string,
    storageAccountName: string,
    options?: StorageAccountsListStorageContainersOptionalParams
  ): PagedAsyncIterableIterator<StorageContainer> {
    const iter = this.listStorageContainersPagingAll(
      resourceGroupName,
      accountName,
      storageAccountName,
      options
    );
    return {
      next() {
        return iter.next();
      },
      [Symbol.asyncIterator]() {
        return this;
      },
      byPage: () => {
        return this.listStorageContainersPagingPage(
          resourceGroupName,
          accountName,
          storageAccountName,
          options
        );
      }
    };
  }

  // Page-at-a-time generator for listStorageContainers (see listByAccountPagingPage).
  private async *listStorageContainersPagingPage(
    resourceGroupName: string,
    accountName: string,
    storageAccountName: string,
    options?: StorageAccountsListStorageContainersOptionalParams
  ): AsyncIterableIterator<StorageContainer[]> {
    let result = await this._listStorageContainers(
      resourceGroupName,
      accountName,
      storageAccountName,
      options
    );
    yield result.value || [];
    let continuationToken = result.nextLink;
    while (continuationToken) {
      result = await this._listStorageContainersNext(
        resourceGroupName,
        accountName,
        storageAccountName,
        continuationToken,
        options
      );
      continuationToken = result.nextLink;
      yield result.value || [];
    }
  }

  // Element-at-a-time generator for listStorageContainers.
  private async *listStorageContainersPagingAll(
    resourceGroupName: string,
    accountName: string,
    storageAccountName: string,
    options?: StorageAccountsListStorageContainersOptionalParams
  ): AsyncIterableIterator<StorageContainer> {
    for await (const page of this.listStorageContainersPagingPage(
      resourceGroupName,
      accountName,
      storageAccountName,
      options
    )) {
      yield* page;
    }
  }

  /**
   * Gets the SAS token associated with the specified Data Lake Analytics and Azure Storage account and
   * container combination.
   * @param resourceGroupName The name of the Azure resource group.
   * @param accountName The name of the Data Lake Analytics account.
   * @param storageAccountName The name of the Azure storage account for which the SAS token is being
   *                           requested.
   * @param containerName The name of the Azure storage container for which the SAS token is being
   *                      requested.
   * @param options The options parameters.
   */
  public listSasTokens(
    resourceGroupName: string,
    accountName: string,
    storageAccountName: string,
    containerName: string,
    options?: StorageAccountsListSasTokensOptionalParams
  ): PagedAsyncIterableIterator<SasTokenInformation> {
    const iter = this.listSasTokensPagingAll(
      resourceGroupName,
      accountName,
      storageAccountName,
      containerName,
      options
    );
    return {
      next() {
        return iter.next();
      },
      [Symbol.asyncIterator]() {
        return this;
      },
      byPage: () => {
        return this.listSasTokensPagingPage(
          resourceGroupName,
          accountName,
          storageAccountName,
          containerName,
          options
        );
      }
    };
  }

  // Page-at-a-time generator for listSasTokens.
  private async *listSasTokensPagingPage(
    resourceGroupName: string,
    accountName: string,
    storageAccountName: string,
    containerName: string,
    options?: StorageAccountsListSasTokensOptionalParams
  ): AsyncIterableIterator<SasTokenInformation[]> {
    let result = await this._listSasTokens(
      resourceGroupName,
      accountName,
      storageAccountName,
      containerName,
      options
    );
    yield result.value || [];
    let continuationToken = result.nextLink;
    while (continuationToken) {
      result = await this._listSasTokensNext(
        resourceGroupName,
        accountName,
        storageAccountName,
        containerName,
        continuationToken,
        options
      );
      continuationToken = result.nextLink;
      yield result.value || [];
    }
  }

  // Element-at-a-time generator for listSasTokens.
  private async *listSasTokensPagingAll(
    resourceGroupName: string,
    accountName: string,
    storageAccountName: string,
    containerName: string,
    options?: StorageAccountsListSasTokensOptionalParams
  ): AsyncIterableIterator<SasTokenInformation> {
    for await (const page of this.listSasTokensPagingPage(
      resourceGroupName,
      accountName,
      storageAccountName,
      containerName,
      options
    )) {
      yield* page;
    }
  }

  /**
   * Gets the first page of Azure Storage accounts, if any, linked to the specified Data Lake Analytics
   * account. The response includes a link to the next page, if any.
   * @param resourceGroupName The name of the Azure resource group.
   * @param accountName The name of the Data Lake Analytics account.
   * @param options The options parameters.
   */
  private _listByAccount(
    resourceGroupName: string,
    accountName: string,
    options?: StorageAccountsListByAccountOptionalParams
  ): Promise<StorageAccountsListByAccountResponse> {
    return this.client.sendOperationRequest(
      { resourceGroupName, accountName, options },
      listByAccountOperationSpec
    );
  }

  /**
   * Updates the specified Data Lake Analytics account to add an Azure Storage account.
   * @param resourceGroupName The name of the Azure resource group.
   * @param accountName The name of the Data Lake Analytics account.
   * @param storageAccountName The name of the Azure Storage account to add
   * @param parameters The parameters containing the access key and optional suffix for the Azure Storage
   *                   Account.
   * @param options The options parameters.
   */
  add(
    resourceGroupName: string,
    accountName: string,
    storageAccountName: string,
    parameters: AddStorageAccountParameters,
    options?: StorageAccountsAddOptionalParams
  ): Promise<void> {
    return this.client.sendOperationRequest(
      { resourceGroupName, accountName, storageAccountName, parameters, options },
      addOperationSpec
    );
  }

  /**
   * Gets the specified Azure Storage account linked to the given Data Lake Analytics account.
   * @param resourceGroupName The name of the Azure resource group.
   * @param accountName The name of the Data Lake Analytics account.
   * @param storageAccountName The name of the Azure Storage account for which to retrieve the details.
   * @param options The options parameters.
   */
  get(
    resourceGroupName: string,
    accountName: string,
    storageAccountName: string,
    options?: StorageAccountsGetOptionalParams
  ): Promise<StorageAccountsGetResponse> {
    return this.client.sendOperationRequest(
      { resourceGroupName, accountName, storageAccountName, options },
      getOperationSpec
    );
  }

  /**
   * Updates the Data Lake Analytics account to replace Azure Storage blob account details, such as the
   * access key and/or suffix.
   * @param resourceGroupName The name of the Azure resource group.
   * @param accountName The name of the Data Lake Analytics account.
   * @param storageAccountName The Azure Storage account to modify
   * @param options The options parameters.
   */
  update(
    resourceGroupName: string,
    accountName: string,
    storageAccountName: string,
    options?: StorageAccountsUpdateOptionalParams
  ): Promise<void> {
    return this.client.sendOperationRequest(
      { resourceGroupName, accountName, storageAccountName, options },
      updateOperationSpec
    );
  }

  /**
   * Updates the specified Data Lake Analytics account to remove an Azure Storage account.
   * @param resourceGroupName The name of the Azure resource group.
   * @param accountName The name of the Data Lake Analytics account.
   * @param storageAccountName The name of the Azure Storage account to remove
   * @param options The options parameters.
   */
  delete(
    resourceGroupName: string,
    accountName: string,
    storageAccountName: string,
    options?: StorageAccountsDeleteOptionalParams
  ): Promise<void> {
    return this.client.sendOperationRequest(
      { resourceGroupName, accountName, storageAccountName, options },
      deleteOperationSpec
    );
  }

  /**
   * Lists the Azure Storage containers, if any, associated with the specified Data Lake Analytics and
   * Azure Storage account combination. The response includes a link to the next page of results, if any.
   * @param resourceGroupName The name of the Azure resource group.
   * @param accountName The name of the Data Lake Analytics account.
   * @param storageAccountName The name of the Azure storage account from which to list blob containers.
   * @param options The options parameters.
   */
  private _listStorageContainers(
    resourceGroupName: string,
    accountName: string,
    storageAccountName: string,
    options?: StorageAccountsListStorageContainersOptionalParams
  ): Promise<StorageAccountsListStorageContainersResponse> {
    return this.client.sendOperationRequest(
      { resourceGroupName, accountName, storageAccountName, options },
      listStorageContainersOperationSpec
    );
  }

  /**
   * Gets the specified Azure Storage container associated with the given Data Lake Analytics and Azure
   * Storage accounts.
   * @param resourceGroupName The name of the Azure resource group.
   * @param accountName The name of the Data Lake Analytics account.
   * @param storageAccountName The name of the Azure storage account from which to retrieve the blob
   *                           container.
   * @param containerName The name of the Azure storage container to retrieve
   * @param options The options parameters.
   */
  getStorageContainer(
    resourceGroupName: string,
    accountName: string,
    storageAccountName: string,
    containerName: string,
    options?: StorageAccountsGetStorageContainerOptionalParams
  ): Promise<StorageAccountsGetStorageContainerResponse> {
    return this.client.sendOperationRequest(
      { resourceGroupName, accountName, storageAccountName, containerName, options },
      getStorageContainerOperationSpec
    );
  }

  /**
   * Gets the SAS token associated with the specified Data Lake Analytics and Azure Storage account and
   * container combination.
   * @param resourceGroupName The name of the Azure resource group.
   * @param accountName The name of the Data Lake Analytics account.
   * @param storageAccountName The name of the Azure storage account for which the SAS token is being
   *                           requested.
   * @param containerName The name of the Azure storage container for which the SAS token is being
   *                      requested.
   * @param options The options parameters.
   */
  private _listSasTokens(
    resourceGroupName: string,
    accountName: string,
    storageAccountName: string,
    containerName: string,
    options?: StorageAccountsListSasTokensOptionalParams
  ): Promise<StorageAccountsListSasTokensResponse> {
    return this.client.sendOperationRequest(
      { resourceGroupName, accountName, storageAccountName, containerName, options },
      listSasTokensOperationSpec
    );
  }

  /**
   * ListByAccountNext
   * @param resourceGroupName The name of the Azure resource group.
   * @param accountName The name of the Data Lake Analytics account.
   * @param nextLink The nextLink from the previous successful call to the ListByAccount method.
   * @param options The options parameters.
   */
  private _listByAccountNext(
    resourceGroupName: string,
    accountName: string,
    nextLink: string,
    options?: StorageAccountsListByAccountNextOptionalParams
  ): Promise<StorageAccountsListByAccountNextResponse> {
    return this.client.sendOperationRequest(
      { resourceGroupName, accountName, nextLink, options },
      listByAccountNextOperationSpec
    );
  }

  /**
   * ListStorageContainersNext
   * @param resourceGroupName The name of the Azure resource group.
   * @param accountName The name of the Data Lake Analytics account.
   * @param storageAccountName The name of the Azure storage account from which to list blob containers.
   * @param nextLink The nextLink from the previous successful call to the ListStorageContainers method.
   * @param options The options parameters.
   */
  private _listStorageContainersNext(
    resourceGroupName: string,
    accountName: string,
    storageAccountName: string,
    nextLink: string,
    options?: StorageAccountsListStorageContainersNextOptionalParams
  ): Promise<StorageAccountsListStorageContainersNextResponse> {
    return this.client.sendOperationRequest(
      { resourceGroupName, accountName, storageAccountName, nextLink, options },
      listStorageContainersNextOperationSpec
    );
  }

  /**
   * ListSasTokensNext
   * @param resourceGroupName The name of the Azure resource group.
   * @param accountName The name of the Data Lake Analytics account.
   * @param storageAccountName The name of the Azure storage account for which the SAS token is being
   *                           requested.
   * @param containerName The name of the Azure storage container for which the SAS token is being
   *                      requested.
   * @param nextLink The nextLink from the previous successful call to the ListSasTokens method.
   * @param options The options parameters.
   */
  private _listSasTokensNext(
    resourceGroupName: string,
    accountName: string,
    storageAccountName: string,
    containerName: string,
    nextLink: string,
    options?: StorageAccountsListSasTokensNextOptionalParams
  ): Promise<StorageAccountsListSasTokensNextResponse> {
    return this.client.sendOperationRequest(
      { resourceGroupName, accountName, storageAccountName, containerName, nextLink, options },
      listSasTokensNextOperationSpec
    );
  }
}
// Operation Specifications
// Declarative request/response specs consumed by coreClient.sendOperationRequest.
const serializer = coreClient.createSerializer(Mappers, /* isXml */ false);

const listByAccountOperationSpec: coreClient.OperationSpec = {
  path:
    "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataLakeAnalytics/accounts/{accountName}/storageAccounts",
  httpMethod: "GET",
  responses: {
    200: {
      bodyMapper: Mappers.StorageAccountInformationListResult
    },
    default: {
      bodyMapper: Mappers.ErrorResponse
    }
  },
  queryParameters: [
    Parameters.filter,
    Parameters.top,
    Parameters.skip,
    Parameters.select,
    Parameters.orderby,
    Parameters.count,
    Parameters.apiVersion
  ],
  urlParameters: [
    Parameters.$host,
    Parameters.subscriptionId,
    Parameters.resourceGroupName,
    Parameters.accountName
  ],
  headerParameters: [Parameters.accept],
  serializer
};
const addOperationSpec: coreClient.OperationSpec = {
  path:
    "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataLakeAnalytics/accounts/{accountName}/storageAccounts/{storageAccountName}",
  httpMethod: "PUT",
  responses: {
    200: {},
    default: {
      bodyMapper: Mappers.ErrorResponse
    }
  },
  requestBody: Parameters.parameters4,
  queryParameters: [Parameters.apiVersion],
  urlParameters: [
    Parameters.$host,
    Parameters.subscriptionId,
    Parameters.resourceGroupName,
    Parameters.accountName,
    Parameters.storageAccountName
  ],
  headerParameters: [Parameters.accept, Parameters.contentType],
  mediaType: "json",
  serializer
};
const getOperationSpec: coreClient.OperationSpec = {
  path:
    "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataLakeAnalytics/accounts/{accountName}/storageAccounts/{storageAccountName}",
  httpMethod: "GET",
  responses: {
    200: {
      bodyMapper: Mappers.StorageAccountInformation
    },
    default: {
      bodyMapper: Mappers.ErrorResponse
    }
  },
  queryParameters: [Parameters.apiVersion],
  urlParameters: [
    Parameters.$host,
    Parameters.subscriptionId,
    Parameters.resourceGroupName,
    Parameters.accountName,
    Parameters.storageAccountName
  ],
  headerParameters: [Parameters.accept],
  serializer
};
const updateOperationSpec: coreClient.OperationSpec = {
  path:
    "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataLakeAnalytics/accounts/{accountName}/storageAccounts/{storageAccountName}",
  httpMethod: "PATCH",
  responses: {
    200: {},
    default: {
      bodyMapper: Mappers.ErrorResponse
    }
  },
  requestBody: Parameters.parameters5,
  queryParameters: [Parameters.apiVersion],
  urlParameters: [
    Parameters.$host,
    Parameters.subscriptionId,
    Parameters.resourceGroupName,
    Parameters.accountName,
    Parameters.storageAccountName
  ],
  headerParameters: [Parameters.accept, Parameters.contentType],
  mediaType: "json",
  serializer
};
const deleteOperationSpec: coreClient.OperationSpec = {
  path:
    "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataLakeAnalytics/accounts/{accountName}/storageAccounts/{storageAccountName}",
  httpMethod: "DELETE",
  responses: {
    200: {},
    default: {
      bodyMapper: Mappers.ErrorResponse
    }
  },
  queryParameters: [Parameters.apiVersion],
  urlParameters: [
    Parameters.$host,
    Parameters.subscriptionId,
    Parameters.resourceGroupName,
    Parameters.accountName,
    Parameters.storageAccountName
  ],
headerParameters: [Parameters.accept], serializer }; const listStorageContainersOperationSpec: coreClient.OperationSpec = { path: "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataLakeAnalytics/accounts/{accountName}/storageAccounts/{storageAccountName}/containers", httpMethod: "GET", responses: { 200: { bodyMapper: Mappers.StorageContainerListResult }, default: { bodyMapper: Mappers.ErrorResponse } }, queryParameters: [Parameters.apiVersion], urlParameters: [ Parameters.$host, Parameters.subscriptionId, Parameters.resourceGroupName, Parameters.accountName, Parameters.storageAccountName ], headerParameters: [Parameters.accept], serializer }; const getStorageContainerOperationSpec: coreClient.OperationSpec = { path: "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataLakeAnalytics/accounts/{accountName}/storageAccounts/{storageAccountName}/containers/{containerName}", httpMethod: "GET", responses: { 200: { bodyMapper: Mappers.StorageContainer }, default: { bodyMapper: Mappers.ErrorResponse } }, queryParameters: [Parameters.apiVersion], urlParameters: [ Parameters.$host, Parameters.subscriptionId, Parameters.resourceGroupName, Parameters.accountName, Parameters.storageAccountName, Parameters.containerName ], headerParameters: [Parameters.accept], serializer }; const listSasTokensOperationSpec: coreClient.OperationSpec = { path: "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataLakeAnalytics/accounts/{accountName}/storageAccounts/{storageAccountName}/containers/{containerName}/listSasTokens", httpMethod: "POST", responses: { 200: { bodyMapper: Mappers.SasTokenInformationListResult }, default: { bodyMapper: Mappers.ErrorResponse } }, queryParameters: [Parameters.apiVersion], urlParameters: [ Parameters.$host, Parameters.subscriptionId, Parameters.resourceGroupName, Parameters.accountName, Parameters.storageAccountName, Parameters.containerName ], 
headerParameters: [Parameters.accept], serializer }; const listByAccountNextOperationSpec: coreClient.OperationSpec = { path: "{nextLink}", httpMethod: "GET", responses: { 200: { bodyMapper: Mappers.StorageAccountInformationListResult }, default: { bodyMapper: Mappers.ErrorResponse } }, queryParameters: [ Parameters.filter, Parameters.top, Parameters.skip, Parameters.select, Parameters.orderby, Parameters.count, Parameters.apiVersion ], urlParameters: [ Parameters.$host, Parameters.subscriptionId, Parameters.resourceGroupName, Parameters.accountName, Parameters.nextLink ], headerParameters: [Parameters.accept], serializer }; const listStorageContainersNextOperationSpec: coreClient.OperationSpec = { path: "{nextLink}", httpMethod: "GET", responses: { 200: { bodyMapper: Mappers.StorageContainerListResult }, default: { bodyMapper: Mappers.ErrorResponse } }, queryParameters: [Parameters.apiVersion], urlParameters: [ Parameters.$host, Parameters.subscriptionId, Parameters.resourceGroupName, Parameters.accountName, Parameters.nextLink, Parameters.storageAccountName ], headerParameters: [Parameters.accept], serializer }; const listSasTokensNextOperationSpec: coreClient.OperationSpec = { path: "{nextLink}", httpMethod: "GET", responses: { 200: { bodyMapper: Mappers.SasTokenInformationListResult }, default: { bodyMapper: Mappers.ErrorResponse } }, queryParameters: [Parameters.apiVersion], urlParameters: [ Parameters.$host, Parameters.subscriptionId, Parameters.resourceGroupName, Parameters.accountName, Parameters.nextLink, Parameters.storageAccountName, Parameters.containerName ], headerParameters: [Parameters.accept], serializer };
the_stack
// Unit tests for the Agenda Svelte component.
// Pattern used throughout: render(), then assert inside a zero-delay
// setTimeout so the component's async onMount/store subscriptions have
// flushed before querying the DOM.
// NOTE(review): several tests use `async (done) => ...`; Jest >= 27 rejects a
// test fn that both takes a `done` callback and returns a promise — confirm
// the Jest version pinned for this repo before upgrading.
import { fireEvent, render } from "@testing-library/svelte";
import * as connections from "../../commons/src/";
import { CalendarStore } from "../../commons/src/store/calendars";
import { EventStore } from "../../commons/src/store/events";
import { ManifestStore } from "../../commons/src/store/manifest";
import Agenda from "../../components/agenda/src/Agenda.svelte";
import { mockAgendaCalendar } from "../mocks/MockCalendars";
import { mockEvents } from "../mocks/MockEvents";
import { mockAgendaManifest } from "../mocks/MockManifests";

// Module-level mock so the named `fetchManifest` import below is a jest.fn().
// jest.mock is hoisted above the imports at runtime.
jest.mock("../../commons/src/connections/manifest", () => ({
  ...(jest.requireActual("../../commons/src/connections/manifest") as any),
  fetchManifest: jest.fn(),
}));
import { fetchManifest } from "../../commons/src/connections/manifest";

describe("Agenda Props", () => {
  beforeEach(() => {
    // NOTE(review): fetchManifest is mocked twice — via jest.mock above and
    // via this spy on the barrel re-export; presumably both resolve to the
    // same binding, but verify one of the two is redundant.
    jest
      .spyOn(connections, "fetchManifest")
      .mockImplementation(
        () => new Promise((resolve) => resolve(mockAgendaManifest)),
      );
    jest
      .spyOn(EventStore, "getEvents")
      .mockImplementation(
        () => new Promise((resolve) => resolve(mockEvents as any[])),
      );
    jest
      .spyOn(CalendarStore, "getCalendars")
      .mockImplementation(
        () => new Promise((resolve) => resolve(mockAgendaCalendar)),
      );
  });
  afterEach(() => {
    // Stores are module singletons; reset so tests don't leak state.
    CalendarStore.reset();
    EventStore.reset();
  });
  afterAll(() => jest.restoreAllMocks());

  it("should load an Agenda component with events passed as props", (done) => {
    const { container: agenda } = render(Agenda, {
      events: mockEvents,
    });
    setTimeout(() => {
      expect(agenda.querySelectorAll(".event").length).toBe(mockEvents.length);
      done();
    });
  });

  it("should call a custom function passed in as the click_action", async (done) => {
    // done() fires inside the handler, so the test only passes if the
    // click_action prop is actually invoked.
    const clickHandler = jest.fn((event, calendarEvent) => {
      expect(event).toBeTruthy();
      expect(calendarEvent.id).toBe(mockEvents[0].id);
      done();
    });
    const { getByText } = render(Agenda, {
      events: mockEvents,
      click_action: clickHandler,
    });
    setTimeout(
      async () => await fireEvent.click(getByText(<string>mockEvents[0].title)),
    );
  });

  it("should show the date change arrows only if allow_date_change is set to true", (done) => {
    const { container: agenda, component } = render(Agenda, {
      events: mockEvents,
      allow_date_change: true,
    });
    setTimeout(() => {
      // Two arrows expected: previous and next.
      expect(agenda.querySelectorAll(".change-date").length).toBe(2);
      component.allow_date_change = false;
      expect(agenda.querySelectorAll(".change-date").length).toBe(0);
      done();
    });
  });

  it("should change to the next day when the next arrow is clicked", async (done) => {
    const { container: agenda } = render(Agenda, {
      events: mockEvents,
      allow_date_change: true,
    });
    setTimeout(async () => {
      const tomorrow = new Date(new Date().setDate(new Date().getDate() + 1)); // Assume that default is current date
      await fireEvent.click(
        agenda.querySelector(".change-date.next") as Element,
      );
      // Rebuild the displayed date from the header's day + month text.
      const nextMonth = (agenda.querySelector("header .month h1") as Element)
        .textContent;
      const nextDay = (agenda.querySelector("header .day") as Element)
        .textContent;
      const nextDate = new Date(`${nextDay} ${nextMonth}`);
      expect(nextDate.toDateString()).toBe(tomorrow.toDateString());
      done();
    });
  });

  it("should change to the previous day when the previous arrow is clicked", async (done) => {
    const { container: agenda } = render(Agenda, {
      events: mockEvents,
      allow_date_change: true,
    });
    setTimeout(async () => {
      const yesterday = new Date(new Date().setDate(new Date().getDate() - 1)); // Assume that default is current date
      await fireEvent.click(
        agenda.querySelector(".change-date.prev") as Element,
      );
      const prevMonth = (agenda.querySelector("header .month h1") as Element)
        .textContent;
      const prevDay = (agenda.querySelector("header .day") as Element)
        .textContent;
      const prevDate = new Date(`${prevDay} ${prevMonth}`);
      expect(prevDate.toDateString()).toBe(yesterday.toDateString());
      done();
    });
  });

  it("should not allow changing the zoom level if the component is at the max zoom", async (done) => {
    const { container: agenda } = render(Agenda, {
      events: mockEvents,
      prevent_zoom: false,
      auto_time_box: false,
    });
    setTimeout(async () => {
      // Zoom level is reflected in the event's inline style attribute.
      const zoomableEvent = agenda.querySelector(".event") as Element;
      const initialZoomLevel = zoomableEvent.getAttribute("style");
      // Zooming in shouldn't work, because we are at max zoom by default
      await fireEvent.wheel(zoomableEvent, { deltaY: 5 });
      expect(initialZoomLevel).toBe(zoomableEvent.getAttribute("style"));
      // Zooming out should still work
      await fireEvent.wheel(zoomableEvent, { deltaY: -5 });
      expect(initialZoomLevel).not.toBe(zoomableEvent.getAttribute("style"));
      done();
    });
  });

  it("should change the zoom level when scrolling only if prevent_zoom is false", async (done) => {
    const { container: agenda, component } = render(Agenda, {
      events: mockEvents,
      prevent_zoom: true,
    });
    setTimeout(async () => {
      const zoomableEvent = agenda.querySelector(".event") as Element;
      const initialZoomLevel = zoomableEvent.getAttribute("style");
      // Wheel has no effect while prevent_zoom is true...
      await fireEvent.wheel(zoomableEvent, { deltaY: -5 });
      expect(initialZoomLevel).toBe(zoomableEvent.getAttribute("style"));
      component.prevent_zoom = false;
      // ...and takes effect once it is flipped off.
      await fireEvent.wheel(zoomableEvent, { deltaY: -5 });
      expect(initialZoomLevel).not.toBe(zoomableEvent.getAttribute("style"));
      done();
    });
  });

  it("should change the zoom level when scrolling only if condensed_view is false", async (done) => {
    const { container: agenda, component } = render(Agenda, {
      events: mockEvents,
      condensed_view: true,
    });
    setTimeout(async () => {
      const zoomableEvent = agenda.querySelector(".event") as Element;
      const initialZoomLevel = zoomableEvent.getAttribute("style");
      // Condensed view disables zooming entirely.
      await fireEvent.wheel(zoomableEvent, { deltaY: -5 });
      expect(initialZoomLevel).toBe(zoomableEvent.getAttribute("style"));
      component.condensed_view = false;
      await fireEvent.wheel(zoomableEvent, { deltaY: -5 });
      expect(initialZoomLevel).not.toBe(zoomableEvent.getAttribute("style"));
      done();
    });
  });

  it("should hide the header if header_type is none", (done) => {
    const { container: agenda, component } = render(Agenda, {
      events: mockEvents,
      header_type: "none",
    });
    setTimeout(() => {
      expect(agenda.querySelector(".headless")).toBeTruthy();
      component.header_type = "full";
      expect(agenda.querySelector(".headless")).toBeFalsy();
      done();
    });
  });

  it("should hide the month if header_type is day", (done) => {
    const { container: agenda, component } = render(Agenda, {
      events: mockEvents,
      header_type: "full",
    });
    setTimeout(() => {
      expect(agenda.querySelector("header .month")).toBeTruthy();
      component.header_type = "day";
      expect(agenda.querySelector("header .month")).toBeFalsy();
      done();
    });
  });

  it("should show the current time marker when hide_current_time is false", (done) => {
    const { container: agenda, component } = render(Agenda, {
      events: mockEvents,
      hide_current_time: false,
    });
    setTimeout(async () => {
      expect(agenda.querySelector("span.now")).toBeTruthy();
      component.hide_current_time = true;
      expect(agenda.querySelector("span.now")).toBeFalsy();
      done();
    });
  });
});

describe("Store and web requests", () => {
  // Spies are kept in suite scope so individual tests can assert call counts
  // or (in the multi-fetch test) re-install per-call implementations.
  let getEventSpy: jest.SpyInstance;
  let getCalendarSpy: jest.SpyInstance;
  beforeEach(() => {
    getEventSpy = jest
      .spyOn(EventStore, "getEvents")
      .mockImplementation(
        () => new Promise((resolve) => resolve(mockEvents as any[])),
      );
    getCalendarSpy = jest
      .spyOn(CalendarStore, "getCalendars")
      .mockImplementation(
        () => new Promise((resolve) => resolve(mockAgendaCalendar)),
      );
  });
  afterEach(() => {
    ManifestStore.set({});
    CalendarStore.reset();
    EventStore.reset();
    jest.resetAllMocks();
  });
  afterAll(() => jest.restoreAllMocks());

  it("should set the theme to the manifest's value if none is provided as a property", async (done) => {
    const mockManifestClone = { ...mockAgendaManifest, theme: "theme-4" };
    // Uses the jest.mock()-ed fetchManifest (see top of file).
    (<any>fetchManifest).mockImplementation(
      () => new Promise((resolve) => resolve(mockManifestClone)),
    );
    const { container: agenda } = render(Agenda, {
      id: "mock agenda ID 2",
    });
    setTimeout(() => {
      expect(getEventSpy).toHaveBeenCalled();
      expect(getCalendarSpy).toHaveBeenCalled();
      expect(agenda.querySelector(".theme-4")).toBeTruthy();
      done();
    });
  });

  it("should fetch from the event store if no events are passed in as props", (done) => {
    const mockComponentId = "mock agenda ID";
    // Pre-seed the manifest cache so the component skips the network fetch;
    // the cache key is a JSON-serialized {component_id, access_token} pair.
    const mockManifestKey = JSON.stringify({
      component_id: mockComponentId,
      access_token: "",
    });
    ManifestStore.update(() => ({
      [mockManifestKey]: new Promise((resolve) => resolve(mockAgendaManifest)),
    }));
    const { container: agenda } = render(Agenda, {
      id: mockComponentId,
    });
    setTimeout(() => {
      expect(getEventSpy).toHaveBeenCalled();
      expect(getCalendarSpy).toHaveBeenCalled();
      expect(agenda.querySelectorAll(".event").length).toBe(mockEvents.length);
      done();
    });
  });

  it("should set the current day's events when multiple ", (done) => {
    // Four sequential getEvents calls resolve out of order (delays 10/10/20/5ms)
    // to verify the component keeps only the events for the day it is showing.
    getEventSpy.mockReset();
    getEventSpy = jest
      .spyOn(EventStore, "getEvents")
      .mockImplementationOnce(
        () =>
          new Promise((resolve) => setTimeout(() => resolve([] as any[]), 10)),
      )
      .mockImplementationOnce(
        () =>
          new Promise((resolve) =>
            setTimeout(() => resolve([mockEvents[0]] as any[]), 10),
          ),
      )
      .mockImplementationOnce(
        () =>
          new Promise((resolve) =>
            setTimeout(() => resolve([mockEvents[1]] as any[]), 20),
          ),
      )
      .mockImplementationOnce(
        () =>
          new Promise((resolve) =>
            setTimeout(() => resolve(mockEvents as any[]), 5),
          ),
      );
    const mockComponentId = "mock agenda ID";
    const mockManifestKey = JSON.stringify({
      component_id: mockComponentId,
      access_token: "",
    });
    ManifestStore.update(() => ({
      [mockManifestKey]: new Promise((resolve) => resolve(mockAgendaManifest)),
    }));
    const { container: agenda } = render(Agenda, {
      id: mockComponentId,
    });
    setTimeout(async () => {
      // Advance two days so all mocked fetches are triggered.
      await fireEvent.click(
        agenda.querySelector(".change-date.next") as Element,
      );
      await fireEvent.click(
        agenda.querySelector(".change-date.next") as Element,
      );
      setTimeout(() => {
        expect(agenda.querySelectorAll(".event").length).toBe(
          mockEvents.length,
        );
        done();
      }, 200);
    });
  });
});
the_stack
import { isWhitespace } from './utils/text';
import EventEmitter from './utils/eventEmitter';
import { Row, Col, CursorOptions } from './types';
import Path from './path';
import Document from './document';
import Session from './session';

// Characters considered part of a "word" for w/e/b-style movement.
const wordRegex = /^[a-z0-9_]+$/i;

// options for word movements, e.g. w/e/b/f/t
type WordMovementOptions = {
  // whether the word should consider all non-whitespace characters
  whitespaceWord?: boolean,
  // whether to stop before the character found
  beforeFound?: boolean,
  cursor?: CursorOptions,
};

// TODO: make a view class which includes viewRoot and cursor
/*
Cursor represents a cursor within a session
Handles movement logic
*/
export default class Cursor extends EventEmitter {
  public col: Col;
  public path: Path;
  public session: Session;
  public document: Document;
  // "Goal" column preserved across row changes (vim-like behavior);
  // negative values count from the end of the line (-1 = last col).
  private moveCol: Col;

  constructor(
    session: Session, path: Path,
    col: Col = 0, moveCol: Col | null = null
  ) {
    super();
    this.session = session;
    this.document = session.document;
    this.path = path;
    this.col = col;
    // -1 means last col
    this.moveCol = moveCol !== null ? moveCol : col;
  }

  get row() { return this.path.row; }

  public clone() {
    // paths are immutable so this is okay
    return new Cursor(this.session, this.path, this.col, this.moveCol);
  }

  // Emits 'rowChange' before mutating; listeners may react asynchronously.
  public async _setPath(path: Path) {
    await this.emitAsync('rowChange', this.path, path);
    this.path = path;
  }

  // Emits 'colChange' before mutating.
  private async _setCol(col: Col) {
    await this.emitAsync('colChange', this.col, col);
    this.col = col;
  }

  // Copy position (path, col, goal column) from another cursor.
  public async from(other: Cursor) {
    await this._setPath(other.path);
    await this._setCol(other.col);
    this.moveCol = other.moveCol;
  }

  public async setPosition(path: Path, col: Col, cursorOptions?: CursorOptions) {
    await this._setPath(path);
    await this.setCol(col, cursorOptions);
  }

  // Move to a new row, re-deriving the column from the goal column.
  public async setPath(path: Path, cursorOptions?: CursorOptions) {
    await this._setPath(path);
    await this._fromMoveCol(cursorOptions);
  }

  public async setCol(moveCol: Col, cursorOptions: CursorOptions = { pastEnd: true }) {
    this.moveCol = moveCol;
    await this._fromMoveCol(cursorOptions);
    // if moveCol was too far, fix it
    // NOTE: this should happen for setting column, but not path
    if (this.moveCol >= 0) {
      this.moveCol = this.col;
    }
  }

  // Clamp the goal column into the current row's valid range and apply it.
  // pastEnd allows the cursor one position past the last character (insert mode).
  private async _fromMoveCol(cursorOptions: CursorOptions = {}) {
    const len = await this.document.getLength(this.path.row);
    const maxcol = len - (cursorOptions.pastEnd ? 0 : 1);
    let col;
    if (this.moveCol < 0) {
      // negative moveCol counts back from end of line
      col = Math.max(0, len + this.moveCol + 1);
    } else {
      col = Math.max(0, Math.min(maxcol, this.moveCol));
    }
    await this._setCol(col);
  }

  private async _left() { await this.setCol(this.col - 1); }
  private async _right() { await this.setCol(this.col + 1); }

  public async left() {
    if (this.col > 0) { await this._left(); }
  }

  public async right(cursorOptions: {pastEnd?: boolean} = {}) {
    const shift = cursorOptions.pastEnd ? 0 : 1;
    if (this.col < (await this.document.getLength(this.path.row)) - shift) {
      await this._right();
    }
  }

  // Pull the cursor back if it sits past the last character; returns whether it moved.
  public async backIfNeeded() {
    if (this.col > (await this.document.getLength(this.path.row)) - 1) {
      await this.left();
      return true;
    }
    return false;
  }

  // True iff at the last column of the last visible row.
  public async atVisibleEnd() {
    if (this.col < (await this.document.getLength(this.path.row)) - 1) {
      return false;
    } else {
      const nextpath = await this.session.nextVisible(this.path);
      if (nextpath !== null) {
        return false;
      }
    }
    return true;
  }

  // Advance one character, wrapping to the next visible row; returns whether it moved.
  private async _nextChar() {
    if (this.col < (await this.document.getLength(this.path.row)) - 1) {
      await this._right();
      return true;
    } else {
      const nextpath = await this.session.nextVisible(this.path);
      if (nextpath !== null) {
        await this.setPosition(nextpath, 0);
        return true;
      }
    }
    return false;
  }

  // True iff at column 0 of the first visible row.
  public async atVisibleStart() {
    if (this.col > 0) {
      return false;
    } else {
      const prevpath = await this.session.prevVisible(this.path);
      if (prevpath !== null) {
        return false;
      }
    }
    return true;
  }

  // Step back one character, wrapping to the end of the previous visible row.
  private async _prevChar() {
    if (this.col > 0) {
      await this._left();
      return true;
    } else {
      const prevpath = await this.session.prevVisible(this.path);
      if (prevpath !== null) {
        await this.setPosition(prevpath, -1);
        return true;
      }
    }
    return false;
  }

  public async home() {
    await this.setCol(0);
    return this;
  }

  // Move to end of line: -1 = one past last char (pastEnd), -2 = last char.
  public async end(cursorOptions: CursorOptions = {}) {
    await this.setCol(cursorOptions.pastEnd ? -1 : -2);
    return this;
  }

  // Jump to the first visible row (first child of root, or the view root itself).
  public async visibleHome() {
    let path;
    if (this.session.viewRoot.isRoot()) {
      const firstChild = await this.session.nextVisible(this.session.viewRoot);
      if (firstChild == null) {
        throw new Error('No next visible for root?');
      }
      if (firstChild.parent == null) {
        throw new Error('Next visible of root was root?');
      }
      path = firstChild;
    } else {
      path = this.session.viewRoot;
    }
    await this.setPosition(path, 0);
    return this;
  }

  public async visibleEnd() {
    const path = await this.session.lastVisible();
    await this.setPosition(path, 0);
    return this;
  }

  public async isInWhitespace(path: Path, col: Col) {
    const char = await this.document.getChar(path.row, col);
    return isWhitespace(char);
  }

  // Whether the char at (path, col) belongs to the same word class as matchChar:
  // word chars group with word chars, other non-whitespace with other non-whitespace.
  public async isInWord(path: Path, col: Col, matchChar: string) {
    if (isWhitespace(matchChar)) {
      return false;
    }
    const char = await this.document.getChar(path.row, col);
    if (isWhitespace(char)) {
      return false;
    }
    if (wordRegex.test(char)) {
      return wordRegex.test(matchChar);
    } else {
      return !(wordRegex.test(matchChar));
    }
  }

  // return function that sees whether we're still in the word
  private _getWordCheck(options: WordMovementOptions, matchChar: string) {
    if (options.whitespaceWord) {
      return async (path: Path, col: Col) => {
        return !(await this.isInWhitespace(path, col));
      };
    } else {
      return async (path: Path, col: Col) => await this.isInWord(path, col, matchChar);
    }
  }

  // vim 'b': move to the beginning of the previous (or current) word.
  public async beginningWord(options: WordMovementOptions = {}) {
    if (await this.atVisibleStart()) {
      return this;
    }
    await this._prevChar();
    while ((!(await this.atVisibleStart())) &&
           (await this.isInWhitespace(this.path, this.col))) {
      await this._prevChar();
    }
    const wordcheck = this._getWordCheck(
      options, await this.document.getChar(this.path.row, this.col)
    );
    while ((this.col > 0) && (await wordcheck(this.path, this.col - 1))) {
      await this._left();
    }
    return this;
  }

  // vim 'e': move to the end of the next word.
  public async endWord(options: WordMovementOptions = {}) {
    if (await this.atVisibleEnd()) {
      if (options.cursor && options.cursor.pastEnd) {
        await this._right();
      }
      return this;
    }
    await this._nextChar();
    while ((!(await this.atVisibleEnd())) &&
           (await this.isInWhitespace(this.path, this.col))) {
      await this._nextChar();
    }
    let end = (await this.document.getLength(this.path.row)) - 1;
    const wordcheck = this._getWordCheck(
      options, await this.document.getChar(this.path.row, this.col)
    );
    while ((this.col < end) && (await wordcheck(this.path, this.col + 1))) {
      await this._right();
    }
    if (options.cursor && options.cursor.pastEndWord) {
      await this._right();
    }
    // recompute: the row may have changed while walking
    end = (await this.document.getLength(this.path.row)) - 1;
    if (this.col === end && options.cursor && options.cursor.pastEnd) {
      await this._right();
    }
    return this;
  }

  // vim 'w': move to the beginning of the next word.
  public async nextWord(options: WordMovementOptions = {}) {
    if (await this.atVisibleEnd()) {
      if (options.cursor && options.cursor.pastEnd) {
        await this._right();
      }
      return this;
    }
    let end = (await this.document.getLength(this.path.row)) - 1;
    const wordcheck = this._getWordCheck(
      options, await this.document.getChar(this.path.row, this.col)
    );
    // skip to the end of the current word
    while ((this.col < end) && (await wordcheck(this.path, this.col + 1))) {
      await this._right();
    }
    await this._nextChar();
    // then skip whitespace until the next word begins (or the buffer ends)
    let found_next_word = false;
    let found_whitespace = false;
    while (true) {
      if (!await this.isInWhitespace(this.path, this.col)) {
        if (found_whitespace) {
          found_next_word = true;
        }
        break;
      }
      found_whitespace = true;
      if (await this.atVisibleEnd()) {
        break;
      }
      await this._nextChar();
    }
    if (!found_next_word) {
      if (options.cursor && options.cursor.pastEnd) {
        end = (await this.document.getLength(this.path.row)) - 1;
        if (this.col === end) {
          await this._right();
        }
      }
    }
    return this;
  }

  // vim 'f'/'t': find char forward within the current row.
  public async findNextChar(char: string, options: WordMovementOptions = {}) {
    const end = (await this.document.getLength(this.path.row)) - 1;
    if (this.col === end) {
      return;
    }
    let col = this.col;
    if (options.beforeFound) {
      col += 1;
    }
    let found: number | null = null;
    while (col < end) {
      col += 1;
      if ((await this.document.getChar(this.path.row, col)) === char) {
        found = col;
        break;
      }
    }
    if (found === null) {
      return;
    }
    await this.setCol(found);
    if (options.cursor && options.cursor.pastEnd) {
      await this._right();
    }
    if (options.beforeFound) {
      // 't' semantics: stop one char before the match
      return await this._left();
    }
  }

  // vim 'F'/'T': find char backward within the current row.
  public async findPrevChar(char: string, options: WordMovementOptions = {}) {
    if (this.col === 0) {
      return;
    }
    let col = this.col;
    if (options.beforeFound) {
      col -= 1;
    }
    let found: number | null = null;
    while (col > 0) {
      col -= 1;
      if ((await this.document.getChar(this.path.row, col)) === char) {
        found = col;
        break;
      }
    }
    if (found === null) {
      return;
    }
    await this.setCol(found);
    if (options.beforeFound) {
      await this._right();
    }
  }

  public async up(cursorOptions: CursorOptions = {}) {
    const path = await this.session.prevVisible(this.path);
    if (path !== null) {
      await this.setPath(path, cursorOptions);
    }
  }

  public async down(cursorOptions: CursorOptions = {}) {
    const path = await this.session.nextVisible(this.path);
    if (path !== null) {
      await this.setPath(path, cursorOptions);
    }
  }

  // Move to the parent row; zooms the view out if currently at the view root.
  public async parent(cursorOptions: CursorOptions = {}) {
    const newpath = this.path.parent;
    if (newpath == null) {
      throw new Error('Cursor was at root');
    }
    if (newpath.parent == null) {
      return;
    }
    if (this.path.is(this.session.viewRoot)) {
      await this.session.changeViewRoot(newpath);
    }
    return await this.setPath(newpath, cursorOptions);
  }

  public async prevSibling(cursorOptions: CursorOptions = {}) {
    const prevsib = await this.document.getSiblingBefore(this.path);
    if (prevsib !== null) {
      return await this.setPath(prevsib, cursorOptions);
    }
  }

  public async nextSibling(cursorOptions: CursorOptions = {}) {
    const nextsib = await this.document.getSiblingAfter(this.path);
    if (nextsib !== null) {
      return await this.setPath(nextsib, cursorOptions);
    }
  }
}

// NOTE: this doesnt go into the document tree since
// 1. it doesnt deduplicate cloned rows
// 2. implementation details, e.g. this isn't immutable
export class CursorsInfoTree {
  public row: Row;
  // actual cursor!
  public cursor: Col | null;
  // has regular selection
  public selected: {[col: number]: boolean};
  // is visually selected (entire row)
  public visual: boolean;
  // has children with selections
  public children: {[row: number]: CursorsInfoTree};
  public parent: CursorsInfoTree | null;
  // true if this node or any descendant carries a cursor/selection mark
  public hasSelection: boolean;

  constructor(row: Row, parent: null | CursorsInfoTree = null) {
    if (parent === null && (row !== Path.rootRow())) {
      throw new Error('CursorsInfoTree rooted at non-root row');
    }
    this.row = row;
    this.visual = false;
    this.selected = {};
    this.cursor = null;
    this.children = {};
    this.parent = parent;
    this.hasSelection = false;
  }

  // Walk (creating nodes on demand) to the tree node for the given path.
  public getPath(path: Path): CursorsInfoTree {
    let result = (this as CursorsInfoTree);
    path.getAncestry().forEach((row) => {
      result = result.getChild(row);
    });
    return result;
  }

  // Get or lazily create the child node for a row.
  public getChild(row: Row): CursorsInfoTree {
    const child = this.children[row];
    if (child != null) {
      return child;
    }
    const newChild = new CursorsInfoTree(row, this);
    this.children[row] = newChild;
    return newChild;
  }

  // Propagate the hasSelection flag up to the root (stops early if already set).
  private markSelected() {
    if (!this.hasSelection) {
      this.hasSelection = true;
      if (this.parent) {
        this.parent.markSelected();
      }
    }
  }

  public markCols(cols: Array<Col>) {
    cols.forEach((col) => {
      this.selected[col] = true;
    });
    this.markSelected();
  }

  public markCursor(col: Col) {
    this.cursor = col;
    this.markSelected();
  }

  public markVisual() {
    this.visual = true;
    this.markSelected();
  }

  public markChildrenVisual(childRows: Array<Row>) {
    childRows.forEach((row) => {
      const child = this.getChild(row);
      child.markVisual();
    });
  }
}
the_stack
import { Readable, Writable } from 'stream'
import Heap from 'qheap'
import { PeerPool } from '../../net/peerpool'
import { Peer } from '../../net/peer'
import { Config } from '../../config'
import { Event } from '../../types'
import { Job } from './types'

export interface FetcherOptions {
  /* Common chain config*/
  config: Config

  /* Peer pool */
  pool: PeerPool

  /* Fetch task timeout in ms (default: 8000) */
  timeout?: number

  /* How long to ban misbehaving peers in ms (default: 60000) */
  banTime?: number

  /* Max write queue size (default: 16) */
  maxQueue?: number

  /* Retry interval in ms (default: 1000) */
  interval?: number

  /* Destroy the fetcher once we are done */
  destroyWhenDone?: boolean
}

/**
 * Base class for fetchers that retrieve various data from peers. Subclasses must
 * request(), process() and store() methods. Tasks can be arbitrary objects whose structure
 * is defined by subclasses. A priority queue is used to ensure tasks are fetched
 * inorder. Three types need to be provided: the JobTask, which describes a task the job should perform,
 * a JobResult, which is the direct result when a Peer replies to a Task, and a StorageItem, which
 * represents the to-be-stored items.
 * @memberof module:sync/fetcher
 */
export abstract class Fetcher<JobTask, JobResult, StorageItem> extends Readable {
  public config: Config

  protected pool: PeerPool
  protected timeout: number
  protected interval: number
  protected banTime: number
  protected maxQueue: number
  // `in`: jobs waiting to be requested; `out`: processed jobs waiting to be
  // pushed downstream in index order. Both are min-heaps on job.index.
  protected in: Heap<Job<JobTask, JobResult, StorageItem>>
  protected out: Heap<Job<JobTask, JobResult, StorageItem>>
  protected total: number
  protected processed: number // number of processed tasks, awaiting the write job
  protected finished: number // number of tasks which are both processed and also finished writing
  protected running: boolean
  protected reading: boolean
  private destroyWhenDone: boolean // Destroy the fetcher once we are finished processing each task.

  private _readableState?: {
    // This property is inherited from Readable. We only need `length`.
    length: number
  }

  /**
   * Create new fetcher
   * @param {FetcherOptions}
   */
  constructor(options: FetcherOptions) {
    super({ ...options, objectMode: true })
    this.config = options.config
    this.pool = options.pool
    this.timeout = options.timeout ?? 8000
    this.interval = options.interval ?? 1000
    this.banTime = options.banTime ?? 60000
    this.maxQueue = options.maxQueue ?? 16

    this.in = new Heap({
      comparBefore: (
        a: Job<JobTask, JobResult, StorageItem>,
        b: Job<JobTask, JobResult, StorageItem>
      ) => a.index < b.index,
    })
    this.out = new Heap({
      comparBefore: (
        a: Job<JobTask, JobResult, StorageItem>,
        b: Job<JobTask, JobResult, StorageItem>
      ) => a.index < b.index,
    })
    this.total = 0
    this.processed = 0
    this.finished = 0
    this.running = false
    this.reading = false
    this.destroyWhenDone = options.destroyWhenDone ?? true
  }

  /**
   * Request results from peer for the given job. Resolves with the raw result. If `undefined` is returned,
   * re-queue the job.
   * @param job
   * @param peer
   * @return {Promise}
   */
  abstract request(
    _job?: Job<JobTask, JobResult, StorageItem>,
    _peer?: Peer
  ): Promise<JobResult | undefined>

  /**
   * Process the reply for the given job. If the reply contains unexpected data, return `undefined`, this
   * re-queues the job.
   * @param job fetch job
   * @param result result data
   */
  abstract process(
    _job?: Job<JobTask, JobResult, StorageItem>,
    _result?: JobResult
  ): StorageItem[] | undefined

  /**
   * Store fetch result. Resolves once store operation is complete.
   * @param result fetch result
   * @return {Promise}
   */
  abstract store(_result: StorageItem[]): Promise<void>

  /**
   * Generate list of tasks to fetch
   * @return {Object[]} tasks
   */
  tasks(): JobTask[] {
    return []
  }

  /**
   * Enqueue job
   * @param job
   */
  enqueue(job: Job<JobTask, JobResult, StorageItem>) {
    // Re-queued jobs keep their original index, so ordering is preserved.
    if (this.running) {
      this.in.insert({
        ...job,
        time: Date.now(),
        state: 'idle',
      })
    }
  }

  /**
   * Dequeue all done tasks that completed in order
   */
  dequeue() {
    // Push results downstream strictly in index order; stop both when a gap
    // is hit and when push() signals backpressure (returns false).
    for (let f = this.out.peek(); f && f.index === this.processed; ) {
      this.processed++
      const { result } = this.out.remove()!
      if (!this.push(result)) {
        return
      }
      f = this.out.peek()
    }
  }

  /**
   * Enqueues a task. If autoRestart is true, and Fetcher is not running, then restart the fetcher.
   * @param task
   * @param autoRestart
   */
  enqueueTask(task: JobTask, autoRestart = false) {
    // Without autoRestart, tasks enqueued on a stopped fetcher are dropped.
    if (!this.running && !autoRestart) {
      return
    }
    const job: Job<JobTask, JobResult, StorageItem> = {
      task,
      time: Date.now(),
      index: this.total++,
      state: 'idle',
      peer: null,
    }
    this.in.insert(job)
    if (!this.running && autoRestart) {
      // eslint-disable-next-line @typescript-eslint/no-floating-promises
      this.fetch()
    }
  }

  /**
   * Implements Readable._read() by pushing completed tasks to the read queue
   */
  _read() {
    this.dequeue()
  }

  /**
   * handle successful job completion
   * @private
   * @param job successful job
   * @param result job result
   */
  success(job: Job<JobTask, JobResult, StorageItem>, result?: JobResult) {
    // Guard against late/duplicate callbacks (job may already be expired).
    if (job.state !== 'active') return
    if (result === undefined) {
      // Peer replied with nothing useful: re-queue and idle the peer after a delay.
      this.enqueue(job)
      // TODO: should this promise actually float?
      // eslint-disable-next-line @typescript-eslint/no-floating-promises
      this.wait().then(() => {
        job.peer!.idle = true
      })
    } else {
      job.peer!.idle = true
      job.result = this.process(job, result)
      if (job.result) {
        this.out.insert(job)
        this.dequeue()
      } else {
        // process() rejected the reply: re-queue for another peer.
        this.enqueue(job)
      }
    }
    this.next()
  }

  /**
   * handle failed job completion
   * @private
   * @param job failed job
   * @param [error] error
   */
  failure(job: Job<JobTask, JobResult, StorageItem>, error?: Error) {
    if (job.state !== 'active') return
    job.peer!.idle = true
    this.pool.ban(job.peer!, this.banTime)
    this.enqueue(job)
    if (error) {
      this.error(error, job)
    }
    this.next()
  }

  /**
   * Process next task
   */
  next() {
    const job = this.in.peek()
    // Gate on: a job existing, downstream read buffer having room, the job not
    // running too far ahead of what has been pushed, and work remaining.
    if (
      !job ||
      this._readableState!.length > this.maxQueue ||
      job.index > this.processed + this.maxQueue ||
      this.processed === this.total
    ) {
      return false
    }
    const peer = this.peer()
    if (peer) {
      peer.idle = false
      this.in.remove()
      job.peer = peer
      job.state = 'active'
      const timeout = setTimeout(() => {
        this.expire(job)
      }, this.timeout)
      this.request(job, peer)
        .then((result?: JobResult) => this.success(job, result))
        .catch((error: Error) => this.failure(job, error))
        .finally(() => clearTimeout(timeout))
      return job
    }
    // NOTE(review): falls through returning undefined (falsy) when no idle
    // peer is available — callers only truthiness-check the return value.
  }

  /**
   * Clears all outstanding tasks from the fetcher
   */
  clear() {
    while (this.in.length > 0) {
      this.in.remove()
    }
  }

  /**
   * Handle error
   * @param {Error} error error object
   * @param {Object} job task
   */
  error(error: Error, job?: Job<JobTask, JobResult, StorageItem>) {
    if (this.running) {
      this.config.events.emit(Event.SYNC_FETCHER_ERROR, error, job && job.task, job && job.peer)
    }
  }

  /**
   * Setup writer pipe and start writing fetch results. A pipe is used in order
   * to support backpressure from storing results.
   */
  write() {
    const _write = async (result: StorageItem[], encoding: string | null, cb: Function) => {
      try {
        await this.store(result)
        this.finished++
        cb()
      } catch (error: any) {
        this.config.logger.warn(`Error along storing received block or header result: ${error}`)
        cb(error)
      }
    }
    const writer = new Writable({
      objectMode: true,
      write: _write,
      // Batched writes are flattened into a single store() call.
      writev: (many: { chunk: StorageItem; encoding: string }[], cb: Function) =>
        _write(
          (<StorageItem[]>[]).concat(
            ...many.map((x: { chunk: StorageItem; encoding: string }) => x.chunk)
          ),
          null,
          cb
        ),
    })
    this.on('close', () => {
      this.running = false
      writer.destroy()
    })
      .pipe(writer)
      .on('finish', () => {
        this.running = false
      })
      .on('error', (error: Error) => {
        this.error(error)
        this.running = false
        writer.destroy()
      })
  }

  /**
   * Run the fetcher. Returns a promise that resolves once all tasks are completed.
   * @return {Promise}
   */
  async fetch() {
    if (this.running) {
      return false
    }
    this.write()
    this.running = true
    this.tasks().forEach((task: JobTask) => this.enqueueTask(task))
    // Main loop: dispatch jobs while possible; when nothing can be dispatched,
    // either signal EOF (all stored) or sleep one interval and retry.
    while (this.running) {
      if (!this.next()) {
        if (this.finished === this.total) {
          this.push(null)
        }
        await this.wait()
      }
    }
    this.running = false
    if (this.destroyWhenDone) {
      this.destroy()
    }
  }

  /**
   * Returns an idle peer that can process a next job.
   */
  peer() {
    return this.pool.idle()
  }

  /**
   * Expire job that has timed out and ban associated peer. Timed out tasks will
   * be re-inserted into the queue.
   */
  expire(job: Job<JobTask, JobResult, StorageItem>) {
    job.state = 'expired'
    if (this.pool.contains(job.peer!)) {
      this.config.logger.debug(
        `Task timed out for peer (banning) ${JSON.stringify(job.task)} ${job.peer}`
      )
      this.pool.ban(job.peer!, this.banTime)
    } else {
      this.config.logger.debug(
        `Peer disconnected while performing task ${JSON.stringify(job.task)} ${job.peer}`
      )
    }
    this.enqueue(job)
  }

  // Sleep for `delay` ms (defaults to the retry interval).
  async wait(delay?: number) {
    await new Promise((resolve) => setTimeout(resolve, delay ?? this.interval))
  }
}
the_stack
import * as os from 'os'
import { exec, spawnSync } from 'child_process'
import {existsSync, readdirSync, readFileSync, appendFileSync, opendirSync, Dirent, writeFileSync, mkdirSync} from 'fs'
import * as fsPath from 'path'
import { parseKeys } from './parseKeys'
import { fuzzyMatch } from './fuzzy'

// Error type for user-facing input mistakes; rendered by handleError()
// with a yellow title instead of a full error dump.
class InputError extends Error {
  title: string

  constructor(title: string, message: string) {
    super(message)
    this.title = title
    this.message = message
  }
}

// Sub-commands

// Returns true when args denote a sub-command rather than a branch jump:
// either a flag (--list/--version/-l/-v/-h) or `new`/`delete`/`rename`
// followed by at least one argument.
function isSubCommand(args: string[]): boolean {
  const isDashDashSubCommand = [
    '--list', '--version', '-l', '-v', '-h'
  ].includes(args[0])
  const isMultiArgumentSubCommand = (
    args.length > 1
    && ['new', 'delete', 'rename'].includes(args[0])
  )
  return isDashDashSubCommand || isMultiArgumentSubCommand
}

// Dispatches a recognized sub-command; throws InputError for unknown names.
function executeSubCommand(name: string, args: string[]) {
  switch (name) {
    case '--list':
    case '-l': {
      listSubCommand()
      break
    }
    case '--version':
    case '-v': {
      versionSubCommand()
      break
    }
    // --help is handled by git natively, it open man page
    // using ./git-jump.1
    case '-h': {
      helpSubCommand()
      break
    }
    case 'new': {
      newSubCommand(args)
      break
    }
    case 'rename': {
      renameSubCommand(args)
      break
    }
    case 'delete': {
      deleteSubCommand(args)
      break
    }
    default: {
      throw new InputError(`Unknown command ${bold(`git jump ${name}`)}`, `See ${bold('git jump --help')} for the list of supported commands.`)
    }
  }
}

// Prints the package version and exits.
function versionSubCommand() {
  process.stdout.write(`${readVersion()}\n`)
  process.exit(0)
}

// Prints the branch list in non-interactive mode and exits.
function listSubCommand(): void {
  state.isInteractive = false
  view(state)
  process.exit(0)
}

// `git jump new <branch>`: creates and switches to a branch via
// `git switch --create`, records the switch time on success.
function newSubCommand(args: string[]): void {
  const { status, message } = gitSwitch(['--create', ...args])
  state.scene = Scene.Message
  state.message = message
  if (status === 0) {
    updateBranchLastSwitch(args[0], Date.now(), state)
  }
  view(state)
  process.exit(status)
}

// Renders ../help.txt, expanding {bold}/{dim}/{wrap:N} markup, then exits.
function helpSubCommand(): void {
  let help = readFileSync(fsPath.join(__dirname, '../help.txt')).toString()
  help = help.replace(/\{bold\}(.+)\{\/bold\}/g, (substring, content) => bold(content))
  help = help.replace(/\{dim\}(.+)\{\/dim\}/g, (substring, content) => dim(content))
  help = help.replace(/\{wrap:(\d+)\}(.+)\{\/wrap\}/g, (substring, paddingSize, content) => {
    return multilineTextLayout(
      content.trim(),
      process.stdout.columns - parseInt(paddingSize)
    ).map((line, index) => {
      // Padding only the lines which wrap to the next line,
      // first line supposed to be already padded
      // NOTE(review): paddingSize is a string capture here; String.repeat
      // coerces it to a number, so this works, but parseInt would be clearer.
      return index === 0 ? line : ' '.repeat(paddingSize) + line
    }).join('\n')
  })
  process.stdout.write(help)
  process.exit(0)
}

// `git jump rename <old> <new>`: renames the branch and migrates its
// recorded jump data.
function renameSubCommand(args: string[]): void {
  if (args.length < 2) {
    throw new InputError('Wrong Format.', `You should specify both current and new branch name, ${bold('git jump rename <old branch name> <new branch name>')}.`)
  }
  const { status, message } = gitCommand('branch', ['--move', args[0], args[1]])
  state.scene = Scene.Message
  state.message = message
  if (status === 0) {
    renameJumpDataBranch(args[0], args[1], state)
    state.message.push('Renamed.')
  }
  view(state)
  process.exit(status)
}

// `git jump delete <branch...>`: deletes branches and their jump data.
function deleteSubCommand(args: string[]): void {
  const { status, message } = gitCommand('branch', ['--delete', ...args])
  state.scene = Scene.Message
  state.message = message
  if (status === 0) {
    deleteJumpDataBranch(args, state)
  }
  view(state)
  process.exit(status)
}

// Bare

// A branch plus the last time it was switched to (epoch ms, 0 if never).
interface BranchData {
  name: string
  lastSwitch: number
}

// Current HEAD: either a branch name or, when detached, a short sha.
interface CurrentHEAD {
  detached: boolean
  sha: string | null
  branchName: string | null
}

enum ListItemType {
  Head,
  Branch
}

// One row of the interactive list. searchMatchScore is 1 when no search
// string is set, otherwise the fuzzyMatch score (0 filters the row out).
interface ListItem {
  type: ListItemType,
  content: CurrentHEAD | BranchData,
  searchMatchScore: number
}

// Subset of package.json this tool reads.
interface PackageInfo {
  version: string
  engines: {
    node: string
  }
}

enum Scene {
  List,
  Message
}

// Whole-app mutable state; a single module-level instance is used below.
interface State {
  rows: number
  columns: number
  highlightedLineIndex: number
  maxRows: number
  branches: BranchData[]
  searchString: string
  searchStringCursorPosition: number
  currentHEAD: CurrentHEAD
  list: ListItem[]
  lineSelected: boolean
  scene: Scene
  message: string[]
  gitRepoFolder: string | null
  isInteractive: boolean
  latestPackageVersion: string | null,
  packageInfo: PackageInfo | null
}

const state: State = {
  rows: process.stdout.rows,
  columns: process.stdout.columns,
  highlightedLineIndex: 0,
  maxRows: process.stdout.rows,
  branches: [],
  searchString: '',
  searchStringCursorPosition: 0,
  currentHEAD: {
    detached: false,
    sha: null,
    branchName: null
  },
  list: [],
  lineSelected: false,
  scene: Scene.List,
  message: [],
  gitRepoFolder: null,
  isInteractive: true,
  latestPackageVersion: null,
  packageInfo: null
}

// ANSI SGR helpers (set attribute, then reset it).
function dim(s: string): string {
  return `\x1b[2m${s}\x1b[22m`
}

function bold(s: string): string {
  return `\x1b[1m${s}\x1b[22m`
}

function highlight(s: string): string {
  return `\x1b[38;5;4m${s}\x1b[39m`
}

function green(s: string): string {
  return `\x1b[38;5;2m${s}\x1b[39m`
}

function yellow(s: string): string {
  return `\x1b[38;5;3m${s}\x1b[39m`
}

function red(s: string): string {
  return `\x1b[38;5;1m${s}\x1b[39m`
}

// Visible slice of the list when it is taller than the terminal.
interface LinesWindow {
  topIndex: number
  bottomIndex: number
}

// Computes the visible window (terminal height minus 2 rows) centered on
// the highlighted line, clamped to the list bounds.
// NOTE(review): the highlightedLineIndex parameter is ignored; the body
// reads state.highlightedLineIndex instead — confirm this is intentional.
function calculateLinesWindow(linesCount: number, highlightedLineIndex: number): LinesWindow {
  const windowSize = state.rows - 2
  const windowHalf = Math.floor(windowSize / 2)
  const topIndex = Math.max(
    0,
    Math.min(
      linesCount - windowSize,
      state.highlightedLineIndex - windowHalf
    )
  )
  const bottomIndex = topIndex + (windowSize - 1)
  return { topIndex, bottomIndex }
}

enum LayoutColumnType {
  Index,
  BranchName,
  LastUsed,
  MoreIndicator
}

interface LayoutColumn {
  type: LayoutColumnType
  width: number
}

// Builds the column layout: fixed-width index column, branch-name column
// sized to the longest branch name (capped by terminal width), and a
// right-aligned "more" indicator column absorbing the remaining space.
function calculateLayout(state: State): LayoutColumn[] {
  const indexColumnWidth = 3
  const moreIndicatorColumnWidth = 5
  const branchNameColumnWidth = Math.min(
    state.columns - indexColumnWidth - moreIndicatorColumnWidth,
    Math.max.apply(null, state.branches.map((branch: BranchData) => {
      return branch.name.length
    }))
  )
  const moreIndicatorSpacingWidth = state.columns - indexColumnWidth - branchNameColumnWidth - moreIndicatorColumnWidth
  return [
    { type: LayoutColumnType.Index, width: indexColumnWidth },
    { type: LayoutColumnType.BranchName, width: branchNameColumnWidth },
    { type: LayoutColumnType.MoreIndicator, width: moreIndicatorSpacingWidth + moreIndicatorColumnWidth }
  ]
}

// Colors the highlighted line (green when selected, blue otherwise).
function highlightLine(line: string, lineIndex: number, highlightedLineIndex: number, selected: boolean = false) {
  if (lineIndex === highlightedLineIndex) {
    return selected ? green(line) : highlight(line)
  }
  return line
}

// Appends a dim "↓" marker to the last visible line when more lines follow.
function addScrollIndicator(line: string, lineIndex: number, listLength: number, listWindow: LinesWindow, layout: LayoutColumn[]): string {
  if (lineIndex === listWindow.bottomIndex && listWindow.bottomIndex < listLength - 1) {
    return line + dim(' ↓ '.padStart(layout[layout.length - 1].width, ' '))
  }
  return line
}

// Cuts s to maxWidth, replacing the last kept character with an ellipsis
// when anything was cut.
function truncate(s: string, maxWidth: number): string {
  let truncated = s.slice(0, maxWidth)
  if (truncated.length < s.length) {
    truncated = `${truncated.substring(0, truncated.length - 1)}…`
  }
  return truncated
}

// First 10 branch rows (HEAD excluded) — the targets of Alt+0..9.
function getQuickSelectLines(list: ListItem[]): ListItem[] {
  return list.filter((line: ListItem) => {
    return line.type !== ListItemType.Head
  }).slice(0, 10)
}

// Views

const branchIndexPadding = ' '

// Renders the HEAD row: bold branch name, or short sha + "(detached)".
function viewCurrentHEAD(currentHEAD: CurrentHEAD, layout: LayoutColumn[]): string {
  return layout.reduce((line: string, column: LayoutColumn) => {
    if (column.type === LayoutColumnType.Index) {
      return line + branchIndexPadding
    }
    if (column.type === LayoutColumnType.BranchName) {
      const branch = currentHEAD.detached
        ? `${bold(currentHEAD.sha)} ${dim('(detached)')}`
        : bold(currentHEAD.branchName)
      return line + branch
    }
    return line
  }, '')
}

// Renders one branch row; indexes below 10 get a dim quick-select number.
function viewBranch(
  branch: BranchData,
  index: number,
  layout: LayoutColumn[]
): string {
  return layout.reduce((line: string, column: LayoutColumn) => {
    if (column.type === LayoutColumnType.Index) {
      return line + (index < 10 ? ` ${dim(index.toString())} ` : branchIndexPadding)
    }
    if (column.type === LayoutColumnType.BranchName) {
      return line + truncate(branch.name, column.width).padEnd(column.width, ' ')
    }
    return line
  }, '')
}

// Maps list items to rendered rows, numbering branch rows for quick select.
function viewListLines(state: State, layout: LayoutColumn[]): string[] {
  let quickSelectIndex = -1
  return state.list.map((line: ListItem) => {
    switch (line.type) {
      case ListItemType.Head: {
        return viewCurrentHEAD(line.content as CurrentHEAD, layout)
      }
      case ListItemType.Branch: {
        quickSelectIndex++
        return viewBranch(
          line.content as BranchData,
          quickSelectIndex,
          layout
        )
      }
    }
  })
}

// Plain list for non-TTY output (--list / piped): branch names only.
function viewNonInteractiveList(state: State): string[] {
  const layout = [
    { type: LayoutColumnType.BranchName, width: state.columns },
  ]
  return viewListLines(state, layout)
}

// Interactive list: highlighted, windowed to the terminal height, with a
// scroll indicator on the last visible row.
function viewList(state: State): string[] {
  if (state.list.length === 0) {
    return [`${branchIndexPadding}${dim('No such branches')}`]
  }
  const layout = calculateLayout(state)
  const listWindow = calculateLinesWindow(state.list.length, state.highlightedLineIndex)
  return viewListLines(state, layout)
    .map((line, index) => {
      return addScrollIndicator(
        highlightLine(line, index, state.highlightedLineIndex),
        index,
        state.list.length,
        listWindow,
        layout
      )
    })
    .slice(listWindow.topIndex, listWindow.bottomIndex + 1)
}

// Right-aligned hint about Alt/⌥+digit quick select.
function viewQuickSelectHint(maxIndex: number, columnWidth: number): string {
  const trailingIndex = maxIndex > 0 ? `..${maxIndex}` : ''
  const modifierKey = os.type() === 'Darwin' ? '⌥' : 'Alt'
  return dim(`${modifierKey}+0${trailingIndex} quick select `.padStart(columnWidth, ' '))
}

// Search box content: dim placeholder when empty, else the search string.
function viewSearch(state: State, width: number): string {
  const SEARCH_PLACEHOLDER = 'Search'
  return state.searchString === ''
    ? dim(SEARCH_PLACEHOLDER.padEnd(width, ' '))
    : truncate(state.searchString, width).padEnd(width, ' ')
}

// Top line: search field plus (when it fits) the quick-select hint.
function viewSearchLine(state: State): string {
  const searchPlaceholderWidth = 6
  const searchWidth = Math.min(
    state.columns - branchIndexPadding.length,
    Math.max(state.searchString.length, searchPlaceholderWidth)
  )
  const hintMinWidth = 25
  let line = branchIndexPadding + viewSearch(state, searchWidth)
  const hintColumnWidth = state.columns - (branchIndexPadding.length + searchWidth)
  if (hintColumnWidth < hintMinWidth) {
    return line
  }
  const quickSelectLines = getQuickSelectLines(state.list)
  if (quickSelectLines.length === 0) {
    return line
  }
  line += viewQuickSelectHint(quickSelectLines.length - 1, hintColumnWidth)
  return line
}

// Top-level renderer: draws the current scene (list or message) and
// positions the terminal cursor inside the search field.
function view(state: State) {
  switch (state.scene) {
    case Scene.List: {
      if (!state.isInteractive) {
        // concat(['']) will add trailing newline
        render(viewNonInteractiveList(state).concat(['']))
        return
      }
      let lines: string[] = []
      lines.push(viewSearchLine(state))
      lines = lines.concat(viewList(state))
      clear()
      render(lines)
      cursorTo(branchIndexPadding.length + state.searchStringCursorPosition + 1, 1)
      break
    }
    case Scene.Message: {
      clear()
      const lineSpacer = ' '
      const lines = [
        '',
        ...state.message.reduce((lines: string[], line: string) => {
          if (line === '') {
            lines.push('')
            return lines
          }
          return lines.concat(multilineTextLayout(line, process.stdout.columns - lineSpacer.length))
        }, []).map(line => lineSpacer + line),
        '',
        ''
      ]
      render(lines)
      break
    }
  }
}

/**
 * These properties cannot live in the main
 * app state as they are affected by rendering itself,
 * not by application logic. They are part of a different,
 * more low-level sub-system.
 */
interface RenderState {
  cursorY: number
}

const renderState: RenderState = {
  cursorY: 1
}

// Moves the cursor home and erases everything below it.
function clear() {
  cursorTo(1, 1)
  // Clear everything after the cursor
  process.stdout.write(`\x1b[0J`)
}

// Writes the lines and records how tall the rendered output is.
function render(lines: string[]) {
  process.stdout.write(lines.join('\n'))
  // Keep track of the cursor's vertical position
  // in order to know how many lines to move up
  // to clean the screen later
  renderState.cursorY = lines.length
}

// Moves the terminal cursor to (x, y) relative to the rendered block,
// using cursor-up (CUU) for Y and absolute column (CHA) for X.
function cursorTo(x: number, y: number) {
  const yDelta = renderState.cursorY - y
  // Move cursor back to the first line
  // \x1b[0A will still move one line up, so
  // do not move in case there is only one line
  if (yDelta > 0) {
    process.stdout.write(`\x1b[${yDelta}A`)
  }
  // There is an escape sequence for moving
  // cursor horizontally using absolute coordinate,
  // so no need to use delta here, like for Y
  process.stdout.write(`\x1b[${x}G`)
  renderState.cursorY = y
}

// Raw key byte sequences as emitted by the terminal in raw mode.
const CTRL_C = Buffer.from('03', 'hex')
const UP = Buffer.from('1b5b41', 'hex')
const DOWN = Buffer.from('1b5b42', 'hex')
const RIGHT = Buffer.from('1b5b43', 'hex')
const LEFT = Buffer.from('1b5b44', 'hex')
const DELETE = Buffer.from('7f', 'hex')
const BACKSPACE = Buffer.from('08', 'hex')
const ENTER = Buffer.from('0d', 'hex')

// Debug helper: appends a JSON line to ./log (unused in normal operation).
function log(s: any) {
  appendFileSync('./log', Buffer.from(`${JSON.stringify(s)}\n`))
}

const escapeCode = 0x1b
// Unicode C0/C1 control-character ranges.
// NOTE(review): boxed `Number` type — lowercase `number` would be idiomatic.
const UNICODE_C0_RANGE: [Number, Number] = [0x00, 0x1f]
const UNICODE_C1_RANGE: [Number, Number] = [0x80, 0x9f]

function isEscapeCode(data: Buffer): boolean {
  return data[0] === escapeCode
}

// True when the single byte is a C0/C1 control character.
function isC0C1ControlCode(data: Buffer): boolean {
  // If key buffer has more then one byte it's not a control character
  if (data.length > 1) {
    return false
  }
  const code = data[0]
  const inC0Range = code >= UNICODE_C0_RANGE[0] && code <= UNICODE_C0_RANGE[1]
  const inC1Range = code >= UNICODE_C1_RANGE[0] && code <= UNICODE_C1_RANGE[1]
  return inC0Range || inC1Range
}

function isDeleteKey(data: Buffer) {
  return data.length === 1 && data[0] === DELETE[0]
}

// ESC followed by an ASCII digit (Alt+0..9).
// NOTE(review): returns undefined (not false) when length/prefix don't match.
function isMetaPlusNumberCombination(key: Buffer) {
  if (key.length === 2 && key[0] === escapeCode) {
    return key[1] >= 0x30 && key[1] <=0x39
  }
}

function getNumberFromMetaPlusCombination(key: Buffer): number {
  // E.g. number = 5 = 0x35 = 0011 0101; 0011 0101 & 0000 1111 = 0000 0101 = 5
  return key[1] & 0x0F
}

// A key that should not be inserted into the search string.
function isSpecialKey(key: Buffer): boolean {
  return isEscapeCode(key) || isC0C1ControlCode(key) || isDeleteKey(key)
}

enum ListSortCriterion {
  LastSwitch,
  SearchMatchScore
}

// Returns a sorted copy: by recency of switch (HEAD pinned first) or by
// fuzzy-match score when a search is active.
function sortedListLines(list: ListItem[], criterion: ListSortCriterion): ListItem[] {
  if (criterion === ListSortCriterion.LastSwitch) {
    return list.slice().sort((a: ListItem, b: ListItem) => {
      if (b.type === ListItemType.Head) {
        return 1
      }
      return (b.content as BranchData).lastSwitch - (a.content as BranchData).lastSwitch
    })
  }
  return list.slice().sort((a: ListItem, b: ListItem) => {
    return b.searchMatchScore - a.searchMatchScore
  })
}

// Builds the display list: HEAD row + branch rows, fuzzy-filtered by the
// search string and sorted by the appropriate criterion.
function generateList(state: State) {
  let list: ListItem[] = []
  list.push({
    type: ListItemType.Head,
    content: state.currentHEAD,
    searchMatchScore: state.searchString === '' ? 1 : fuzzyMatch(state.searchString, state.currentHEAD.detached ? state.currentHEAD.sha : state.currentHEAD.branchName)
  })
  const branchLines: ListItem[] = state.branches
    // Filter out current branch if HEAD is not detached,
    // because current branch will be displayed as the first list
    .filter(branch => {
      return (
        state.currentHEAD.detached
        || branch.name !== state.currentHEAD.branchName
      )
    })
    .map((branch: BranchData) => {
      return {
        type: ListItemType.Branch,
        content: branch,
        searchMatchScore: state.searchString === '' ? 1 : fuzzyMatch(state.searchString, branch.name)
      }
    })
  list = list.concat(branchLines)
    .filter((line: ListItem) => line.searchMatchScore > 0)
  const sortCriterion = state.searchString === '' ? ListSortCriterion.LastSwitch : ListSortCriterion.SearchMatchScore
  return sortedListLines(list, sortCriterion)
}

// Walks up from `folder` until a directory containing `.git` is found;
// throws InputError at the filesystem root.
function locateGitRepoFolder(folder: string): string {
  const dir = opendirSync(folder)
  let item = dir.readSync()
  let found = false
  while(item !== null && !found) {
    found = item.isDirectory() && item.name === '.git'
    item = dir.readSync()
  }
  dir.closeSync()
  if (found) {
    return folder
  }
  if (folder === '/') {
    throw new InputError(`You're not in Git repo.`, 'There is no Git repository in current or any parent folder.')
  }
  return locateGitRepoFolder(fsPath.resolve(folder, '..'))
}

// Reads ../package.json once and caches it on state.
function readPackageInfo() {
  if (state.packageInfo !== null) {
    return state.packageInfo
  }
  state.packageInfo = JSON.parse(readFileSync(fsPath.join(__dirname, '../package.json')).toString())
  return state.packageInfo
}

function readVersion() {
  return readPackageInfo().version
}

// Extracts the x.y.z part of the engines.node semver range, or null.
function readRequiredNodeVersion() {
  const semverString = readPackageInfo().engines.node
  const match = semverString.match(/\d+\.\d+\.\d+/)
  return match === null ? null : match[0]
}

// Lists local branch names via `git branch --format=%(refname:short)`.
function readRawGitBranches(): string[] {
  const { stdout, stderr, error } = spawnSync('git', ['branch', `--format=%(refname:short)`], { encoding: 'utf-8' })
  if (error) {
    throw new Error(`Could not get the list of Git branches. Cause: ${error.message}. Stacktrace: ${error.stack}.`)
  }
  if (stderr !== '') {
    throw new Error(`Could not get the list of Git branches. Cause: ${stderr}.`)
  }
  return stdout.split('\n').filter(branchName => branchName !== '')
}

// Branch name -> BranchData, persisted in .jump/data.json.
type BranchDataCollection = {[key: string]: BranchData}

const JUMP_FOLDER = '.jump'
const DATA_FILE_PATH = `${JUMP_FOLDER}/data.json`

function readBranchesJumpData(gitRepoFolder: string): BranchDataCollection {
  try {
    return JSON.parse(readFileSync(fsPath.join(gitRepoFolder, DATA_FILE_PATH)).toString())
  } catch (e) {
    throw new Error(`JSON in "${DATA_FILE_PATH}" is not valid, could not parse it.`)
  }
}

function saveBranchesJumpData(gitRepoFolder: string, jumpData: BranchDataCollection): void {
  try {
    writeFileSync(fsPath.join(gitRepoFolder, DATA_FILE_PATH), JSON.stringify(jumpData, null, 2))
  } catch (e) {
    throw new Error(`Could not write data into "${DATA_FILE_PATH}".`)
  }
}

/**
 * Cleans up branches that do not exists in Git already
 * but still present in jump data.
 */
function cleanUpJumpData(gitRepoFolder: string, jumpData: BranchDataCollection, rawGitBranches: string[]): void {
  const cleanJumpData = Object.keys(jumpData).reduce((cleanData, jumpDataBranchName) => {
    if (rawGitBranches.includes(jumpDataBranchName)) {
      cleanData[jumpDataBranchName] = jumpData[jumpDataBranchName]
    }
    return cleanData
  }, {} as BranchDataCollection)
  saveBranchesJumpData(gitRepoFolder, cleanJumpData)
}

// Merges the raw git branch list with the recorded jump data (lastSwitch
// defaults to 0 for branches never switched to), pruning stale entries.
function readBranchesData(gitRepoFolder: string): BranchData[] {
  const rawGitBranches = readRawGitBranches()
  const branchesJumpData = readBranchesJumpData(gitRepoFolder)
  cleanUpJumpData(gitRepoFolder, branchesJumpData, rawGitBranches)
  return rawGitBranches
    .map(branch => {
      const jumpData = branchesJumpData[branch]
      return {
        name: branch,
        lastSwitch: jumpData !== undefined ? jumpData.lastSwitch : 0
      }
    })
}

// Records the time a branch was switched to.
function updateBranchLastSwitch(name: string, lastSwitch: number, state: State): void {
  const jumpData = readBranchesJumpData(state.gitRepoFolder)
  jumpData[name] = { name, lastSwitch }
  saveBranchesJumpData(state.gitRepoFolder, jumpData)
}

// Moves a branch's jump data to its new name (no-op if none recorded).
function renameJumpDataBranch(currentName: string, newName: string, state: State): void {
  const jumpData = readBranchesJumpData(state.gitRepoFolder)
  const currentJumpData = jumpData[currentName]
  if (currentJumpData === undefined) {
    return
  }
  jumpData[newName] = { ...currentJumpData, name: newName }
  delete jumpData[currentName]
  saveBranchesJumpData(state.gitRepoFolder, jumpData)
}

// Removes jump data for the given branches.
function deleteJumpDataBranch(branchNames: string[], state: State): void {
  const jumpData = readBranchesJumpData(state.gitRepoFolder)
  branchNames.forEach((name) => {
    if (jumpData[name] === undefined) {
      return
    }
    delete jumpData[name]
  })
  saveBranchesJumpData(state.gitRepoFolder, jumpData)
}

// Parses .git/HEAD: a "ref: refs/heads/<name>" line means a branch
// (name starts at offset 16); anything else is a detached sha.
function readCurrentHEAD(gitRepoFolder: string): CurrentHEAD {
  const head = readFileSync(fsPath.join(gitRepoFolder, '.git/HEAD')).toString()
  const detached = !head.startsWith('ref:')
  return {
    detached,
    sha: detached ? head.slice(0, 7).trim() : null,
    branchName: detached ? null : head.slice(16).trim()
  }
}

/**
 * Reads branch name from provided list line.
 * Returns null in case current HEAD was selected
 * and it's detached.
 */
function getBranchNameForLine(line: ListItem): string | null {
  switch (line.type) {
    case ListItemType.Head: {
      const content = line.content as CurrentHEAD
      return content.detached ? content.sha : content.branchName
    }
    case ListItemType.Branch: {
      return (line.content as BranchData).name
    }
  }
}

// Result of a spawned git command plus pre-formatted display lines.
interface GitCommandResult {
  status: number
  message: string[]
  stdout: string
  stderr: string
}

// Runs `git <command> <args>` synchronously, building a colored message
// block (status indicator + command + cleaned stdout/stderr lines).
function gitCommand(command: string, args: string[]): GitCommandResult {
  const commandString = ['git', command, ...args].join(' ')
  const { stdout, stderr, error, status } = spawnSync('git', [command, ...args], { encoding: 'utf-8' })
  if (error) {
    throw new Error(`Could not run ${bold(commandString)}.`)
  }
  const cleanLines = (text: string) => text.trim().split('\n').filter(line => line !== '')
  const statusIndicatorColor = status > 0 ? red : green
  const message = [
    statusIndicatorColor('‣ ') + dim(commandString),
    ...cleanLines(stdout),
    ...cleanLines(stderr)
  ]
  return { status, message, stdout, stderr }
}

// `git switch` wrapper; on a successful plain branch switch, records the
// switch time for recency sorting.
function gitSwitch(args: string[]): GitCommandResult {
  const isParameter = (argument: string) => argument.startsWith('-') || argument.startsWith('--')
  const switchResult = gitCommand('switch', args)
  const branchName = args.length === 1 && !isParameter(args[0]) ? args[0] : null
  if (switchResult.status === 0 && branchName !== null) {
    updateBranchLastSwitch(branchName, Date.now(), state)
  }
  return switchResult
}

// Switches to the branch behind a list item and exits; selecting HEAD
// just reports "Staying on" without spawning git.
function switchToListItem(item: ListItem): void {
  const branchName = getBranchNameForLine(item)
  if (item.type === ListItemType.Head) {
    state.scene = Scene.Message
    state.message = [`Staying on ${bold(branchName)}`]
    view(state)
    process.exit(0)
  }
  const { status, message } = gitSwitch([branchName])
  state.scene = Scene.Message
  state.message = message
  view(state)
  process.exit(status)
}

// Handles control keys: navigation, editing, Enter, Ctrl+C, Alt+digit.
function handleSpecialKey(key: Buffer) {
  // Supported special key codes
  // 1b5b44 - left
  // 1b5b43 - right
  // 1b5b41 - up
  // 1b5b42 - down
  // 1b62 - Option+left, word jump
  // 1b66 - Option+right, word jump
  // 1b4f48, 01 - Cmd+left, Control+a, Home
  // 1b4f46, 05 - Cmd+right, Control+e, End
  // 7f, 08 - Delete, 08 on Windows
  // 0d - Enter
  // 1b5b337e - fn+Delete, Forward Delete
  // 1b7f - Option+Delete, delete whole word
  // 17 - Control+w, delete the whole line
  // 0b - Control+k, delete from cursor to the end of the line
  // 1b30 .. 1b39 - Alt+0..9
  if (key.equals(CTRL_C)) {
    clear()
    process.exit()
  }
  if (key.equals(ENTER)) {
    switchToListItem(state.list[state.highlightedLineIndex])
    return
  }
  if (key.equals(UP)) {
    state.highlightedLineIndex = Math.max(0, state.highlightedLineIndex - 1)
    view(state)
    return
  }
  if (key.equals(RIGHT)) {
    if (state.searchStringCursorPosition === state.searchString.length) {
      return
    }
    state.searchStringCursorPosition += 1
    view(state)
    return
  }
  if (key.equals(LEFT)) {
    if (state.searchStringCursorPosition === 0) {
      return
    }
    state.searchStringCursorPosition -= 1
    view(state)
    return
  }
  if (key.equals(DOWN)) {
    state.highlightedLineIndex = Math.min(state.list.length - 1, state.highlightedLineIndex + 1)
    view(state)
    return
  }
  if (key.equals(DELETE) || key.equals(BACKSPACE)) {
    if (state.searchStringCursorPosition === 0) {
      return
    }
    // Remove the character left of the cursor and re-filter the list.
    state.searchString = state.searchString.substring(0, state.searchStringCursorPosition - 1) + state.searchString.substring(state.searchStringCursorPosition, state.searchString.length)
    state.searchStringCursorPosition -= 1
    state.list = generateList(state)
    state.highlightedLineIndex = 0
    view(state)
    return
  }
  if (isMetaPlusNumberCombination(key)) {
    const quickSelectIndex = getNumberFromMetaPlusCombination(key)
    const quickSelectLines = getQuickSelectLines(state.list)
    if (quickSelectIndex < quickSelectLines.length) {
      switchToListItem(quickSelectLines[quickSelectIndex])
    }
    return
  }
}

// Inserts printable input at the cursor and re-filters the list.
function handleStringKey(key: Buffer) {
  const inputString = key.toString()
  state.searchString = state.searchString.substring(0, state.searchStringCursorPosition) + inputString + state.searchString.substring(state.searchStringCursorPosition, state.searchString.length)
  state.searchStringCursorPosition += inputString.length
  state.list = generateList(state)
  state.highlightedLineIndex = 0
  view(state)
}

// `git jump` with no args: interactive branch picker. Falls back to a
// one-shot listing when stdout is not a TTY.
function bare() {
  view(state)
  if (!state.isInteractive) {
    process.exit(0)
  }
  process.stdin.setRawMode(true)
  process.stdin.on('data', (data: Buffer) => {
    parseKeys(data).forEach((key: Buffer) => {
      if (isSpecialKey(key)) {
        handleSpecialKey(key)
        return
      }
      handleStringKey(key)
    })
  })
}

// Jump to a branch

// `git jump <name>`: try an exact `git switch` first; on failure, treat
// the argument as a fuzzy search and switch to the best match.
function jumpTo(args: string[]) {
  const switchResult = gitSwitch(args)
  if (switchResult.status === 0) {
    state.scene = Scene.Message
    state.message = switchResult.message
    view(state)
    process.exit(0)
  }
  // Generate filtered and sorted list of branches
  state.searchString = args[0]
  state.list = generateList(state)
  if (state.list.length === 0) {
    state.scene = Scene.Message
    state.message = [`${bold(yellow(state.searchString))} does not match any branch`]
    view(state)
    process.exit(1)
  }
  switchToListItem(state.list[0])
}

// Word-wraps text to the given width, measuring visible length only
// (ANSI escape sequences are stripped before measuring).
function multilineTextLayout(text: string, columns: number): string[] {
  if (text.length === 0) {
    return []
  }
  const words = text.split(' ')
  const escapeCodePattern = /\x1b.+?m/gi
  return words.slice(1).reduce((lines, word) => {
    const currentLine = lines[lines.length - 1]
    const sanitizedCurrentLine = currentLine.replace(escapeCodePattern, '')
    const sanitizedWord = word.replace(escapeCodePattern, '')
    // +1 at the end is for the space in front of the word
    if (sanitizedCurrentLine.length + sanitizedWord.length + 1 <= columns) {
      lines[lines.length - 1] = currentLine + ' ' + word
    } else {
      lines.push(word)
    }
    return lines
  }, [words[0]])
}

// Fire-and-forget check for a newer npm version; result (if it arrives
// before exit) is shown by handleExit().
function checkUpdates(): void {
  const VERSION_PATTERN = /^\d+\.\d+\.\d+$/
  exec('npm info git-jump dist-tags.latest', (error, stdout) => {
    if (error) {
      return
    }
    const output = stdout.trim()
    if (!VERSION_PATTERN.test(output)) {
      return
    }
    state.latestPackageVersion = output
  })
}

// Numeric-aware string comparison: -1 / 0 / 1, suitable for x.y.z versions.
function compareSemver(a: string, b: string): number {
  return a.localeCompare(b, undefined, { numeric: true })
}

// Global uncaught-exception handler: friendly output for InputError,
// bug-report instructions for anything else; always exits with 1.
function handleError(error: Error): void {
  if (error instanceof InputError) {
    state.message = [`${yellow(error.title)} ${error.message}`]
  } else {
    state.message = [
      `${red('Error:')} ${error.message}`,
      '',
      `${bold('What to do?')}`,
      'Help improve git-jump, create GitHub issue with this error and steps to reproduce it. Thank you!',
      '',
      `GitHub Issues: https://github.com/mykolaharmash/git-jump/issues`
    ]
  }
  state.scene = Scene.Message
  view(state)
  process.exit(1)
}

// On process exit: if a newer version was discovered, append an update
// notice (picking npm or homebrew instructions) to the final message.
function handleExit() {
  if (state.latestPackageVersion === null) {
    return
  }
  const currentVersion = readVersion()
  if (compareSemver(currentVersion, state.latestPackageVersion) === -1) {
    const sourcePackageManager = existsSync(fsPath.join(__dirname, '../homebrew')) ? 'homebrew' : 'npm'
    const updateCommand = sourcePackageManager === 'npm' ? 'npm install -g git-jump' : 'brew upgrade git-jump'
    state.scene = Scene.Message
    state.message = state.message.concat([
      '',
      `New version of git-jump is available: ${yellow(currentVersion)} → ${green(state.latestPackageVersion)}.`,
      `Changelog: https://github.com/mykolaharmash/git-jump/releases/tag/v${state.latestPackageVersion}`,
      '',
      `${bold(updateCommand)} to update.`
    ])
    view(state)
  }
}

// Locates the repo, creates .jump/data.json on first run (and excludes
// .jump from Git via .git/info/exclude), then loads HEAD and branch data.
function initialize() {
  state.isInteractive = process.stdout.isTTY === true
  state.gitRepoFolder = locateGitRepoFolder(process.cwd())
  const jumpFolderPath = fsPath.join(state.gitRepoFolder, JUMP_FOLDER)
  const dataFileFullPath = fsPath.join(state.gitRepoFolder, DATA_FILE_PATH)
  if (!existsSync(jumpFolderPath)) {
    mkdirSync(jumpFolderPath)
    // Exclude .jump from Git tracking
    appendFileSync(
      fsPath.join(state.gitRepoFolder, '.git', 'info', 'exclude'),
      `\n${JUMP_FOLDER}`
    )
  }
  if (!existsSync(dataFileFullPath)) {
    writeFileSync(dataFileFullPath, '{}', { flag: 'a' })
  }
  state.currentHEAD = readCurrentHEAD(state.gitRepoFolder)
  state.branches = readBranchesData(state.gitRepoFolder)
  state.list = generateList(state)
  state.highlightedLineIndex = 0
}

// Fails fast (via InputError) when the running Node is older than the
// minimum declared in package.json engines.node.
function ensureNodeVersion() {
  const currentVersion = process.versions.node
  const requiredVersion = readRequiredNodeVersion()
  if (requiredVersion === null) {
    return
  }
  if (compareSemver(currentVersion, requiredVersion) === -1) {
    throw new InputError('Unsupported Node.js version.', `git-jump requires Node.js version >=${requiredVersion}, you're using ${currentVersion}.`)
  }
}
/**
 * Entry point. Installs global error/exit handlers, validates the Node
 * version, loads repo state, then dispatches on the CLI arguments:
 * no args -> interactive picker, known sub-command -> sub-command,
 * anything else -> jump to (or fuzzy-search for) a branch.
 */
function main(args: string[]) {
  process.on('uncaughtException', handleError)
  process.on('exit', handleExit)

  ensureNodeVersion()
  initialize()

  if (args.length === 0) {
    // Checking for updates only when interactive UI is started
    // as only then there potentially a chance for update
    // request to finish before git-jump exists
    checkUpdates()
    bare()
    return
  }

  if (isSubCommand(args)) {
    executeSubCommand(args[0], args.slice(1))
    return
  }

  jumpTo(args)
}

main(process.argv.slice(2))
the_stack
/**
 * Functional tests for the Edge template engine public API: disk mounting,
 * globals, custom tags, plugins, template registration/rendering, error
 * stack traces, and regression cases for the bundled global helpers.
 */
import './assert-extend'
import { EOL } from 'os'
import test from 'japa'
import { join } from 'path'
import dedent from 'dedent-js'
import { Filesystem } from '@poppinss/dev-utils'

import { Edge } from '../src/Edge'
import { GLOBALS } from '../src/Edge/globals'

// Scratch directory for on-disk templates; wiped after every test.
const fs = new Filesystem(join(__dirname, 'views'))

test.group('Edge', (group) => {
  group.afterEach(async () => {
    await fs.cleanup()
  })

  test('mount default disk', async (assert) => {
    const edge = new Edge()
    edge.mount(fs.basePath)
    assert.deepEqual(edge.loader.mounted, { default: fs.basePath })
  })

  test('mount named disk', async (assert) => {
    const edge = new Edge()
    edge.mount('foo', fs.basePath)
    assert.deepEqual(edge.loader.mounted, { foo: fs.basePath })
  })

  test('unmount named disk', async (assert) => {
    const edge = new Edge()
    edge.mount('foo', fs.basePath)
    edge.unmount('foo')
    assert.deepEqual(edge.loader.mounted, {})
  })

  test('register globals', async (assert) => {
    const edge = new Edge()
    edge.global('foo', 'bar')
    assert.deepEqual(edge.GLOBALS.foo, 'bar')
  })

  test('add a custom tag to the tags list', async (assert) => {
    const edge = new Edge()

    class MyTag {
      public static tagName = 'mytag'
      public static block = true
      public static seekable = true
      public static compile(): void {}
    }

    edge.registerTag(MyTag)
    assert.deepEqual(edge.compiler['tags'].mytag, MyTag)
  })

  test('invoke tag boot method when registering the tag', async (assert) => {
    assert.plan(2)
    const edge = new Edge()

    class MyTag {
      public static tagName = 'mytag'
      public static block = true
      public static seekable = true
      public static compile(): void {}
      // boot must be invoked exactly once during registerTag
      public static boot(): void {
        assert.isTrue(true)
      }
    }

    edge.registerTag(MyTag)
    assert.deepEqual(edge.compiler['tags'].mytag, MyTag)
  })

  test('render a view using the render method', async (assert) => {
    const edge = new Edge()
    await fs.add('foo.edge', 'Hello {{ username }}')
    edge.mount(fs.basePath)
    assert.equal((await edge.render('foo', { username: 'virk' })).trim(), 'Hello virk')
  })

  test('pass locals to the view context', async (assert) => {
    const edge = new Edge()
    await fs.add('foo.edge', "Hello {{ username || 'guest' }}")
    edge.mount(fs.basePath)

    // Locals shared on a renderer must not leak into other renders.
    const tmpl = edge.getRenderer()
    tmpl.share({ username: 'nikk' })

    assert.equal((await tmpl.render('foo', {})).trim(), 'Hello nikk')
    assert.equal((await edge.render('foo', {})).trim(), 'Hello guest')
  })

  test('register a template as a string', async (assert) => {
    const edge = new Edge()
    edge.registerTemplate('foo', {
      template: 'Hello {{ username }}',
    })
    assert.equal((await edge.render('foo', { username: 'virk' })).trim(), 'Hello virk')
  })

  test('register a template on a named disk', async (assert) => {
    const edge = new Edge()
    edge.mount('hello', fs.basePath)
    edge.registerTemplate('hello::foo', {
      template: 'Hello {{ username }}',
    })
    assert.equal((await edge.render('hello::foo', { username: 'virk' })).trim(), 'Hello virk')
  })

  test('clear compiled cache when template is removed', async (assert) => {
    const edge = new Edge({ cache: true })

    edge.registerTemplate('foo', {
      template: 'Hello {{ username }}',
    })
    assert.equal((await edge.render('foo', { username: 'virk' })).trim(), 'Hello virk')
    assert.equal(edge.renderSync('foo', { username: 'virk' }).trim(), 'Hello virk')

    edge.removeTemplate('foo')
    edge.registerTemplate('foo', {
      template: 'Hi {{ username }}',
    })

    assert.equal((await edge.render('foo', { username: 'virk' })).trim(), 'Hi virk')
    assert.equal(edge.renderSync('foo', { username: 'virk' }).trim(), 'Hi virk')
  })

  // The following tests assert that compile errors report the absolute path
  // of the file that actually failed (template, layout, partial, component),
  // by inspecting the second frame of the raised stack trace.

  test('pass absolute path of template to lexer errors', async (assert) => {
    assert.plan(1)
    await fs.add('foo.edge', '@if(1 + 1)')

    const edge = new Edge()
    edge.mount(fs.basePath)

    try {
      await edge.render('foo', false)
    } catch ({ stack }) {
      assert.equal(
        stack.split('\n')[1].trim(),
        `at anonymous (${join(fs.basePath, 'foo.edge')}:1:4)`
      )
    }
  })

  test('pass absolute path of template to parser errors', async (assert) => {
    assert.plan(1)
    await fs.add('foo.edge', 'Hello {{ a,:b }}')

    const edge = new Edge()
    edge.mount(fs.basePath)

    try {
      await edge.render('foo', false)
    } catch ({ stack }) {
      assert.equal(
        stack.split('\n')[1].trim(),
        `at anonymous (${join(fs.basePath, 'foo.edge')}:1:11)`
      )
    }
  })

  test('pass absolute path of layout to lexer errors', async (assert) => {
    assert.plan(1)
    await fs.add('foo.edge', "@layout('bar')")
    await fs.add('bar.edge', '@if(username)')

    const edge = new Edge()
    edge.mount(fs.basePath)

    try {
      await edge.render('foo', false)
    } catch ({ stack }) {
      assert.equal(
        stack.split('\n')[1].trim(),
        `at anonymous (${join(fs.basePath, 'bar.edge')}:1:4)`
      )
    }
  })

  test('pass absolute path of layout to parser errors', async (assert) => {
    assert.plan(1)
    await fs.add('foo.edge', "@layout('bar')")
    await fs.add('bar.edge', '{{ a:b }}')

    const edge = new Edge()
    edge.mount(fs.basePath)

    try {
      await edge.render('foo', false)
    } catch ({ stack }) {
      assert.equal(
        stack.split('\n')[1].trim(),
        `at anonymous (${join(fs.basePath, 'bar.edge')}:1:3)`
      )
    }
  })

  test('pass absolute path of partial to lexer errors', async (assert) => {
    assert.plan(1)
    await fs.add('foo.edge', "@include('bar')")
    await fs.add('bar.edge', '@if(username)')

    const edge = new Edge()
    edge.mount(fs.basePath)

    try {
      await edge.render('foo', false)
    } catch ({ stack }) {
      assert.equal(
        stack.split('\n')[1].trim(),
        `at anonymous (${join(fs.basePath, 'bar.edge')}:1:4)`
      )
    }
  })

  test('pass absolute path of partial to parser errors', async (assert) => {
    assert.plan(1)
    await fs.add('foo.edge', "@include('bar')")
    await fs.add('bar.edge', '{{ a:b }}')

    const edge = new Edge()
    edge.mount(fs.basePath)

    try {
      await edge.render('foo', false)
    } catch ({ stack }) {
      assert.equal(
        stack.split('\n')[1].trim(),
        `at anonymous (${join(fs.basePath, 'bar.edge')}:1:3)`
      )
    }
  })

  test('pass absolute path of component to lexer errors', async (assert) => {
    assert.plan(1)
    await fs.add('foo.edge', "@!component('bar')")
    await fs.add('bar.edge', '@if(username)')

    const edge = new Edge()
    edge.mount(fs.basePath)

    try {
      await edge.render('foo', false)
    } catch ({ stack }) {
      assert.equal(
        stack.split('\n')[1].trim(),
        `at anonymous (${join(fs.basePath, 'bar.edge')}:1:4)`
      )
    }
  })

  test('pass absolute path of component to parser errors', async (assert) => {
    assert.plan(1)
    await fs.add('foo.edge', "@!component('bar')")
    await fs.add('bar.edge', '{{ a:b }}')

    const edge = new Edge()
    edge.mount(fs.basePath)

    try {
      await edge.render('foo', false)
    } catch ({ stack }) {
      assert.equal(
        stack.split('\n')[1].trim(),
        `at anonymous (${join(fs.basePath, 'bar.edge')}:1:3)`
      )
    }
  })

  test('register and call plugins before rendering a view', async (assert) => {
    assert.plan(3)
    const edge = new Edge()
    edge.use(($edge) => {
      assert.deepEqual($edge.loader.mounted, { hello: fs.basePath })
      assert.deepEqual(edge.loader.templates, {
        'hello::foo': { template: 'Hello {{ username }}' },
      })
    })

    edge.mount('hello', fs.basePath)
    edge.registerTemplate('hello::foo', {
      template: 'Hello {{ username }}',
    })

    assert.equal((await edge.render('hello::foo', { username: 'virk' })).trim(), 'Hello virk')
  })

  test('do not run plugins until a view is rendered', async (assert) => {
    // plan(0): the plugin callback must never fire without a render call.
    assert.plan(0)

    const edge = new Edge()
    edge.use(($edge) => {
      assert.deepEqual($edge.loader.mounted, { hello: fs.basePath })
      assert.deepEqual(edge.loader.templates, {
        'hello::foo': { template: 'Hello {{ username }}' },
      })
    })

    edge.mount('hello', fs.basePath)
    edge.registerTemplate('hello::foo', {
      template: 'Hello {{ username }}',
    })
  })

  test('run plugins only once', async (assert) => {
    // plan(5): 2 plugin assertions (single invocation) + 3 render assertions.
    assert.plan(5)

    const edge = new Edge()
    edge.use(($edge) => {
      assert.deepEqual($edge.loader.mounted, { hello: fs.basePath })
      assert.deepEqual(edge.loader.templates, {
        'hello::foo': { template: 'Hello {{ username }}' },
      })
    })

    edge.mount('hello', fs.basePath)
    edge.registerTemplate('hello::foo', {
      template: 'Hello {{ username }}',
    })

    assert.equal((await edge.render('hello::foo', { username: 'virk' })).trim(), 'Hello virk')
    assert.equal((await edge.render('hello::foo', { username: 'virk' })).trim(), 'Hello virk')
    assert.equal((await edge.render('hello::foo', { username: 'virk' })).trim(), 'Hello virk')
  })

  test('run recurring plugins again and again', async (assert) => {
    // plan(9): 2 plugin assertions × 3 renders + 3 render assertions.
    assert.plan(9)

    const edge = new Edge()
    edge.use(
      ($edge) => {
        assert.deepEqual($edge.loader.mounted, { hello: fs.basePath })
        assert.deepEqual(edge.loader.templates, {
          'hello::foo': { template: 'Hello {{ username }}' },
        })
      },
      { recurring: true }
    )

    edge.mount('hello', fs.basePath)
    edge.registerTemplate('hello::foo', {
      template: 'Hello {{ username }}',
    })

    assert.equal((await edge.render('hello::foo', { username: 'virk' })).trim(), 'Hello virk')
    assert.equal((await edge.render('hello::foo', { username: 'virk' })).trim(), 'Hello virk')
    assert.equal((await edge.render('hello::foo', { username: 'virk' })).trim(), 'Hello virk')
  })
})

test.group('Edge | regression', () => {
  test('render non-existy values', async (assert) => {
    const edge = new Edge()
    edge.registerTemplate('numeric', {
      template: 'Total {{ total }}',
    })
    edge.registerTemplate('boolean', {
      template: 'Is Active {{ isActive }}',
    })

    // 0 and false are falsy but must still render verbatim.
    assert.equal(await edge.render('numeric', { total: 0 }), 'Total 0')
    assert.equal(await edge.render('boolean', { isActive: false }), 'Is Active false')
  })

  test('render inline scripts with regex', async (assert) => {
    const edge = new Edge()
    edge.registerTemplate('eval', {
      template: dedent`
      <script type="text/javascript">
        var pl = /\+/g
      </script>
      `,
    })

    assert.stringEqual(
      await edge.render('eval'),
      dedent`
      <script type="text/javascript">
        var pl = /\+/g
      </script>
      `
    )
  })

  test('render complex binary expressions', async (assert) => {
    const edge = new Edge()
    edge.registerTemplate('eval', {
      template: dedent`
      {{
        line.lineName + (
          (user.line.id === line.id)
            ? ' (current)'
            : (' (' + (line.user.username || 'unselected') + ')')
        )
      }}`,
    })

    assert.equal(
      await edge.render('eval', {
        line: { id: 1, lineName: 'aaa', user: {} },
        user: { line: {} },
      }),
      dedent`
      aaa (unselected)
      `
    )
  })

  test('do not escape when using safe global method', async (assert) => {
    const edge = new Edge()
    Object.keys(GLOBALS).forEach((key) => edge.global(key, GLOBALS[key]))
    edge.registerTemplate('eval', {
      template: 'Hello {{ safe(username) }}',
    })
    assert.equal(await edge.render('eval', { username: '<p>virk</p>' }), 'Hello <p>virk</p>')
  })

  test('truncate string by characters', async (assert) => {
    const edge = new Edge()
    Object.keys(GLOBALS).forEach((key) => edge.global(key, GLOBALS[key]))
    edge.registerTemplate('eval', {
      template: '{{{ truncate(text, 10) }}}',
    })
    assert.equal(
      await edge.render('eval', { text: '<p>hello world & universe</p>' }),
      '<p>hello world...</p>'
    )
  })

  test('truncate string by characters in strict mode', async (assert) => {
    const edge = new Edge()
    Object.keys(GLOBALS).forEach((key) => edge.global(key, GLOBALS[key]))
    edge.registerTemplate('eval', {
      template: '{{{ truncate(text, 10, { strict: true }) }}}',
    })
    assert.equal(
      await edge.render('eval', { text: '<p>hello world & universe</p>' }),
      '<p>hello worl...</p>'
    )
  })

  test('define custom suffix for truncate', async (assert) => {
    const edge = new Edge()
    Object.keys(GLOBALS).forEach((key) => edge.global(key, GLOBALS[key]))
    edge.registerTemplate('eval', {
      template: '{{{ truncate(text, 10, { suffix: ". more" }) }}}',
    })
    assert.equal(
      await edge.render('eval', { text: '<p>hello world & universe</p>' }),
      '<p>hello world. more</p>'
    )
  })

  test('generate string excerpt', async (assert) => {
    const edge = new Edge()
    Object.keys(GLOBALS).forEach((key) => edge.global(key, GLOBALS[key]))
    edge.registerTemplate('eval', {
      template: '{{{ excerpt(text, 10) }}}',
    })
    assert.equal(
      await edge.render('eval', { text: '<p>hello world & universe</p>' }),
      'hello world...'
    )
  })

  test('excerpt remove in-between tag', async (assert) => {
    const edge = new Edge()
    Object.keys(GLOBALS).forEach((key) => edge.global(key, GLOBALS[key]))
    edge.registerTemplate('eval', {
      template: '{{{ excerpt(text, 10) }}}',
    })
    assert.equal(
      await edge.render('eval', {
        text: '<p>hello <strong>world</strong> & <strong>universe</strong></p>',
      }),
      'hello world...'
    )
  })

  test('generate excerpt in strict mode', async (assert) => {
    const edge = new Edge()
    Object.keys(GLOBALS).forEach((key) => edge.global(key, GLOBALS[key]))
    edge.registerTemplate('eval', {
      template: '{{{ excerpt(text, 10, { strict: true }) }}}',
    })
    assert.equal(
      await edge.render('eval', {
        text: '<p>hello <strong>world</strong> & <strong>universe</strong></p>',
      }),
      'hello worl...'
    )
  })

  test('add custom suffix for excerpt', async (assert) => {
    const edge = new Edge()
    Object.keys(GLOBALS).forEach((key) => edge.global(key, GLOBALS[key]))
    edge.registerTemplate('eval', {
      template: '{{{ excerpt(text, 10, { suffix: ". more" }) }}}',
    })
    assert.equal(
      await edge.render('eval', {
        text: '<p>hello <strong>world</strong> & <strong>universe</strong></p>',
      }),
      'hello world. more'
    )
  })

  test('convert newline to br tags', async (assert) => {
    const edge = new Edge()
    Object.keys(GLOBALS).forEach((key) => edge.global(key, GLOBALS[key]))

    /**
     * Intentionally using `EOL`, so that we can test that in windows
     * the newlines are also converted to br tags
     */
    edge.registerTemplate('eval', {
      template: '{{{ nl2br(text) }}}',
    })

    assert.equal(await edge.render('eval', { text: `Hello${EOL}world` }), 'Hello<br>world')
  })

  test('escape user input except the new lines', async (assert) => {
    const edge = new Edge()
    Object.keys(GLOBALS).forEach((key) => edge.global(key, GLOBALS[key]))

    /**
     * Intentionally using `EOL`, so that we can test that in windows
     * the newlines are also converted to br tags
     */
    edge.registerTemplate('eval', {
      template: '{{{ nl2br(e(text)) }}}',
    })

    assert.equal(
      await edge.render('eval', { text: `Hello${EOL}<strong>world</strong>` }),
      'Hello<br>&lt;strong&gt;world&lt;/strong&gt;'
    )
  })

  test('stringify data structures', async (assert) => {
    const edge = new Edge()
    Object.keys(GLOBALS).forEach((key) => edge.global(key, GLOBALS[key]))

    /**
     * Intentionally using `EOL`, so that we can test that in windows
     * the newlines are also converted to br tags
     */
    edge.registerTemplate('eval', {
      template: `{{ stringify({ user: { username: 'virk' } }) }}`,
    })

    assert.equal(
      await edge.render('eval'),
      '{&quot;user&quot;:{&quot;username&quot;:&quot;virk&quot;}}'
    )
  })
})
the_stack
/**
 * Public entry point of the japa tests runner: exposes `configure`,
 * `test`, `test.group`, `run` and `processCliArgs`, and keeps the
 * module-level mutable state (active suite/group, global timeout,
 * runner options) those functions share.
 */
import getopts from 'getopts'
import { extname } from 'path'
import fastGlob from 'fast-glob'
import inclusion from 'inclusion'
import { pathToFileURL } from 'url'
import { Hooks } from '@poppinss/hooks'
import { ErrorsPrinter } from '@japa/errors-printer'
import { Emitter, Refiner, TestExecutor, ReporterContract } from '@japa/core'

import { Test, TestContext, Group, Suite, Runner } from './src/Core'
import {
  Config,
  Filters,
  PluginFn,
  RunnerHooksHandler,
  RunnerHooksCleanupHandler,
} from './src/Contracts'

export {
  Test,
  Config,
  Suite,
  Runner,
  Group,
  PluginFn,
  TestContext,
  ReporterContract,
  RunnerHooksHandler,
  RunnerHooksCleanupHandler,
}

/**
 * Filtering layers allowed by the refiner
 */
const refinerFilteringLayers = ['tests', 'groups', 'tags'] as const

/**
 * Reference to the recently imported file. We pass it to the
 * test and the group both
 */
let recentlyImportedFile: string

/**
 * Global timeout for tests. Fetched from runner options or suites
 * options
 */
let globalTimeout: number

/**
 * Function to create the test context for the test
 */
const getContext = (testInstance: Test<any>) => new TestContext(testInstance)

/**
 * The global reference to the tests emitter
 */
const emitter = new Emitter()

/**
 * Active suite for tests
 */
let activeSuite = new Suite('default', emitter)

/**
 * Currently active group
 */
let activeGroup: Group | undefined

/**
 * Configuration options
 */
let runnerOptions: Required<Config>

/**
 * Ensure the configure method has been called
 */
function ensureIsConfigured(message: string) {
  if (!runnerOptions) {
    throw new Error(message)
  }
}

/**
 * Validate suites filter to ensure a wrong suite is not
 * mentioned
 */
function validateSuitesFilter() {
  // NOTE(review): after configure() merges defaults via Object.assign,
  // 'suites' always exists on runnerOptions, so this guard looks
  // unreachable — confirm whether it protects a pre-configure call path.
  if (!('suites' in runnerOptions)) {
    return
  }

  if (!runnerOptions.filters.suites || !runnerOptions.filters.suites.length) {
    return
  }

  const suites = runnerOptions.suites.map(({ name }) => name)
  const invalidSuites = runnerOptions.filters.suites.filter((suite) => !suites.includes(suite))

  if (invalidSuites.length) {
    throw new Error(
      `Unrecognized suite "${invalidSuites[0]}". Make sure to define it in the config first`
    )
  }
}

/**
 * End tests. We wait for the "beforeExit" event when
 * forceExit is not set to true
 */
async function endTests(runner: Runner) {
  if (runnerOptions.forceExit) {
    await runner.end()
  } else {
    return new Promise<void>((resolve) => {
      async function beforeExit() {
        // Remove the listener so runner.end() runs only once.
        process.removeListener('beforeExit', beforeExit)
        await runner.end()
        resolve()
      }
      process.on('beforeExit', beforeExit)
    })
  }
}

/**
 * Process command line argument into a string value
 *
 * Accepts either a repeated flag (array) or a single comma-separated
 * value; trimmed entries are forwarded to onMatch.
 */
function processAsString(
  argv: Record<string, any>,
  flagName: string,
  onMatch: (value: string[]) => any
): void {
  const flag = argv[flagName]
  if (flag) {
    onMatch((Array.isArray(flag) ? flag : flag.split(',')).map((tag: string) => tag.trim()))
  }
}

/**
 * Find if the file path matches the files filter array.
 * The ending of the file is matched
 */
function isFileAllowed(filePath: string, filters: string[]): boolean {
  return !!filters.find((matcher) => {
    if (filePath.endsWith(matcher)) {
      return true
    }

    // Also try the match with the file extension stripped.
    return filePath.replace(extname(filePath), '').endsWith(matcher)
  })
}

/**
 * Returns "true" when no filters are applied or the name is part
 * of the applied filter
 */
function isSuiteAllowed(name: string, filters?: string[]) {
  if (!filters || !filters.length) {
    return true
  }

  return filters.includes(name)
}

/**
 * Configure the tests runner
 */
export function configure(options: Config) {
  const defaultOptions: Required<Config> = {
    cwd: process.cwd(),
    files: [],
    suites: [],
    plugins: [],
    reporters: [],
    timeout: 2000,
    filters: {},
    setup: [],
    teardown: [],
    importer: (filePath) => inclusion(pathToFileURL(filePath).href),
    refiner: new Refiner({}),
    forceExit: false,
    configureSuite: () => {},
  }

  runnerOptions = Object.assign(defaultOptions, options)
}

/**
 * Add a new test
 */
export function test(title: string, callback?: TestExecutor<TestContext, undefined>) {
  ensureIsConfigured('Cannot add test without configuring the test runner')

  const testInstance = new Test<undefined>(title, getContext, emitter, runnerOptions.refiner)

  /**
   * Set filename
   */
  testInstance.options.meta.fileName = recentlyImportedFile

  /**
   * Define timeout on the test when exists globally
   */
  if (globalTimeout !== undefined) {
    testInstance.timeout(globalTimeout)
  }

  /**
   * Define test executor function
   */
  if (callback) {
    testInstance.run(callback)
  }

  /**
   * Add test to the group or suite
   */
  if (activeGroup) {
    activeGroup.add(testInstance)
  } else {
    activeSuite.add(testInstance)
  }

  return testInstance
}

/**
 * Define test group
 */
test.group = function (title: string, callback: (group: Group) => void) {
  ensureIsConfigured('Cannot add test group without configuring the test runner')

  /**
   * Disallow nested groups
   */
  if (activeGroup) {
    throw new Error('Cannot create nested test groups')
  }

  activeGroup = new Group(title, emitter, runnerOptions.refiner)

  /**
   * Set filename
   */
  activeGroup.options.meta.fileName = recentlyImportedFile

  /**
   * Add group to the default suite
   */
  activeSuite.add(activeGroup)

  // The callback registers tests synchronously; activeGroup is reset
  // afterwards so subsequent tests attach to the suite again.
  callback(activeGroup)
  activeGroup = undefined
}

/**
 * Collect files using the files collector function or by processing
 * the glob pattern
 */
async function collectFiles(files: string | string[] | (() => string[] | Promise<string[]>)) {
  if (Array.isArray(files) || typeof files === 'string') {
    return await fastGlob(files, { absolute: true, onlyFiles: true, cwd: runnerOptions.cwd })
  } else if (typeof files === 'function') {
    return await files()
  }

  throw new Error('Invalid value for "files" property. Expected a string, array or a function')
}

/**
 * Import test files using the configured importer. Also
 * filter files using the file filter. (if mentioned).
 */
async function importFiles(files: string[]) {
  for (let file of files) {
    // Module-level marker consumed by test()/test.group() during import.
    recentlyImportedFile = file

    if (runnerOptions.filters.files && runnerOptions.filters.files.length) {
      if (isFileAllowed(file, runnerOptions.filters.files)) {
        await runnerOptions.importer(file)
      }
    } else {
      await runnerOptions.importer(file)
    }
  }
}

/**
 * Run japa tests
 */
export async function run() {
  const runner = new Runner(emitter)
  runner.manageUnHandledExceptions()
  runner.onSuite(runnerOptions.configureSuite)

  const hooks = new Hooks()
  let setupRunner: ReturnType<Hooks['runner']>
  let teardownRunner: ReturnType<Hooks['runner']>

  try {
    ensureIsConfigured('Cannot run tests without configuring the tests runner')

    /**
     * Step 1: Run all plugins
     *
     * Plugins can also mutate config. So we process the config after
     * running plugins only
     */
    for (let plugin of runnerOptions.plugins) {
      await plugin(runnerOptions, runner, { Test, TestContext, Group })
    }
    validateSuitesFilter()

    /**
     * Step 2: Notify runner about reporters
     */
    runnerOptions.reporters.forEach((reporter) => runner.registerReporter(reporter))

    /**
     * Step 3: Configure runner hooks.
     */
    runnerOptions.setup.forEach((hook) => hooks.add('setup', hook))
    runnerOptions.teardown.forEach((hook) => hooks.add('teardown', hook))
    setupRunner = hooks.runner('setup')
    teardownRunner = hooks.runner('teardown')

    /**
     * Step 3.1: Run setup hooks
     *
     * We run the setup hooks before importing test files. It
     * allows hooks to setup the app environment for the
     * test files.
     */
    await setupRunner.run(runner)

    /**
     * Step 4: Entertain files property and import test files
     * as part of the default suite
     */
    if ('files' in runnerOptions && runnerOptions.files.length) {
      globalTimeout = runnerOptions.timeout
      const files = await collectFiles(runnerOptions.files)
      runner.add(activeSuite)
      await importFiles(files)
    }

    /**
     * Step 5: Entertain suites property and import test files
     * for the filtered suites.
     */
    if ('suites' in runnerOptions) {
      for (let suite of runnerOptions.suites) {
        if (isSuiteAllowed(suite.name, runnerOptions.filters.suites)) {
          // Suite-level timeout wins over the runner-level timeout.
          if (suite.timeout !== undefined) {
            globalTimeout = suite.timeout
          } else {
            globalTimeout = runnerOptions.timeout
          }

          activeSuite = new Suite(suite.name, emitter)
          if (typeof suite.configure === 'function') {
            suite.configure(activeSuite)
          }

          const files = await collectFiles(suite.files)
          runner.add(activeSuite)
          await importFiles(files)
        }
      }
    }

    /**
     * Step 6: Add filters to the refiner
     */
    Object.keys(runnerOptions.filters).forEach((layer: 'tests' | 'groups' | 'tags') => {
      if (refinerFilteringLayers.includes(layer)) {
        const values = runnerOptions.filters[layer]
        if (values) {
          runnerOptions.refiner.add(layer, values)
        }
      }
    })

    /**
     * Step 7.1: Start the tests runner
     */
    await runner.start()

    /**
     * Step 7.2: Execute all the tests
     */
    await runner.exec()

    /**
     * Step 7.3: Run cleanup and teardown hooks
     */
    await setupRunner.cleanup(runner)
    await teardownRunner.run(runner)
    await teardownRunner.cleanup(runner)

    /**
     * Step 7.4: End or wait for process to exit
     */
    await endTests(runner)

    /**
     * Step 8: Update the process exit code
     */
    const summary = runner.getSummary()
    if (summary.hasError) {
      process.exitCode = 1
    }

    runnerOptions.forceExit && process.exit()
  } catch (error) {
    // Run pending hook cleanups before reporting, so resources opened by
    // setup/teardown hooks are released even on failure.
    if (setupRunner! && setupRunner.isCleanupPending) {
      await setupRunner.cleanup(error, runner)
    }
    if (teardownRunner! && teardownRunner.isCleanupPending) {
      await teardownRunner.cleanup(error, runner)
    }

    const printer = new ErrorsPrinter()
    await printer.printError(error)

    process.exitCode = 1
    runnerOptions.forceExit && process.exit()
  }
}

/**
 * Process CLI arguments into configuration options. The following
 * command line arguments are processed.
 *
 * * --tests=Specify test titles
 * * --tags=Specify test tags
 * * --groups=Specify group titles
 * * --ignore-tags=Specify negated tags
 * * --files=Specify files to match and run
 * * --force-exit=Enable/disable force exit
 * * --timeout=Define timeout for all the tests
 */
export function processCliArgs(argv: string[]): Partial<Config> {
  const parsed = getopts(argv, {
    string: ['tests', 'tags', 'groups', 'ignoreTags', 'files', 'timeout'],
    boolean: ['forceExit'],
    alias: {
      ignoreTags: 'ignore-tags',
      forceExit: 'force-exit',
    },
  })

  const config: { filters: Filters; timeout?: number; forceExit?: boolean } = {
    filters: {},
  }

  processAsString(parsed, 'tags', (tags) => (config.filters.tags = tags))
  // Ignored tags are folded into the tags filter with a "!" prefix.
  processAsString(parsed, 'ignoreTags', (tags) => {
    config.filters.tags = config.filters.tags || []
    tags.forEach((tag) => config.filters.tags!.push(`!${tag}`))
  })
  processAsString(parsed, 'groups', (groups) => (config.filters.groups = groups))
  processAsString(parsed, 'tests', (tests) => (config.filters.tests = tests))
  processAsString(parsed, 'files', (files) => (config.filters.files = files))

  /**
   * Get suites
   */
  if (parsed._.length) {
    processAsString({ suites: parsed._ }, 'suites', (suites) => (config.filters.suites = suites))
  }

  /**
   * Get timeout
   */
  if (parsed.timeout) {
    const value = Number(parsed.timeout)
    if (!isNaN(value)) {
      config.timeout = value
    }
  }

  /**
   * Get forceExit
   */
  if (parsed.forceExit) {
    config.forceExit = true
  }

  return config
}
the_stack
import * as C from './interpreter.constants'; import * as U from './interpreter.util'; declare var stackmachineJsHelper; export class State { /** * 3 properties for the management of the operations to be executed * * - operations contains the sequence of machine instructions that are executed sequentially * - pc, the program counter, is the index of the NEXT operation to be executed * if either the actual array of operations is exhausted */ private operations: any[]; public pc: number; // the hash map of function definitions private bindings; // the binding of values to names (the 'environment') private stack: any[]; // the stack of values private currentBlocks: string[]; //current blocks being executed private debugMode: boolean; /** * initialization of the state. * Gets the array of operations and the function definitions and resets the whole state * * . @param ops the array of operations * . @param fct the function definitions */ constructor(ops: any[]) { this.operations = ops; this.pc = 0; this.bindings = {}; this.stack = []; this.currentBlocks = []; this.debugMode = false; // p( 'storeCode with state reset' ); } public incrementProgramCounter() { this.pc++; } /** returns the boolean debugMode */ public getDebugMode() { return this.debugMode; } /** updates the boolean debugMode */ public setDebugMode(mode: boolean) { this.debugMode = mode; } /** * introduces a new binding. An old binding (if it exists) is hidden, until an unbinding occurs. * * . @param name the name to which a value is bound * . 
@param value the value that is bound to a name */ public bindVar(name: string, value) { this.checkValidName(name); this.checkValidValue(value); var nameBindings = this.bindings[name]; if (nameBindings === undefined || nameBindings === null || nameBindings === []) { this.bindings[name] = [value]; U.debug('bind new ' + name + ' with ' + value + ' of type ' + typeof value); } else { nameBindings.unshift(value); U.debug('bind&hide ' + name + ' with ' + value + ' of type ' + typeof value); } } /** * remove a binding. An old binding (if it exists) is re-established. * * . @param name the name to be unbound */ public unbindVar(name: string) { this.checkValidName(name); var oldBindings = this.bindings[name]; if (oldBindings.length < 1) { U.dbcException('unbind failed for: ' + name); } oldBindings.shift(); U.debug('unbind ' + name + ' remaining bindings are ' + oldBindings.length); } /** * get the value of a binding. * * . @param name the name whose value is requested */ public getVar(name: string) { this.checkValidName(name); var nameBindings = this.bindings[name]; if (nameBindings === undefined || nameBindings === null || nameBindings.length < 1) { U.dbcException('getVar failed for: ' + name); } // p( 'get ' + name + ': ' + nameBindings[0] ); return nameBindings[0]; } /** * gets all the bindings. */ public getVariables() { return this.bindings; } /** * update the value of a binding. * * . @param name the name whose value is updated * . @param value the new value for that binding */ public setVar(name: string, value: any) { this.checkValidName(name); this.checkValidValue(value); if (value === undefined || value === null) { U.dbcException('setVar value invalid'); } var nameBindings = this.bindings[name]; if (nameBindings === undefined || nameBindings === null || nameBindings.length < 1) { U.dbcException('setVar failed for: ' + name); } nameBindings[0] = value; // p( 'set ' + name + ': ' + nameBindings[0] ); } /** * push a value onto the stack * * . 
@param value the value to be pushed */ public push(value) { this.checkValidValue(value); this.stack.push(value); U.debug('push ' + value + ' of type ' + typeof value); } /** * pop a value from the stack: * - discard the value * - return the value */ public pop() { if (this.stack.length < 1) { U.dbcException('pop failed with empty stack'); } var value = this.stack.pop(); // p( 'pop ' + value ); return value; } /** * get the first (top) value from the stack. Do not discard the value */ public get0() { return this.get(0); } /** * get the second value from the stack. Do not discard the value */ public get1() { return this.get(1); } /** * get the third value from the stack. Do not discard the value */ public get2() { return this.get(2); } /** * helper: get a value from the stack. Do not discard the value * * . @param i the i'th value (starting from 0) is requested */ private get(i: number) { if (this.stack.length === 0) { U.dbcException('get failed with empty stack'); } return this.stack[this.stack.length - 1 - i]; } /** * for early error detection: assert, that a name given (for a binding) is valid */ private checkValidName(name) { if (name === undefined || name === null) { U.dbcException('invalid name'); } } /** * for early error detection: assert, that a value given (for a binding) is valid */ private checkValidValue(value) { if (value === undefined || value === null) { U.dbcException('bindVar value invalid'); } } /** * get the next operation to be executed from the actual array of operations. */ public getOp() { return this.operations[this.pc]; } /** * FOR DEBUGGING: write the actual array of operations to the 'console.log'. The actual operation is prefixed by '*' * * . 
@param msg the prefix of the message (for easy reading of the logs)
     */
    public opLog(msg: string) {
        U.opLog(msg, this.operations, this.pc);
    }

    /**
     * In debug mode, update block highlighting for a statement transition:
     * blocks started by `currentStmt` are highlighted, blocks finished by
     * `lastStmt` are un-highlighted — except blocks that appear in both,
     * which stay highlighted.
     */
    public evalHighlightings(currentStmt, lastStmt) {
        if (this.debugMode) {
            let initiations: string[] = currentStmt?.[C.HIGHTLIGHT_PLUS] || [];
            // terminations may be undefined when lastStmt (or its minus-list) is absent;
            // evalTerminations tolerates that via optional chaining.
            let terminations: string[] = lastStmt?.[C.HIGHTLIGHT_MINUS]?.filter((term) => initiations.indexOf(term) < 0);
            this.evalTerminations(terminations);
            this.evalInitiations(initiations);
        }
    }

    /** adds block to currentBlocks and applies correct highlight to block**/
    public evalInitiations(initiations: string[]) {
        initiations
            .map((blockId) => stackmachineJsHelper.getBlockById(blockId))
            .forEach((block) => {
                // a plain breakpoint on a block that starts executing becomes a "selected" breakpoint
                if (stackmachineJsHelper.getJqueryObject(block?.svgPath_).hasClass('breakpoint')) {
                    stackmachineJsHelper.getJqueryObject(block?.svgPath_).removeClass('breakpoint').addClass('selectedBreakpoint');
                }
                // NOTE(review): `block?.svgPath_` above tolerates a null block, but
                // highlightBlock(block) and block.id below do not — this assumes
                // getBlockById never misses for initiation ids; confirm.
                this.highlightBlock(block);
                this.addToCurrentBlock(block.id);
            });
    }

    /** removes block froms currentBlocks and removes highlighting from block**/
    public evalTerminations(terminations: string[]) {
        terminations
            ?.map((blockId) => stackmachineJsHelper.getBlockById(blockId))
            .forEach((block) => {
                // a "selected" breakpoint on a block that stops executing reverts to a plain breakpoint
                if (stackmachineJsHelper.getJqueryObject(block?.svgPath_).hasClass('selectedBreakpoint')) {
                    stackmachineJsHelper.getJqueryObject(block?.svgPath_).removeClass('selectedBreakpoint').addClass('breakpoint');
                }
                this.removeBlockHighlight(block);
                this.removeFromCurrentBlock(block.id);
            });
    }

    /** Returns true if the current block is currently being executed**/
    public beingExecuted(stmt) {
        // last entry of the statement's highlight-plus list identifies its block
        let blockId = stmt[C.HIGHTLIGHT_PLUS].slice(-1).pop();
        return blockId && this.isInCurrentBlock(blockId);
    }

    // full opacity = highlighted; applied immediately (0 ms animation)
    private highlightBlock(block) {
        stackmachineJsHelper.getJqueryObject(block.svgPath_).stop(true, true).animate({ 'fill-opacity': '1' }, 0);
    }

    // reduced opacity = not highlighted; short 50 ms fade
    private removeBlockHighlight(block) {
        stackmachineJsHelper.getJqueryObject(block.svgPath_).stop(true, true).animate({ 'fill-opacity': '0.3' }, 50);
    }

    /** Will add highlights from all currently
blocks being currently executed and all given Breakpoints * @param breakPoints the array of breakpoint block id's to have their highlights added*/ public addHighlights(breakPoints: any[]) { Array.from(this.currentBlocks) .map((blockId) => stackmachineJsHelper.getBlockById(blockId)) .forEach((block) => this.highlightBlock(block)); breakPoints.forEach((id) => { let block = stackmachineJsHelper.getBlockById(id); if (block !== null) { if (this.currentBlocks.hasOwnProperty(id)) { stackmachineJsHelper.getJqueryObject(block.svgPath_).addClass('selectedBreakpoint'); } else { stackmachineJsHelper.getJqueryObject(block.svgPath_).addClass('breakpoint'); } } }); } /** Will remove highlights from all currently blocks being currently executed and all given Breakpoints * @param breakPoints the array of breakpoint block id's to have their highlights removed*/ public removeHighlights(breakPoints: any[]) { Array.from(this.currentBlocks) .map((blockId) => stackmachineJsHelper.getBlockById(blockId)) .forEach((block) => { let object = stackmachineJsHelper.getJqueryObject(block); if (object.hasClass('selectedBreakpoint')) { object.removeClass('selectedBreakpoint').addClass('breakpoint'); } this.removeBlockHighlight(block); }); breakPoints .map((blockId) => stackmachineJsHelper.getBlockById(blockId)) .forEach((block) => { if (block !== null) { stackmachineJsHelper.getJqueryObject(block.svgPath_).removeClass('breakpoint').removeClass('selectedBreakpoint'); } }); } private addToCurrentBlock(id: string): void { const index = this.currentBlocks.indexOf(id, 0); if (index > -1) { return; } this.currentBlocks.push(id); } private removeFromCurrentBlock(id: string): void { const index = this.currentBlocks.indexOf(id, 0); if (index > -1) { this.currentBlocks.splice(index, 1); } } private isInCurrentBlock(id: string): boolean { return this.currentBlocks.indexOf(id, 0) > -1; } }
the_stack
* @packageDocumentation * @hidden */ import { AttributeName, Buffer, BufferUsageBit, Device, Feature, MemoryUsageBit, DescriptorSet, BufferInfo } from '../../core/gfx'; import { Mesh } from './mesh'; import { Texture2D } from '../../core/assets/texture-2d'; import { ImageAsset } from '../../core/assets/image-asset'; import { UBOMorph, UNIFORM_NORMAL_MORPH_TEXTURE_BINDING, UNIFORM_POSITION_MORPH_TEXTURE_BINDING, UNIFORM_TANGENT_MORPH_TEXTURE_BINDING } from '../../core/pipeline/define'; import { warn } from '../../core/platform/debug'; import { Morph, MorphRendering, MorphRenderingInstance, SubMeshMorph } from './morph'; import { assertIsNonNullable, assertIsTrue } from '../../core/data/utils/asserts'; import { log2, nextPow2 } from '../../core/math/bits'; import { IMacroPatch } from '../../core/renderer'; import { legacyCC } from '../../core/global-exports'; import { PixelFormat } from '../../core/assets/asset-enum'; /** * True if force to use cpu computing based sub-mesh rendering. */ const preferCpuComputing = false; /** * Standard morph rendering. * The standard morph rendering renders each of sub-mesh morph separately. * Sub-mesh morph rendering may select different technique according sub-mesh morph itself. 
 */
export class StdMorphRendering implements MorphRendering {
    private _mesh: Mesh;
    // One rendering per sub-mesh; null where the sub-mesh has no morph.
    private _subMeshRenderings: (SubMeshMorphRendering | null)[] = [];

    constructor (mesh: Mesh, gfxDevice: Device) {
        this._mesh = mesh;
        if (!this._mesh.struct.morph) {
            return;
        }
        const nSubMeshes = this._mesh.struct.primitives.length;
        this._subMeshRenderings = new Array(nSubMeshes).fill(null);
        for (let iSubMesh = 0; iSubMesh < nSubMeshes; ++iSubMesh) {
            const subMeshMorph = this._mesh.struct.morph.subMeshMorphs[iSubMesh];
            if (!subMeshMorph) {
                continue;
            }
            // CPU path is used when forced, or when the target count exceeds what
            // the morph UBO can address on the GPU path.
            if (preferCpuComputing || subMeshMorph.targets.length > UBOMorph.MAX_MORPH_TARGET_COUNT) {
                this._subMeshRenderings[iSubMesh] = new CpuComputing(
                    this._mesh,
                    iSubMesh,
                    this._mesh.struct.morph,
                    gfxDevice,
                );
            } else {
                this._subMeshRenderings[iSubMesh] = new GpuComputing(
                    this._mesh,
                    iSubMesh,
                    this._mesh.struct.morph,
                    gfxDevice,
                );
            }
        }
    }

    public createInstance (): MorphRenderingInstance {
        const nSubMeshes = this._mesh.struct.primitives.length;
        const subMeshInstances: (SubMeshMorphRenderingInstance | null)[] = new Array(nSubMeshes);
        for (let iSubMesh = 0; iSubMesh < nSubMeshes; ++iSubMesh) {
            subMeshInstances[iSubMesh] = this._subMeshRenderings[iSubMesh]?.createInstance() ?? null;
        }
        return {
            setWeights (subMeshIndex: number, weights: number[]) {
                subMeshInstances[subMeshIndex]?.setWeights(weights);
            },
            requiredPatches: (subMeshIndex: number) => {
                assertIsNonNullable(this._mesh.struct.morph);
                const subMeshMorph = this._mesh.struct.morph.subMeshMorphs[subMeshIndex];
                const subMeshRenderingInstance = subMeshInstances[subMeshIndex];
                if (subMeshRenderingInstance === null) {
                    return null;
                }
                assertIsNonNullable(subMeshMorph);
                const patches: IMacroPatch[] = [
                    { name: 'CC_USE_MORPH', value: true },
                    { name: 'CC_MORPH_TARGET_COUNT', value: subMeshMorph.targets.length },
                ];
                if (subMeshMorph.attributes.includes(AttributeName.ATTR_POSITION)) {
                    patches.push({ name: 'CC_MORPH_TARGET_HAS_POSITION', value: true });
                }
                if (subMeshMorph.attributes.includes(AttributeName.ATTR_NORMAL)) {
                    patches.push({ name: 'CC_MORPH_TARGET_HAS_NORMAL', value: true });
                }
                if (subMeshMorph.attributes.includes(AttributeName.ATTR_TANGENT)) {
                    patches.push({ name: 'CC_MORPH_TARGET_HAS_TANGENT', value: true });
                }
                // delegate technique-specific defines to the sub-mesh instance
                patches.push(...subMeshRenderingInstance.requiredPatches());
                return patches;
            },
            adaptPipelineState: (subMeshIndex: number, descriptorSet: DescriptorSet) => {
                subMeshInstances[subMeshIndex]?.adaptPipelineState(descriptorSet);
            },
            destroy: () => {
                for (const subMeshInstance of subMeshInstances) {
                    subMeshInstance?.destroy();
                }
            },
        };
    }
}

/**
 * Describes how to render a sub-mesh morph.
 */
interface SubMeshMorphRendering {
    /**
     * Creates a rendering instance.
     */
    createInstance (): SubMeshMorphRenderingInstance;
}

/**
 * The instance of once sub-mesh morph rendering.
 */
interface SubMeshMorphRenderingInstance {
    /**
     * Set weights of each morph target.
     * @param weights The weights.
     */
    setWeights (weights: number[]): void;

    /**
     * Asks the define overrides needed to do the rendering.
     */
    requiredPatches (): IMacroPatch[];

    /**
     * Adapts the pipelineState to apply the rendering.
     * @param pipelineState
     */
    adaptPipelineState (descriptorSet: DescriptorSet): void;

    /**
     * Destroy this instance.
     */
    destroy (): void;
}

/**
 * (General purpose) Gpu computing based sub-mesh morph rendering.
 * This technique computes final attribute displacements on GPU.
 * Target displacements of each attribute are transferred through vertex texture, say, morph texture.
 */
class GpuComputing implements SubMeshMorphRendering {
    private _gfxDevice: Device;
    private _subMeshMorph: SubMeshMorph;
    private _textureInfo: {
        width: number;
        height: number;
    };
    // One morph texture per morphed vertex attribute (position/normal/tangent).
    private _attributes: {
        name: string;
        morphTexture: MorphTexture;
    }[];
    private _verticesCount: number;

    constructor (mesh: Mesh, subMeshIndex: number, morph: Morph, gfxDevice: Device) {
        this._gfxDevice = gfxDevice;
        const subMeshMorph = morph.subMeshMorphs[subMeshIndex];
        assertIsNonNullable(subMeshMorph);
        this._subMeshMorph = subMeshMorph;

        enableVertexId(mesh, subMeshIndex, gfxDevice);

        const nVertices = mesh.struct.vertexBundles[mesh.struct.primitives[subMeshIndex].vertexBundelIndices[0]].view.count;
        this._verticesCount = nVertices;
        const nTargets = subMeshMorph.targets.length;
        // one vec4 texel per vertex per target; all targets stacked in one texture
        const vec4Required = nVertices * nTargets;

        const vec4TextureFactory = createVec4TextureFactory(gfxDevice, vec4Required);
        this._textureInfo = {
            width: vec4TextureFactory.width,
            height: vec4TextureFactory.height,
        };

        // Creates texture for each attribute.
        this._attributes = subMeshMorph.attributes.map((attributeName, attributeIndex) => {
            const vec4Tex = vec4TextureFactory.create();
            const valueView = vec4Tex.valueView;
            // if (DEV) { // Make it easy to view texture in profilers...
            //     for (let i = 0; i < valueView.length / 4; ++i) {
            //         valueView[i * 4 + 3] = 1.0;
            //     }
            // }
            subMeshMorph.targets.forEach((morphTarget, morphTargetIndex) => {
                const displacementsView = morphTarget.displacements[attributeIndex];
                // xyz displacements stored tightly packed (3 floats/vertex) in the mesh blob
                const displacements = new Float32Array(mesh.data.buffer, mesh.data.byteOffset + displacementsView.offset, displacementsView.count);
                const displacementsOffset = (nVertices * morphTargetIndex) * 4;
                for (let iVertex = 0; iVertex < nVertices; ++iVertex) {
                    // expand vec3 displacement into vec4 texel (w left untouched)
                    valueView[displacementsOffset + 4 * iVertex + 0] = displacements[3 * iVertex + 0];
                    valueView[displacementsOffset + 4 * iVertex + 1] = displacements[3 * iVertex + 1];
                    valueView[displacementsOffset + 4 * iVertex + 2] = displacements[3 * iVertex + 2];
                }
            });
            vec4Tex.updatePixels();
            return {
                name: attributeName,
                morphTexture: vec4Tex,
            };
        });
    }

    public destroy () {
        for (const attribute of this._attributes) {
            attribute.morphTexture.destroy();
        }
    }

    public createInstance () {
        // Per-instance uniforms; textures are shared with all instances of this rendering.
        const morphUniforms = new MorphUniforms(this._gfxDevice, this._subMeshMorph.targets.length);
        morphUniforms.setMorphTextureInfo(this._textureInfo.width, this._textureInfo.height);
        morphUniforms.setVerticesCount(this._verticesCount);
        morphUniforms.commit();
        return {
            setWeights: (weights: number[]) => {
                morphUniforms.setWeights(weights);
                morphUniforms.commit();
            },
            requiredPatches: (): IMacroPatch[] => [{ name: 'CC_MORPH_TARGET_USE_TEXTURE', value: true }],
            adaptPipelineState: (descriptorSet: DescriptorSet) => {
                for (const attribute of this._attributes) {
                    let binding: number | undefined;
                    switch (attribute.name) {
                    case AttributeName.ATTR_POSITION: binding = UNIFORM_POSITION_MORPH_TEXTURE_BINDING; break;
                    case AttributeName.ATTR_NORMAL: binding = UNIFORM_NORMAL_MORPH_TEXTURE_BINDING; break;
                    case AttributeName.ATTR_TANGENT: binding = UNIFORM_TANGENT_MORPH_TEXTURE_BINDING; break;
                    default:
                        warn('Unexpected attribute!'); break;
                    }
                    if (binding !== undefined) {
                        descriptorSet.bindSampler(binding, attribute.morphTexture.sampler);
                        descriptorSet.bindTexture(binding, attribute.morphTexture.texture);
                    }
                }
                descriptorSet.bindBuffer(UBOMorph.BINDING, morphUniforms.buffer);
                descriptorSet.update();
            },
            // textures are owned by GpuComputing, not the instance, so nothing to release here
            destroy: () => {
            },
        };
    }
}

/**
 * Cpu computing based sub-mesh morph rendering.
 * This technique computes final attribute displacements on CPU.
 * The displacements, then, are passed to GPU.
 */
class CpuComputing implements SubMeshMorphRendering {
    private _gfxDevice: Device;
    private _attributes: {
        name: string;
        targets: {
            displacements: Float32Array;
        }[];
    }[] = [];

    constructor (mesh: Mesh, subMeshIndex: number, morph: Morph, gfxDevice: Device) {
        this._gfxDevice = gfxDevice;
        const subMeshMorph = morph.subMeshMorphs[subMeshIndex];
        assertIsNonNullable(subMeshMorph);
        enableVertexId(mesh, subMeshIndex, gfxDevice);
        // Views directly into the mesh blob; no copies are made here.
        this._attributes = subMeshMorph.attributes.map((attributeName, attributeIndex) => ({
            name: attributeName,
            targets: subMeshMorph.targets.map((attributeDisplacement) => ({
                displacements: new Float32Array(
                    mesh.data.buffer,
                    mesh.data.byteOffset + attributeDisplacement.displacements[attributeIndex].offset,
                    attributeDisplacement.displacements[attributeIndex].count,
                ),
            })),
        }));
    }

    /**
     * DO NOT use this field.
     */
    get data () {
        return this._attributes;
    }

    public createInstance () {
        // NOTE(review): assumes at least one attribute with at least one target exists;
        // _attributes[0].targets[0] would throw otherwise — confirm callers guarantee this.
        return new CpuComputingRenderingInstance(
            this,
            this._attributes[0].targets[0].displacements.length / 3,
            this._gfxDevice,
        );
    }
}

class CpuComputingRenderingInstance implements SubMeshMorphRenderingInstance {
    private _attributes: {
        attributeName: string;
        morphTexture: MorphTexture;
    }[];
    private _owner: CpuComputing;
    private _morphUniforms: MorphUniforms;

    public constructor (owner: CpuComputing, nVertices: number, gfxDevice: Device) {
        this._owner = owner;
        // NOTE(review): targetCount 0 means MorphUniforms.setWeights would assert on
        // any non-empty weights; this instance never forwards weights to the UBO,
        // so it is unused — but the `TODO?` marker suggests this is unresolved.
        this._morphUniforms = new MorphUniforms(gfxDevice, 0 /* TODO? */);

        const vec4TextureFactory = createVec4TextureFactory(gfxDevice, nVertices);
        this._morphUniforms.setMorphTextureInfo(vec4TextureFactory.width, vec4TextureFactory.height);
        this._morphUniforms.commit();

        // one texture per attribute; attributeIndex is unused here
        this._attributes = this._owner.data.map((attributeMorph, attributeIndex) => {
            const morphTexture = vec4TextureFactory.create();
            return {
                attributeName: attributeMorph.name,
                morphTexture,
            };
        });
    }

    public setWeights (weights: number[]) {
        for (let iAttribute = 0; iAttribute < this._attributes.length; ++iAttribute) {
            const myAttribute = this._attributes[iAttribute];
            const valueView = myAttribute.morphTexture.valueView;
            const attributeMorph = this._owner.data[iAttribute];
            assertIsTrue(weights.length === attributeMorph.targets.length);
            for (let iTarget = 0; iTarget < attributeMorph.targets.length; ++iTarget) {
                const targetDisplacements = attributeMorph.targets[iTarget].displacements;
                const weight = weights[iTarget];
                const nVertices = targetDisplacements.length / 3;
                // first target overwrites (also clears stale data); later targets accumulate,
                // skipping zero weights as a fast path
                if (iTarget === 0) {
                    for (let iVertex = 0; iVertex < nVertices; ++iVertex) {
                        valueView[4 * iVertex + 0] = targetDisplacements[3 * iVertex + 0] * weight;
                        valueView[4 * iVertex + 1] = targetDisplacements[3 * iVertex + 1] * weight;
                        valueView[4 * iVertex + 2] = targetDisplacements[3 * iVertex + 2] * weight;
                    }
                } else if (weight !== 0.0) {
                    for (let iVertex = 0; iVertex < nVertices; ++iVertex) {
                        valueView[4 * iVertex + 0] += targetDisplacements[3 * iVertex + 0] * weight;
                        valueView[4 * iVertex + 1] += targetDisplacements[3 * iVertex + 1] * weight;
                        valueView[4 * iVertex + 2] += targetDisplacements[3 * iVertex + 2] * weight;
                    }
                }
            }
            myAttribute.morphTexture.updatePixels();
        }
    }

    public requiredPatches (): IMacroPatch[] {
        return [
            { name: 'CC_MORPH_TARGET_USE_TEXTURE', value: true },
            { name: 'CC_MORPH_PRECOMPUTED', value: true },
        ];
    }

    public adaptPipelineState (descriptorSet: DescriptorSet) {
        for (const attribute of this._attributes) {
            const attributeName = attribute.attributeName;
            let binding: number | undefined;
            switch (attributeName) {
            case AttributeName.ATTR_POSITION: binding = UNIFORM_POSITION_MORPH_TEXTURE_BINDING; break;
            case AttributeName.ATTR_NORMAL: binding = UNIFORM_NORMAL_MORPH_TEXTURE_BINDING; break;
            case AttributeName.ATTR_TANGENT: binding = UNIFORM_TANGENT_MORPH_TEXTURE_BINDING; break;
            default:
                warn('Unexpected attribute!'); break;
            }
            if (binding !== undefined) {
                descriptorSet.bindSampler(binding, attribute.morphTexture.sampler);
                descriptorSet.bindTexture(binding, attribute.morphTexture.texture);
            }
        }
        descriptorSet.bindBuffer(UBOMorph.BINDING, this._morphUniforms.buffer);
        descriptorSet.update();
    }

    public destroy () {
        this._morphUniforms.destroy();
        // unlike the GPU path, this instance owns its textures and must release them
        for (let iAttribute = 0; iAttribute < this._attributes.length; ++iAttribute) {
            const myAttribute = this._attributes[iAttribute];
            myAttribute.morphTexture.destroy();
        }
    }
}

/**
 * Provides the access to morph related uniforms.
 */
class MorphUniforms {
    private _targetCount: number;
    // CPU-side staging copy; commit() uploads it to the GFX buffer.
    private _localBuffer: DataView;
    private _remoteBuffer: Buffer;

    constructor (gfxDevice: Device, targetCount: number) {
        this._targetCount = targetCount;
        this._localBuffer = new DataView(new ArrayBuffer(UBOMorph.SIZE));
        this._remoteBuffer = gfxDevice.createBuffer(new BufferInfo(
            BufferUsageBit.UNIFORM | BufferUsageBit.TRANSFER_DST,
            MemoryUsageBit.HOST | MemoryUsageBit.DEVICE,
            UBOMorph.SIZE,
            UBOMorph.SIZE,
        ));
    }

    public destroy () {
        this._remoteBuffer.destroy();
    }

    public get buffer () {
        return this._remoteBuffer;
    }

    public setWeights (weights: number[]) {
        assertIsTrue(weights.length === this._targetCount);
        for (let iWeight = 0; iWeight < weights.length; ++iWeight) {
            this._localBuffer.setFloat32(UBOMorph.OFFSET_OF_WEIGHTS + 4 * iWeight, weights[iWeight], legacyCC.sys.isLittleEndian);
        }
    }

    public setMorphTextureInfo (width: number, height: number) {
        this._localBuffer.setFloat32(UBOMorph.OFFSET_OF_DISPLACEMENT_TEXTURE_WIDTH, width, legacyCC.sys.isLittleEndian);
        this._localBuffer.setFloat32(UBOMorph.OFFSET_OF_DISPLACEMENT_TEXTURE_HEIGHT, height, legacyCC.sys.isLittleEndian);
    }

    public setVerticesCount (count: number) {
        this._localBuffer.setFloat32(UBOMorph.OFFSET_OF_VERTICES_COUNT, count, legacyCC.sys.isLittleEndian);
    }

    public commit () {
        this._remoteBuffer.update(this._localBuffer.buffer);
    }
}

/**
 *
 * @param gfxDevice
 * @param vec4Capacity Capacity of vec4.
 */
function createVec4TextureFactory (gfxDevice: Device, vec4Capacity: number) {
    const hasFeatureFloatTexture = gfxDevice.hasFeature(Feature.TEXTURE_FLOAT);

    let pixelRequired: number;
    let pixelFormat: PixelFormat;
    let pixelBytes: number;
    let UpdateViewConstructor: typeof Float32Array | typeof Uint8Array;
    if (hasFeatureFloatTexture) {
        pixelRequired = vec4Capacity;
        pixelBytes = 16;
        pixelFormat = Texture2D.PixelFormat.RGBA32F;
        UpdateViewConstructor = Float32Array;
    } else {
        // NOTE(review): in this RGBA8888 fallback, valueView is still a Float32Array
        // over the SAME buffer as the Uint8Array updateView — float writes are then
        // uploaded as raw bytes, not converted to 0..255. Confirm this encoding is
        // intentional (matching shader-side decode) and not a latent bug.
        pixelRequired = 4 * vec4Capacity;
        pixelBytes = 4;
        pixelFormat = Texture2D.PixelFormat.RGBA8888;
        UpdateViewConstructor = Uint8Array;
    }

    const { width, height } = bestSizeToHavePixels(pixelRequired);
    assertIsTrue(width * height >= pixelRequired);

    return {
        width,
        height,
        create: () => {
            const arrayBuffer = new ArrayBuffer(width * height * pixelBytes);
            const valueView = new Float32Array(arrayBuffer);
            const updateView = UpdateViewConstructor === Float32Array ? valueView : new UpdateViewConstructor(arrayBuffer);
            const image = new ImageAsset({
                width,
                height,
                _data: updateView,
                _compressed: false,
                format: pixelFormat,
            });
            const textureAsset = new Texture2D();
            textureAsset.setFilters(Texture2D.Filter.NEAREST, Texture2D.Filter.NEAREST);
            textureAsset.setMipFilter(Texture2D.Filter.NONE);
            textureAsset.setWrapMode(Texture2D.WrapMode.CLAMP_TO_EDGE, Texture2D.WrapMode.CLAMP_TO_EDGE, Texture2D.WrapMode.CLAMP_TO_EDGE);
            textureAsset.image = image;
            if (!textureAsset.getGFXTexture()) {
                warn('Unexpected: failed to create morph texture?');
            }
            const sampler = gfxDevice.getSampler(textureAsset.getSamplerInfo());
            return {
                /**
                 * Gets the GFX texture.
                 */
                get texture () {
                    return textureAsset.getGFXTexture()!;
                },

                /**
                 * Gets the GFX sampler.
                 */
                get sampler () {
                    return sampler;
                },

                /**
                 * Value view.
                 */
                get valueView () {
                    return valueView;
                },

                /**
                 * Destroy the texture. Release its GPU resources.
                 */
                destroy () {
                    textureAsset.destroy();
                    // Samplers allocated from `samplerLib` are not required and
                    // should not be destroyed.
                    // this._sampler.destroy();
                },

                /**
                 * Update the pixels content to `valueView`.
                 */
                updatePixels () {
                    textureAsset.uploadData(updateView);
                },
            };
        },
    };
}

type MorphTexture = ReturnType<ReturnType<typeof createVec4TextureFactory>['create']>;

/**
 * When use vertex-texture-fetch technique, we do need
 * `gl_vertexId` when we sample per-vertex data.
 * WebGL 1.0 does not have `gl_vertexId`; WebGL 2.0, however, does.
 * @param mesh
 * @param subMeshIndex
 * @param gfxDevice
 */
function enableVertexId (mesh: Mesh, subMeshIndex: number, gfxDevice: Device) {
    mesh.renderingSubMeshes[subMeshIndex].enableVertexIdChannel(gfxDevice);
}

/**
 * Decides a best texture size to have the specified pixel capacity at least.
 * The decided width and height has the following characteristics:
 * - the width and height are both power of 2;
 * - if the width and height are different, the width would be set to the larger once;
 * - the width is ensured to be multiple of 4.
 * @param nPixels Least pixel capacity.
 */
function bestSizeToHavePixels (nPixels: number) {
    // floor of 5 guarantees aligned >= 8, hence width >= 4 (the multiple-of-4 constraint)
    if (nPixels < 5) {
        nPixels = 5;
    }
    const aligned = nextPow2(nPixels);
    const epxSum = log2(aligned);
    const h = epxSum >> 1;
    const w = (epxSum & 1) ? (h + 1) : h;
    return {
        width: 1 << w,
        height: 1 << h,
    };
}
the_stack
import fs from 'fs' import * as path from 'path' import { setup } from '../testUtil' const fsPromises = fs.promises const baseDir = path.join(__dirname, '..', '..', 'test', 'data') const simpleBam = path.join(baseDir, 'simple.bam') const simpleBai = path.join(baseDir, 'simple.bai') const testConfig = path.join(baseDir, 'test_config.json') async function initctx(ctx: { dir: string }) { await fsPromises.copyFile(testConfig, path.join(ctx.dir, 'config.json')) } async function init2bit(ctx: { dir: string }) { await fsPromises.copyFile( path.join(baseDir, 'simple.2bit'), path.join(ctx.dir, 'simple.2bit'), ) } async function readConf(ctx: { dir: string }) { return fsPromises.readFile(path.join(ctx.dir, 'config.json'), { encoding: 'utf8', }) } describe('add-track', () => { setup.command(['add-track']).exit(2).it('fails if no track is specified') setup .command(['add-track', simpleBam]) .exit(110) .it('fails if load flag isnt passed in for a localFile') setup .command([ 'add-track', 'https://mysite.com/data/simple.bam', '--load', 'inPlace', ]) .exit(100) .it('fails if URL with load flag is passed') setup .do(initctx) .command(['add-track', simpleBam, '--load', 'copy']) .command(['add-track', simpleBam, '--load', 'copy']) .exit(160) .it('cannot add a track with the same track id') setup .do(initctx) .command(['add-track', simpleBam, '--load', 'symlink']) .command(['add-track', simpleBam, '--load', 'symlink', '--force']) .it('use force to overwrite a symlink') setup .do(initctx) .command(['add-track', simpleBam, '--load', 'copy']) .command(['add-track', simpleBam, '--load', 'copy', '--force']) .it('use force to overwrite a copied file') // setting up a test for move difficult currently, because it would literally // move the file in our test data... 
// setup // .do(initctx) // .do(async ctx => { // await fsPromises.copyFile(simpleBam, path.join(ctx.dir, 'new.bam')) // await fsPromises.copyFile(simpleBai, path.join(ctx.dir, 'new.bam.bai')) // }) // .command(['add-track', 'new.bam', '--load', 'move']) // .command(['add-track', 'new.bam', '--load', 'move', '--force']) // .it('use force to overwrite a moved file') setup .command(['add-track', simpleBam, '--load', 'copy']) .catch(/no such file or directory/) .it('cannot add a track if there is no config file') setup .do(initctx) .do(ctx => { return fsPromises.writeFile( path.join(ctx.dir, 'config.json'), '{"assemblies":[]}', ) }) .command(['add-track', simpleBam, '--load', 'copy']) .exit(150) .it('fails if it cannot assume the assemblyname') setup .do(initctx) .command(['add-track', simpleBam, '--load', 'copy']) .it('adds a track', async ctx => { const contents = await readConf(ctx) expect(fs.existsSync(path.join(ctx.dir, 'simple.bam'))).toBeTruthy() expect(fs.existsSync(path.join(ctx.dir, 'simple.bam.bai'))).toBeTruthy() expect(JSON.parse(contents).tracks).toEqual([ { type: 'AlignmentsTrack', trackId: 'simple', name: 'simple', assemblyNames: ['testAssembly'], adapter: { type: 'BamAdapter', bamLocation: { uri: 'simple.bam', locationType: 'UriLocation', }, index: { indexType: 'BAI', location: { uri: 'simple.bam.bai', locationType: 'UriLocation', }, }, sequenceAdapter: { type: 'testSeqAdapter', twoBitLocation: { uri: 'test.2bit', locationType: 'UriLocation', }, }, }, }, ]) }) setup .do(initctx) .command(['add-track', '/testing/in/place.bam', '--load', 'inPlace']) .it('adds a track with load inPlace', async ctx => { const contents = await readConf(ctx) expect(JSON.parse(contents).tracks).toEqual([ { type: 'AlignmentsTrack', trackId: 'place', name: 'place', assemblyNames: ['testAssembly'], adapter: { type: 'BamAdapter', bamLocation: { uri: '/testing/in/place.bam', locationType: 'UriLocation', }, index: { indexType: 'BAI', location: { uri: '/testing/in/place.bam.bai', 
locationType: 'UriLocation', }, }, sequenceAdapter: { type: 'testSeqAdapter', twoBitLocation: { uri: 'test.2bit', locationType: 'UriLocation', }, }, }, }, ]) }) setup .do(initctx) .command([ 'add-track', '/testing/in/place.bam', '--load', 'inPlace', '--indexFile', '/something/else/random.bai', ]) .it('adds a track with load inPlace', async ctx => { const contents = await readConf(ctx) expect(JSON.parse(contents).tracks).toEqual([ { type: 'AlignmentsTrack', trackId: 'place', name: 'place', assemblyNames: ['testAssembly'], adapter: { type: 'BamAdapter', bamLocation: { uri: '/testing/in/place.bam', locationType: 'UriLocation', }, index: { indexType: 'BAI', location: { uri: '/something/else/random.bai', locationType: 'UriLocation', }, }, sequenceAdapter: { type: 'testSeqAdapter', twoBitLocation: { uri: 'test.2bit', locationType: 'UriLocation', }, }, }, }, ]) }) setup .do(initctx) .command([ 'add-track', simpleBam, '--load', 'copy', '--indexFile', simpleBai, ]) .it('adds a track', async ctx => { const contents = await readConf(ctx) expect(fs.existsSync(path.join(ctx.dir, 'simple.bam'))).toBeTruthy() expect(fs.existsSync(path.join(ctx.dir, 'simple.bai'))).toBeTruthy() expect(JSON.parse(contents).tracks).toEqual([ { type: 'AlignmentsTrack', trackId: 'simple', name: 'simple', assemblyNames: ['testAssembly'], adapter: { type: 'BamAdapter', bamLocation: { uri: 'simple.bam', locationType: 'UriLocation', }, index: { indexType: 'BAI', location: { uri: 'simple.bai', locationType: 'UriLocation', }, }, sequenceAdapter: { type: 'testSeqAdapter', twoBitLocation: { uri: 'test.2bit', locationType: 'UriLocation', }, }, }, }, ]) }) setup .do(initctx) .command(['add-track', simpleBam, '--load', 'copy', '--subDir', 'bam']) .it('adds a track with subDir', async ctx => { const contents = await readConf(ctx) expect(JSON.parse(contents).tracks).toEqual([ { type: 'AlignmentsTrack', trackId: 'simple', name: 'simple', assemblyNames: ['testAssembly'], adapter: { type: 'BamAdapter', bamLocation: { 
uri: 'bam/simple.bam', locationType: 'UriLocation', }, index: { indexType: 'BAI', location: { uri: 'bam/simple.bam.bai', locationType: 'UriLocation', }, }, sequenceAdapter: { type: 'testSeqAdapter', twoBitLocation: { uri: 'test.2bit', locationType: 'UriLocation', }, }, }, }, ]) }) setup .do(initctx) .command([ 'add-track', simpleBam, '--load', 'copy', '--protocol', 'localPath', '--subDir', 'bam', ]) .it('adds a track with subDir', async ctx => { const contents = await readConf(ctx) expect(JSON.parse(contents).tracks).toEqual([ { type: 'AlignmentsTrack', trackId: 'simple', name: 'simple', assemblyNames: ['testAssembly'], adapter: { type: 'BamAdapter', bamLocation: { localPath: 'bam/simple.bam', locationType: 'LocalPathLocation', }, index: { indexType: 'BAI', location: { localPath: 'bam/simple.bam.bai', locationType: 'LocalPathLocation', }, }, sequenceAdapter: { type: 'testSeqAdapter', twoBitLocation: { uri: 'test.2bit', locationType: 'UriLocation', }, }, }, }, ]) }) setup .do(initctx) .command([ 'add-track', simpleBam, '--load', 'copy', '--name', 'customName', '--trackId', 'customTrackId', '--description', 'new description', '--trackType', 'CustomTrackType', '--category', 'newcategory', '--assemblyNames', 'customAssemblyName', '--config', '{"defaultRendering": "test"}', ]) .it('adds a track with all the custom fields', async ctx => { const contents = await readConf(ctx) expect(JSON.parse(contents).tracks).toEqual([ { type: 'CustomTrackType', trackId: 'customTrackId', name: 'customName', description: 'new description', category: ['newcategory'], assemblyNames: ['customAssemblyName'], adapter: { type: 'BamAdapter', bamLocation: { uri: 'simple.bam', locationType: 'UriLocation', }, index: { indexType: 'BAI', location: { uri: 'simple.bam.bai', locationType: 'UriLocation', }, }, }, defaultRendering: 'test', }, ]) }) setup .do(initctx) .command(['add-track', 'https://mysite.com/data/simple.bam']) .it('adds a track from a url', async ctx => { const contents = await 
readConf(ctx) expect(JSON.parse(contents).tracks).toEqual([ { type: 'AlignmentsTrack', trackId: 'simple', name: 'simple', assemblyNames: ['testAssembly'], adapter: { type: 'BamAdapter', bamLocation: { uri: 'https://mysite.com/data/simple.bam', locationType: 'UriLocation', }, index: { indexType: 'BAI', location: { uri: 'https://mysite.com/data/simple.bam.bai', locationType: 'UriLocation', }, }, sequenceAdapter: { type: 'testSeqAdapter', twoBitLocation: { uri: 'test.2bit', locationType: 'UriLocation', }, }, }, }, ]) }) // fails when there is more than one assembly and none is specified on the // command line setup .do(initctx) .do(init2bit) .command(['add-assembly', 'simple.2bit', '--load', 'copy']) .command(['add-track', simpleBam, '--load', 'copy']) .exit(2) .it('fails multiple assemblies exist but no assemblyNames passed') // fails when there is more than one assembly and none is specified on the // command line setup .do(initctx) .do(init2bit) .command(['add-assembly', 'simple.2bit', '--load', 'copy']) .command([ 'add-track', simpleBam, '--load', 'copy', '--assemblyNames', 'testAssembly', ]) .it('adds a track to a config with multiple assemblies', async ctx => { const contents = await readConf(ctx) expect(JSON.parse(contents).tracks).toEqual([ { type: 'AlignmentsTrack', trackId: 'simple', name: 'simple', assemblyNames: ['testAssembly'], adapter: { type: 'BamAdapter', bamLocation: { uri: 'simple.bam', locationType: 'UriLocation', }, index: { indexType: 'BAI', location: { uri: 'simple.bam.bai', locationType: 'UriLocation', }, }, sequenceAdapter: { twoBitLocation: { uri: 'test.2bit', locationType: 'UriLocation', }, type: 'testSeqAdapter', }, }, }, ]) }) })
the_stack
module uk.co.senab.photoview {
    import Matrix = android.graphics.Matrix;
    import Canvas = android.graphics.Canvas;
    import RectF = android.graphics.RectF;
    import GestureDetector = android.view.GestureDetector;
    import View = android.view.View;
    import ImageView = android.widget.ImageView;
    import PhotoView = uk.co.senab.photoview.PhotoView;
    import PhotoViewAttacher = uk.co.senab.photoview.PhotoViewAttacher;

    /**
     * Public contract of a photo view: zooming, panning, rotation, scale
     * presets, and registration of tap / matrix-change listeners.
     *
     * Several members exist in deprecated (`Min`/`Mid`/`Max`) and current
     * (`Minimum`/`Medium`/`Maximum`) pairs; the deprecated ones are kept for
     * backwards compatibility and will be removed in a future release.
     */
    export interface IPhotoView {

        /**
         * Returns true if the PhotoView is set to allow zooming of Photos.
         *
         * @return true if the PhotoView allows zooming.
         */
        canZoom(): boolean;

        /**
         * Gets the Display Rectangle of the currently displayed Drawable.
         * The Rectangle is relative to this View and includes all scaling
         * and translations.
         *
         * @return RectF of the displayed Drawable
         */
        getDisplayRect(): RectF;

        /**
         * Sets the Display Matrix of the currently displayed Drawable.
         * The Matrix is relative to this View and includes all scaling and
         * translations.
         *
         * @param finalMatrix target matrix to set the PhotoView to
         * @return true if the matrix was applied successfully
         */
        setDisplayMatrix(finalMatrix: Matrix): boolean;

        /**
         * Gets the Display Matrix of the currently displayed Drawable.
         *
         * @return the current display Matrix
         */
        getDisplayMatrix(): Matrix;

        /**
         * @deprecated use {@link #getMinimumScale()} instead; this will be
         * removed in a future release.
         *
         * @return The current minimum scale level. What this value represents
         * depends on the current {@link ImageView.ScaleType}.
         */
        getMinScale(): number;

        /**
         * @return The current minimum scale level. What this value represents
         * depends on the current {@link ImageView.ScaleType}.
         */
        getMinimumScale(): number;

        /**
         * @deprecated use {@link #getMediumScale()} instead; this will be
         * removed in a future release.
         *
         * @return The current middle scale level. What this value represents
         * depends on the current {@link ImageView.ScaleType}.
         */
        getMidScale(): number;

        /**
         * @return The current medium scale level. What this value represents
         * depends on the current {@link ImageView.ScaleType}.
         */
        getMediumScale(): number;

        /**
         * @deprecated use {@link #getMaximumScale()} instead; this will be
         * removed in a future release.
         *
         * @return The current maximum scale level. What this value represents
         * depends on the current {@link ImageView.ScaleType}.
         */
        getMaxScale(): number;

        /**
         * @return The current maximum scale level. What this value represents
         * depends on the current {@link ImageView.ScaleType}.
         */
        getMaximumScale(): number;

        /**
         * Returns the current scale value.
         *
         * @return float - current scale value
         */
        getScale(): number;

        /**
         * Return the current scale type in use by the ImageView.
         *
         * @return current ImageView.ScaleType
         */
        getScaleType(): ImageView.ScaleType;

        /**
         * Whether to allow the ImageView's parent to intercept the touch
         * event when the photo is scrolled to its horizontal edge.
         *
         * @param allow whether to allow intercepting by the parent element
         */
        setAllowParentInterceptOnEdge(allow: boolean): void;

        /**
         * @deprecated use {@link #setMinimumScale(float minimumScale)}
         * instead; this will be removed in a future release.
         *
         * @param minScale minimum allowed scale
         */
        setMinScale(minScale: number): void;

        /**
         * Sets the minimum scale level. What this value represents depends on
         * the current {@link ImageView.ScaleType}.
         *
         * @param minimumScale minimum allowed scale
         */
        setMinimumScale(minimumScale: number): void;

        /**
         * @deprecated use {@link #setMediumScale(float mediumScale)} instead;
         * this will be removed in a future release.
         *
         * @param midScale medium scale preset
         */
        setMidScale(midScale: number): void;

        /**
         * Sets the medium scale level. What this value represents depends on
         * the current {@link android.widget.ImageView.ScaleType}.
         *
         * @param mediumScale medium scale preset
         */
        setMediumScale(mediumScale: number): void;

        /**
         * @deprecated use {@link #setMaximumScale(float maximumScale)}
         * instead; this will be removed in a future release.
         *
         * @param maxScale maximum allowed scale preset
         */
        setMaxScale(maxScale: number): void;

        /**
         * Sets the maximum scale level. What this value represents depends on
         * the current {@link ImageView.ScaleType}.
         *
         * @param maximumScale maximum allowed scale preset
         */
        setMaximumScale(maximumScale: number): void;

        /**
         * Allows setting all three scale levels at once, so you don't run
         * into problems with setting the medium/minimum scale before the
         * maximum one.
         *
         * @param minimumScale minimum allowed scale
         * @param mediumScale medium allowed scale
         * @param maximumScale maximum allowed scale preset
         */
        setScaleLevels(minimumScale: number, mediumScale: number, maximumScale: number): void;

        /**
         * Register a callback to be invoked when the Photo displayed by this
         * view is long-pressed.
         *
         * @param listener - Listener to be registered.
         */
        setOnLongClickListener(listener: View.OnLongClickListener): void;

        /**
         * Register a callback to be invoked when the Matrix has changed for
         * this View, e.g. the user panning or scaling the Photo.
         *
         * @param listener - Listener to be registered.
         */
        setOnMatrixChangeListener(listener: PhotoViewAttacher.OnMatrixChangedListener): void;

        /**
         * Register a callback to be invoked when the Photo displayed by this
         * View is tapped with a single tap.
         *
         * @param listener - Listener to be registered.
         */
        setOnPhotoTapListener(listener: PhotoViewAttacher.OnPhotoTapListener): void;

        /**
         * Returns the listener invoked when the Photo displayed by this View
         * is tapped with a single tap.
         *
         * @return PhotoViewAttacher.OnPhotoTapListener currently set, may be null
         */
        getOnPhotoTapListener(): PhotoViewAttacher.OnPhotoTapListener;

        /**
         * Register a callback to be invoked when the View is tapped with a
         * single tap.
         *
         * @param listener - Listener to be registered.
         */
        setOnViewTapListener(listener: PhotoViewAttacher.OnViewTapListener): void;

        /**
         * Rotates the photo to an absolute angle via PhotoView internal
         * functions.
         *
         * @param rotationDegree - Degree to rotate PhotoView to, should be in range 0 to 360
         */
        setRotationTo(rotationDegree: number): void;

        /**
         * Rotates the photo by a relative angle via PhotoView internal
         * functions.
         *
         * @param rotationDegree - Degree to rotate PhotoView by, should be in range 0 to 360
         */
        setRotationBy(rotationDegree: number): void;

        /**
         * Returns the listener invoked when the View is tapped with a single
         * tap.
         *
         * @return PhotoViewAttacher.OnViewTapListener currently set, may be null
         */
        getOnViewTapListener(): PhotoViewAttacher.OnViewTapListener;

        /**
         * Changes the current scale to the specified value.
         *
         * @param scale - Value to scale to
         */
        setScale(scale: number): void;

        /**
         * Changes the current scale to the specified value.
         *
         * @param scale - Value to scale to
         * @param animate - Whether to animate the scale
         */
        setScale(scale: number, animate: boolean): void;

        /**
         * Changes the current scale to the specified value, around the given
         * focal point.
         *
         * @param scale - Value to scale to
         * @param focalX - X Focus Point
         * @param focalY - Y Focus Point
         * @param animate - Whether to animate the scale
         */
        setScale(scale: number, focalX: number, focalY: number, animate: boolean): void;

        /**
         * Controls how the image should be resized or moved to match the size
         * of the ImageView. Any scaling or panning will happen within the
         * confines of this {@link ImageView.ScaleType}.
         *
         * @param scaleType - The desired scaling mode.
         */
        setScaleType(scaleType: ImageView.ScaleType): void;

        /**
         * Allows you to enable/disable the zoom functionality on the
         * ImageView. When disabled the ImageView reverts to using the
         * FIT_CENTER matrix.
         *
         * @param zoomable - Whether the zoom functionality is enabled.
         */
        setZoomable(zoomable: boolean): void;

        /**
         * Enables rotation via PhotoView internal functions. Name is chosen
         * so it won't collide with View.setRotation(float) in API 11+.
         *
         * @param rotationDegree - Degree to rotate PhotoView to, should be in range 0 to 360
         * @deprecated use {@link #setRotationTo(float)}
         */
        setPhotoViewRotation(rotationDegree: number): void;

        /**
         * Extracts the currently visible area; if there is no image loaded
         * yet or the ImageView is already destroyed, returns {@code null}.
         *
         * NOTE(review): the prose says "Bitmap" but the declared return type
         * is Canvas — confirm against the implementation before relying on
         * either; the type is preserved here to keep the contract unchanged.
         *
         * @return currently visible area, or null
         */
        getVisibleRectangleBitmap(): Canvas;

        /**
         * Allows changing the zoom transition speed. Defaults to 200
         * (PhotoViewAttacher.DEFAULT_ZOOM_DURATION), also used when a
         * negative value is provided.
         *
         * @param milliseconds duration of zoom interpolation
         */
        setZoomTransitionDuration(milliseconds: number): void;

        /**
         * Returns the instance of IPhotoView (e.g. PhotoViewAttacher); can be
         * used to provide better integration.
         *
         * @return IPhotoView implementation instance if available, null if not
         */
        getIPhotoViewImplementation(): IPhotoView;

        /**
         * Sets a custom double-tap listener to intercept the default
         * behavior. To reset to the default, pass in "null" or the public
         * field PhotoViewAttacher.defaultOnDoubleTapListener.
         *
         * @param newOnDoubleTapListener custom OnDoubleTapListener to be set on ImageView
         */
        setOnDoubleTapListener(newOnDoubleTapListener: GestureDetector.OnDoubleTapListener): void;

        /**
         * Registers a listener that reports back about scale changes.
         *
         * @param onScaleChangeListener OnScaleChangeListener instance
         */
        setOnScaleChangeListener(onScaleChangeListener: PhotoViewAttacher.OnScaleChangeListener): void;
    }

    export module IPhotoView {
        // Default scale presets and zoom animation duration shared by
        // implementations (see setScaleLevels / setZoomTransitionDuration).
        export var DEFAULT_MAX_SCALE: number = 3.0;
        export var DEFAULT_MID_SCALE: number = 1.75;
        export var DEFAULT_MIN_SCALE: number = 1.0;
        export var DEFAULT_ZOOM_DURATION: number = 200;

        // Every member a duck-typed IPhotoView implementation must expose.
        // (The original chain tested `setScale` three times; once suffices.)
        const REQUIRED_MEMBERS = [
            'canZoom', 'getDisplayRect', 'setDisplayMatrix', 'getDisplayMatrix',
            'getMinScale', 'getMinimumScale', 'getMidScale', 'getMediumScale',
            'getMaxScale', 'getMaximumScale', 'getScale', 'getScaleType',
            'setAllowParentInterceptOnEdge', 'setMinScale', 'setMinimumScale',
            'setMidScale', 'setMediumScale', 'setMaxScale', 'setMaximumScale',
            'setScaleLevels', 'setOnLongClickListener', 'setOnMatrixChangeListener',
            'setOnPhotoTapListener', 'getOnPhotoTapListener', 'setOnViewTapListener',
            'setRotationTo', 'setRotationBy', 'getOnViewTapListener', 'setScale',
            'setScaleType', 'setZoomable', 'setPhotoViewRotation',
            'getVisibleRectangleBitmap', 'setZoomTransitionDuration',
            'getIPhotoViewImplementation', 'setOnDoubleTapListener',
            'setOnScaleChangeListener'
        ];

        /**
         * Duck-type check: does `obj` expose every IPhotoView member?
         *
         * Fix: the previous implementation returned the result of a long
         * `&&` chain — i.e. the last truthy member (a function reference) or
         * a falsy value — despite being declared `boolean`. It now always
         * returns a real boolean.
         *
         * @param obj candidate object (any shape; null/undefined allowed)
         * @return true iff obj is non-null and has every required member truthy
         */
        export function isImpl(obj: any): boolean {
            if (!obj) return false;
            return REQUIRED_MEMBERS.every((member) => Boolean(obj[member]));
        }
    }
}
the_stack
import connectionPool = require('../../../shared-lib/src/connectionPool') import util = require('util') import * as entities from '../../../shared-lib/src/entities' // update the date_updated field on the given record export async function updateDataFileUpdateDateByIdCommand(id: number) { const sql = 'UPDATE waze.data_files SET date_updated = now() WHERE id = $1'; let result = await connectionPool.getPool().query(sql, [id]); return; }; // insert the new data_file record export async function insertDataFileCommand(data_file: entities.DataFile): Promise<entities.DataFile> { const sql = `INSERT INTO waze.data_files (start_time_millis, end_time_millis, start_time, end_time, date_created, date_updated, file_name, json_hash) VALUES ($1, $2, $3, $4, now(), now(), $5, $6) RETURNING *`; let result = await connectionPool.getPool().query(sql, [ data_file.start_time_millis, data_file.end_time_millis, data_file.start_time, data_file.end_time, data_file.file_name, data_file.json_hash ]); //now that we have the result, let's set the previously unset values data_file.id = result.rows[0].id; data_file.date_created = result.rows[0].date_created; data_file.date_updated = result.rows[0].date_updated; return data_file; }; // upsert an alert record export async function upsertAlertCommand(alert: entities.Alert): Promise<void> { //for simplicity, we'll always insert and update all fields, since our hash should ensure there aren't unexpected changes //this is really more for when we discover later that waze added a new field, we add it in all the code, then reprocess those files //#region UPSERT SQL const sql = `INSERT INTO waze.alerts ( id, uuid, pub_millis, pub_utc_date, road_type, location, street, city, country, magvar, reliability, report_description, report_rating, confidence, type, subtype, report_by_municipality_user, thumbs_up, jam_uuid, datafile_id ) VALUES ( $1, -- id $2, -- uuid $3, -- pub_millis $4, -- pub_utc_date $5, -- road_type $6, -- location $7, -- street $8, -- city 
$9, -- country $10, -- magvar $11, -- reliability $12, -- report_description $13, -- report_rating $14, -- confidence $15, -- type $16, -- subtype $17, -- report_by_municipality_user $18, -- thumbs_up $19, -- jam_uuid $20 -- datafile_id ) ON CONFLICT (id) DO UPDATE SET uuid=$2, pub_millis=$3, pub_utc_date=$4, road_type=$5, location=$6, street=$7, city=$8, country=$9, magvar=$10, reliability=$11, report_description=$12, report_rating=$13, confidence=$14, type=$15, subtype=$16, report_by_municipality_user=$17, thumbs_up=$18, jam_uuid=$19, datafile_id=$20`; //#endregion let result = await connectionPool.getPool().query(sql, [ alert.id, //id alert.uuid, //uuid alert.pub_millis, //pub_millis alert.pub_utc_date, //pub_utc_date alert.road_type, //road_type alert.location , //location alert.street, //street alert.city, //city alert.country, //country alert.magvar , //magvar alert.reliability, //reliability alert.report_description, //report_description alert.report_rating, //report_rating alert.confidence, //confidence alert.type, //type alert.subtype, //subtype alert.report_by_municipality_user, //report_by_municipality_user alert.thumbs_up, //thumbs_up alert.jam_uuid , //jam_uuid alert.datafile_id, //datafile_id ]); //nothing currently to alter on the alert object based on SQL return return; } // upsert a jam record export async function upsertJamCommand(jam: entities.Jam): Promise<void> { //for simplicity, we'll always insert and update all fields, since our hash should ensure there aren't unexpected changes //this is really more for when we discover later that waze added a new field, we add it in all the code, then reprocess those files //#region UPSERT SQL const sql = `INSERT INTO waze.jams ( id, uuid, pub_millis, pub_utc_date, start_node, end_node, road_type, street, city, country, delay, speed, speed_kmh, length, turn_type, level, blocking_alert_id, line, datafile_id, type, turn_line ) VALUES ( $1, -- id $2, -- uuid $3, -- pub_millis $4, -- pub_utc_date $5, -- 
start_node $6, -- end_node $7, -- road_type $8, -- street $9, -- city $10, -- country $11, -- delay $12, -- speed $13, -- speed_kmh $14, -- length $15, -- turn_type $16, -- level $17, -- blocking_alert_id $18, -- line $19, -- datafile_id $20, -- type $21 -- turn_line ) ON CONFLICT (id) DO UPDATE SET uuid=$2, pub_millis=$3, pub_utc_date=$4, start_node=$5, end_node=$6, road_type=$7, street=$8, city=$9, country=$10, delay=$11, speed=$12, speed_kmh=$13, length=$14, turn_type=$15, level=$16, blocking_alert_id=$17, line=$18, datafile_id=$19, type=$20, turn_line=$21`; //#endregion let result = await connectionPool.getPool().query(sql, [ jam.id, //id jam.uuid, //uuid jam.pub_millis, //pub_millis jam.pub_utc_date, //pub_utc_date jam.start_node, //start_node jam.end_node, //end_node jam.road_type, //road_type jam.street, //street jam.city, //city jam.country, //country jam.delay, //delay jam.speed, //speed jam.speed_kmh, //speed_kmh jam.length, //length jam.turn_type, //turn_type jam.level, //level jam.blocking_alert_id, //blocking_alert_id jam.line, //line jam.datafile_id, //datafile_id jam.type, //type jam.turn_line, //turn_line ]); //nothing currently to update on the jam object based on SQL return return; } // upsert an irregularity record export async function upsertIrregularityCommand(irregularity: entities.Irregularity): Promise<void> { //for simplicity, we'll always insert and update all fields, since our hash should ensure there aren't unexpected changes //this is really more for when we discover later that waze added a new field, we add it in all the code, then reprocess those files //#region UPSERT SQL const sql = `INSERT INTO waze.irregularities ( id, uuid, detection_date_millis, detection_date, detection_utc_date, update_date_millis, update_date, update_utc_date, street, city, country, is_highway, speed, regular_speed, delay_seconds, seconds, length, trend, type, severity, jam_level, drivers_count, alerts_count, n_thumbs_up, n_comments, n_images, line, 
datafile_id, cause_type, start_node, end_node ) VALUES ( $1, -- id, $2, -- uuid, $3, -- detection_date_millis, $4, -- detection_date, $5, -- detection_utc_date, $6, -- update_date_millis, $7, -- update_date, $8, -- update_utc_date, $9, -- street, $10, -- city, $11, -- country, $12, -- is_highway, $13, -- speed, $14, -- regular_speed, $15, -- delay_seconds, $16, -- seconds, $17, -- length, $18, -- trend, $19, -- type, $20, -- severity, $21, -- jam_level, $22, -- drivers_count, $23, -- alerts_count, $24, -- n_thumbs_up, $25, -- n_comments, $26, -- n_images, $27, -- line, $28, -- datafile_id $29, -- cause_type $30, -- start_node $31 -- end_node ) ON CONFLICT (id) DO UPDATE SET uuid = $2, detection_date_millis = $3, detection_date = $4, detection_utc_date = $5, update_date_millis = $6, update_date = $7, update_utc_date = $8, street = $9, city = $10, country = $11, is_highway = $12, speed = $13, regular_speed = $14, delay_seconds = $15, seconds = $16, length = $17, trend = $18, type = $19, severity = $20, jam_level = $21, drivers_count = $22, alerts_count = $23, n_thumbs_up = $24, n_comments = $25, n_images = $26, line = $27, datafile_id = $28, cause_type = $29, start_node = $30, end_node = $31`; //#endregion let result = await connectionPool.getPool().query(sql, [ irregularity.id, //id irregularity.uuid, //uuid irregularity.detection_date_millis, //detection_date_millis irregularity.detection_date, //detection_date irregularity.detection_utc_date, //detection_utc_date irregularity.update_date_millis, //update_date_millis irregularity.update_date, //update_date irregularity.update_utc_date, //update_utc_date irregularity.street, //street irregularity.city, //city irregularity.country, //country irregularity.is_highway, //is_highway irregularity.speed, //speed irregularity.regular_speed, //regular_speed irregularity.delay_seconds, //delay_seconds irregularity.seconds, //seconds irregularity.length, //length irregularity.trend, //trend irregularity.type, //type 
irregularity.severity, //severity irregularity.jam_level, //jam_level irregularity.drivers_count, //drivers_count irregularity.alerts_count, //alerts_count irregularity.n_thumbs_up, //n_thumbs_up irregularity.n_comments, //n_comments irregularity.n_images, //n_images irregularity.line, //line irregularity.datafile_id, //datafile_id irregularity.cause_type, //cause_type irregularity.start_node, //start_node irregularity.end_node //end_node ]); //nothing currently to jam on the alert object based on SQL return return; } // upsert a coordinate record export async function upsertCoordinateCommand(coordinate: entities.Coordinate): Promise<void> { //for simplicity, we'll always insert and update all fields, since our hash should ensure there aren't unexpected changes //this is really more for when we discover later that waze added a new field, we add it in all the code, then reprocess those files //#region UPSERT SQL const sql = `INSERT INTO waze.coordinates ( id, latitude, longitude, "order", jam_id, irregularity_id, alert_id, coordinate_type_id ) VALUES ( $1, -- id $2, -- latitude $3, -- longitude $4, -- order $5, -- jam_id $6, -- irregularity_id $7, -- alert_id $8 -- coordinate_type_id ) ON CONFLICT (id) DO UPDATE SET id=$1, latitude=$2, longitude=$3, "order"=$4, jam_id=$5, irregularity_id=$6, alert_id=$7, coordinate_type_id=$8`; //#endregion let result = await connectionPool.getPool().query(sql, [ coordinate.id, //id coordinate.latitude, //latitude coordinate.longitude, //longitude coordinate.order, //order coordinate.jam_id, //jam_id coordinate.irregularity_id, //irregularity_id coordinate.alert_id, //alert_id coordinate.coordinate_type_id //coordinate_type_id ]); //nothing currently to alter on the coordinate object based on SQL return return; }
the_stack
import {
    ChangeDetectorRef,
    Component,
    ViewChild,
    HostBinding,
    ChangeDetectionStrategy,
    TemplateRef,
    Directive,
    OnDestroy,
    ElementRef,
    Input,
    ViewRef,
    ContentChild,
    Output,
    EventEmitter,
    Optional,
    Host,
} from '@angular/core';
import { IgxOverlayService } from '../../../services/public_api';
import { IgxFilteringService, ExpressionUI } from '../grid-filtering.service';
import { FilteringExpressionsTree, IFilteringExpressionsTree } from '../../../data-operations/filtering-expressions-tree';
import { resolveNestedPath, parseDate, uniqueDates, PlatformUtil } from '../../../core/utils';
import { GridColumnDataType } from '../../../data-operations/data-util';
import { Subscription, Subject } from 'rxjs';
import { takeUntil } from 'rxjs/operators';
import { IgxColumnComponent } from '../../columns/column.component';
import { IgxGridBaseDirective } from '../../grid-base.directive';
import { DisplayDensity } from '../../../core/density';
import { GridSelectionMode } from '../../common/enums';
import { GridBaseAPIService } from '../../api.service';
import { FormattedValuesFilteringStrategy } from '../../../data-operations/filtering-strategy';
import { TreeGridFormattedValuesFilteringStrategy } from '../../tree-grid/tree-grid.filtering.strategy';
import { getLocaleCurrencyCode } from '@angular/common';
import { SortingDirection } from '../../../data-operations/sorting-expression.interface';

/**
 * One entry in the Excel-style filter's checkbox list of column values.
 *
 * @hidden
 */
export class FilterListItem {
    // Raw cell value this entry represents.
    public value: any;
    // Text shown next to the checkbox for this entry.
    public label: any;
    // Whether the entry's checkbox is checked.
    public isSelected: boolean;
    // Whether the entry's checkbox is in the indeterminate (mixed) state.
    public indeterminate: boolean;
    // Whether this value is part of the column's currently applied filter.
    public isFiltered: boolean;
    // Marks non-value entries such as "Select All".
    public isSpecial = false;
    // Marks the "(Blanks)" entry that stands for null/empty values.
    public isBlanks = false;
}

// Marker directive for projecting custom column-operations content into the
// Excel-style filter.
@Directive({
    selector: 'igx-excel-style-column-operations,[igxExcelStyleColumnOperations]'
})
export class IgxExcelStyleColumnOperationsTemplateDirective { }

// Marker directive for projecting custom filter-operations content into the
// Excel-style filter.
@Directive({
    selector: 'igx-excel-style-filter-operations,[igxExcelStyleFilterOperations]'
})
export class IgxExcelStyleFilterOperationsTemplateDirective { }

/**
 * A component used for presenting Excel style
filtering UI for a specific column. * It is used internally in the Grid, but could also be hosted in a container outside of it. * * Example: * ```html * <igx-grid-excel-style-filtering * [column]="grid1.columns[0]"> * </igx-grid-excel-style-filtering> * ``` */ @Component({ changeDetection: ChangeDetectionStrategy.OnPush, selector: 'igx-grid-excel-style-filtering', templateUrl: './grid.excel-style-filtering.component.html' }) export class IgxGridExcelStyleFilteringComponent implements OnDestroy { /** * @hidden @internal */ @HostBinding('class.igx-excel-filter') public defaultClass = true; /** * @hidden @internal */ @HostBinding('class.igx-excel-filter--inline') public inline = true; /** * @hidden @internal */ @Output() public loadingStart = new EventEmitter(); /** * @hidden @internal */ @Output() public loadingEnd = new EventEmitter(); /** * @hidden @internal */ @Output() public initialized = new EventEmitter(); /** * @hidden @internal */ @Output() public sortingChanged = new EventEmitter(); /** * @hidden @internal */ @Output() public columnChange = new EventEmitter<IgxColumnComponent>(); /** * @hidden @internal */ @Output() public listDataLoaded = new EventEmitter(); @ViewChild('mainDropdown', { read: ElementRef }) public mainDropdown: ElementRef; /** * @hidden @internal */ @ContentChild(IgxExcelStyleColumnOperationsTemplateDirective, { read: IgxExcelStyleColumnOperationsTemplateDirective }) public excelColumnOperationsDirective: IgxExcelStyleColumnOperationsTemplateDirective; /** * @hidden @internal */ @ContentChild(IgxExcelStyleFilterOperationsTemplateDirective, { read: IgxExcelStyleFilterOperationsTemplateDirective }) public excelFilterOperationsDirective: IgxExcelStyleFilterOperationsTemplateDirective; /** * @hidden @internal */ @ViewChild('defaultExcelColumnOperations', { read: TemplateRef, static: true }) protected defaultExcelColumnOperations: TemplateRef<any>; /** * @hidden @internal */ @ViewChild('defaultExcelFilterOperations', { read: TemplateRef, static: 
true }) protected defaultExcelFilterOperations: TemplateRef<any>; /** * An @Input property that sets the column. */ @Input() public set column(value: IgxColumnComponent) { this._column = value; this.listData = new Array<FilterListItem>(); this.columnChange.emit(this._column); if (this._columnPinning) { this._columnPinning.unsubscribe(); } if (this._columnVisibilityChanged) { this._columnVisibilityChanged.unsubscribe(); } if (this._sortingChanged) { this._sortingChanged.unsubscribe(); } if (this._filteringChanged) { this._filteringChanged.unsubscribe(); } if (this._densityChanged) { this._densityChanged.unsubscribe(); } if (this._columnMoved) { this._columnMoved.unsubscribe(); } if (this._column) { this._column.grid.filteringService.registerSVGIcons(); this.init(); this.sortingChanged.emit(); this._columnPinning = this.grid.columnPin.pipe(takeUntil(this.destroy$)).subscribe(() => { requestAnimationFrame(() => { if (!(this.cdr as ViewRef).destroyed) { this.cdr.detectChanges(); } }); }); this._columnVisibilityChanged = this.grid.columnVisibilityChanged.pipe(takeUntil(this.destroy$)).subscribe(() => { this.cdr.detectChanges(); }); this._sortingChanged = this.grid.sortingExpressionsChange.pipe(takeUntil(this.destroy$)).subscribe(() => { this.sortingChanged.emit(); }); this._filteringChanged = this.grid.filteringExpressionsTreeChange.pipe(takeUntil(this.destroy$)).subscribe(() => { this.init(); }); this._densityChanged = this.grid.onDensityChanged.pipe(takeUntil(this.destroy$)).subscribe(() => { this.cdr.detectChanges(); }); this._columnMoved = this.grid.columnMovingEnd.pipe(takeUntil(this.destroy$)).subscribe(() => { this.cdr.markForCheck(); }); } } /** * Returns the current column. 
*/ public get column(): IgxColumnComponent { return this._column; } /** * @hidden @internal */ public get filteringService(): IgxFilteringService { return this.grid.filteringService; } /** * @hidden @internal */ public expressionsList = new Array<ExpressionUI>(); /** * @hidden @internal */ public listData = new Array<FilterListItem>(); /** * @hidden @internal */ public uniqueValues = []; /** * @hidden @internal */ public overlayService: IgxOverlayService; /** * @hidden @internal */ public overlayComponentId: string; private _minHeight; /** * Gets the minimum height. */ @Input() public get minHeight(): string { if (this._minHeight || this._minHeight === 0) { return this._minHeight; } if (!this.inline) { let minHeight = 645; switch (this.displayDensity) { case DisplayDensity.cosy: minHeight = 465; break; case DisplayDensity.compact: minHeight = 330; break; default: break; } return `${minHeight}px`; } } /** * Sets the minimum height. */ public set minHeight(value: string) { this._minHeight = value; } private _maxHeight; private destroy$ = new Subject<boolean>(); private containsNullOrEmpty = false; private selectAllSelected = true; private selectAllIndeterminate = false; private filterValues = new Set<any>(); private _column: IgxColumnComponent; private _columnPinning: Subscription; private _columnVisibilityChanged: Subscription; private _sortingChanged: Subscription; private _filteringChanged: Subscription; private _densityChanged: Subscription; private _columnMoved: Subscription; private _originalDisplay: string; /** * Gets the maximum height. */ @Input() @HostBinding('style.max-height') public get maxHeight(): string { if (this._maxHeight) { return this._maxHeight; } if (!this.inline) { let maxHeight = 775; switch (this.displayDensity) { case DisplayDensity.cosy: maxHeight = 565; break; case DisplayDensity.compact: maxHeight = 405; break; default: break; } return `${maxHeight}px`; } } /** * Sets the maximum height. 
*/
public set maxHeight(value: string) {
    this._maxHeight = value;
}

/**
 * @hidden @internal
 * Grid this filtering UI belongs to: taken from the bound column when available,
 * otherwise from the optionally injected grid API service.
 */
public get grid(): IgxGridBaseDirective {
    return this.column?.grid ?? this.gridAPI?.grid;
}

/** @hidden @internal Display density forwarded from the owning grid. */
public get displayDensity() {
    return this.grid?.displayDensity;
}

constructor(
    private cdr: ChangeDetectorRef,
    public element: ElementRef,
    protected platform: PlatformUtil,
    // Optional so the component can also be created outside a grid host.
    @Host() @Optional() private gridAPI?: GridBaseAPIService<IgxGridBaseDirective>) { }

/** @hidden @internal Completes the destroy$ subject so takeUntil subscriptions unsubscribe. */
public ngOnDestroy(): void {
    this.destroy$.next(true);
    this.destroy$.complete();
}

/** @hidden @internal CSS class for the actions area, depending on column selection state. */
public selectedClass() {
    return this.column.selected ? 'igx-excel-filter__actions-selected' : 'igx-excel-filter__actions-select';
}

/**
 * @hidden @internal
 * Binds the component to a column/overlay, restores the display style if the
 * component was previously hidden via hide(), and closes the dropdown whenever
 * columns start moving.
 */
public initialize(column: IgxColumnComponent, overlayService: IgxOverlayService) {
    this.inline = false;
    this.column = column;
    this.overlayService = overlayService;
    if (this._originalDisplay) {
        this.element.nativeElement.style.display = this._originalDisplay;
    }
    this.initialized.emit();
    this.grid.columnMoving.pipe(takeUntil(this.destroy$)).subscribe(() => {
        this.closeDropdown();
    });
}

/** @hidden @internal Toggles pin state of the column and closes the dropdown. */
public onPin() {
    this.column.pinned = !this.column.pinned;
    this.closeDropdown();
}

/** @hidden @internal Toggles column selection (single-select aware) and refreshes the grid. */
public onSelect() {
    if (!this.column.selected) {
        this.grid.selectionService.selectColumn(this.column.field, this.grid.columnSelection === GridSelectionMode.single);
    } else {
        this.grid.selectionService.deselectColumn(this.column.field);
    }
    this.grid.notifyChanges();
}

/** @hidden @internal Whether the column-select action should be enabled. */
public columnSelectable() {
    return this.grid?.columnSelection !== GridSelectionMode.none && this.column?.selectable;
}

/** @hidden @internal Toggles column visibility and closes the dropdown. */
public onHideToggle() {
    this.column.toggleVisibility();
    this.closeDropdown();
}

/**
 * @hidden @internal
 * Cancels pending edits. When shown inline (no overlay id) the list state is
 * rebuilt from the current expressions before closing.
 */
public cancel() {
    if (!this.overlayComponentId) {
        this.init();
    }
    this.closeDropdown();
}

/** @hidden @internal Hides the overlay-hosted dropdown, if one is open. */
public closeDropdown() {
    if (this.overlayComponentId) {
        this.overlayService.hide(this.overlayComponentId);
        this.overlayComponentId = null;
    }
}

/** @hidden @internal Closes the dropdown on the platform's filtering key combo; swallows propagation. */
public onKeyDown(eventArgs: KeyboardEvent) {
    if (this.platform.isFilteringKeyCombo(eventArgs)) {
        eventArgs.preventDefault();
        this.closeDropdown();
    }
    eventArgs.stopPropagation();
}

/** @hidden @internal Hides the host element, remembering its computed display for initialize(). */
public hide() {
    this._originalDisplay = document.defaultView.getComputedStyle(this.element.nativeElement).display;
    this.element.nativeElement.style.display = 'none';
}

/** @hidden @internal Manually triggers change detection for this component. */
public detectChanges() {
    this.cdr.detectChanges();
}

// Rebuilds the expression list from the column's filtering tree, then repopulates values.
private init() {
    this.expressionsList = new Array<ExpressionUI>();
    this.filteringService.generateExpressionsList(this.column.filteringExpressionsTree, this.grid.filteringLogic, this.expressionsList);
    this.populateColumnData();
}

// True when every expression is one of the list-representable conditions
// (equals/at/true/false/empty/in) combined only with OR (operator flag === 1).
private areExpressionsSelectable() {
    if (this.expressionsList.length === 1 &&
        (this.expressionsList[0].expression.condition.name === 'equals' ||
            this.expressionsList[0].expression.condition.name === 'at' ||
            this.expressionsList[0].expression.condition.name === 'true' ||
            this.expressionsList[0].expression.condition.name === 'false' ||
            this.expressionsList[0].expression.condition.name === 'empty' ||
            this.expressionsList[0].expression.condition.name === 'in')) {
        return true;
    }
    const selectableExpressionsCount = this.expressionsList.filter(exp =>
        (exp.beforeOperator === 1 || exp.afterOperator === 1) &&
        (exp.expression.condition.name === 'equals' ||
            exp.expression.condition.name === 'at' ||
            exp.expression.condition.name === 'true' ||
            exp.expression.condition.name === 'false' ||
            exp.expression.condition.name === 'empty' ||
            exp.expression.condition.name === 'in')).length;
    return selectableExpressionsCount === this.expressionsList.length;
}

// True when at least one currently filtered value still exists in the column's
// unique values (or the filter is the single empty/null case).
private areExpressionsValuesInTheList() {
    if (this.column.dataType === GridColumnDataType.Boolean) {
        return true;
    }
    if (this.filterValues.size === 1) {
        const firstValue = this.filterValues.values().next().value;
        // A lone falsy (but not 0) filter value is treated as matching "blanks".
        if (!firstValue && firstValue !== 0) {
            return true;
        }
    }
    for (const expression of this.uniqueValues) {
        const value = this.getExpressionValue(expression);
        if (this.filterValues.has(value)) {
            return true;
        }
    }
    return false;
}

// Chooses remote vs. local population of the unique-values list.
private populateColumnData() {
    if (this.grid.uniqueColumnValuesStrategy) {
        this.cdr.detectChanges();
        this.renderColumnValuesRemotely();
    } else {
        this.renderColumnValuesFromData();
    }
}

// Asks the app-provided strategy for unique values; ignores the callback if the
// bound column changed while the request was in flight.
private renderColumnValuesRemotely() {
    this.loadingStart.emit();
    const expressionsTree: FilteringExpressionsTree = this.getColumnFilterExpressionsTree();
    const prevColumn = this.column;
    this.grid.uniqueColumnValuesStrategy(this.column, expressionsTree, (colVals: any[]) => {
        if (!this.column || this.column !== prevColumn) {
            return;
        }
        // Date columns get {label, value} pairs so the list can show formatted labels.
        const columnValues = (this.column.dataType === GridColumnDataType.Date) ?
            colVals.map(value => {
                const label = this.getFilterItemLabel(value);
                return { label, value };
            }) : colVals;
        this.renderValues(columnValues);
        this.loadingEnd.emit();
    });
}

// Whether list values must be run through the column formatter (formatted-value
// filtering strategies only).
private shouldFormatValues() {
    return this.column.formatter &&
        (this.grid.filterStrategy instanceof FormattedValuesFilteringStrategy ||
            this.grid.filterStrategy instanceof TreeGridFormattedValuesFilteringStrategy) &&
        this.grid.filterStrategy.shouldApplyFormatter(this.column.field);
}

// Builds the unique-values list from the grid's own data, applying the other
// columns' filters first via the partial expressions tree.
private renderColumnValuesFromData() {
    const expressionsTree = this.getColumnFilterExpressionsTree();
    const data = this.column.gridAPI.filterDataByExpressions(expressionsTree);
    const shouldFormatValues = this.shouldFormatValues();
    const columnField = this.column.field;
    const columnValues = (this.column.dataType === GridColumnDataType.Date) ?
        data.map(record => {
            const value = (resolveNestedPath(record, columnField));
            const label = this.getFilterItemLabel(value, true, record);
            return { label, value };
        }) :
        data.map(record => {
            const value = resolveNestedPath(record, columnField);
            return shouldFormatValues ? this.column.formatter(value, record) : value;
        });
    this.renderValues(columnValues);
}

// Pipeline: dedupe values -> collect currently-filtered values -> build list items.
private renderValues(columnValues: any[]) {
    this.generateUniqueValues(columnValues);
    this.generateFilterValues(this.column.dataType === GridColumnDataType.Date || this.column.dataType === GridColumnDataType.DateTime);
    this.generateListData();
}

// Deduplicates raw column values per data type. Strings may dedupe
// case-insensitively (keeping the first-seen original casing); DateTime/Time
// dedupe on a string key and are then rehydrated back into Date objects.
private generateUniqueValues(columnValues: any[]) {
    if (this.column.dataType === GridColumnDataType.String && this.column.filteringIgnoreCase) {
        const filteredUniqueValues = columnValues.map(s => s?.toString().toLowerCase())
            .reduce((map, val, i) => map.get(val) ? map : map.set(val, columnValues[i]), new Map());
        this.uniqueValues = Array.from(filteredUniqueValues.values());
    } else if (this.column.dataType === GridColumnDataType.DateTime) {
        this.uniqueValues = Array.from(new Set(columnValues.map(v => v?.toLocaleString())));
        this.uniqueValues.forEach((d, i) => this.uniqueValues[i] = d ? new Date(d) : d);
    } else if (this.column.dataType === GridColumnDataType.Time) {
        // Time columns only compare the time-of-day part, normalized onto today's date.
        this.uniqueValues = Array.from(new Set(columnValues.map(v => {
            if (v) {
                v = new Date(v);
                return new Date().setHours(v.getHours(), v.getMinutes(), v.getSeconds());
            } else {
                return v;
            }
        })));
        this.uniqueValues.forEach((d, i) => this.uniqueValues[i] = d ? new Date(d) : d);
    } else {
        this.uniqueValues = this.column.dataType === GridColumnDataType.Date ?
            uniqueDates(columnValues) : Array.from(new Set(columnValues));
    }
}

// Flattens the active expressions into a Set of comparable keys. 'in'
// conditions carry a Set in searchVal; date values are keyed by ISO string,
// time values by locale time string, everything else raw.
private generateFilterValues(isDateColumn: boolean = false) {
    if (isDateColumn) {
        this.filterValues = new Set<any>(this.expressionsList.reduce((arr, e) => {
            if (e.expression.condition.name === 'in') {
                return [...arr, ...Array.from((e.expression.searchVal as Set<any>).values()).map(v => new Date(v).toISOString())];
            }
            return [...arr, ...[e.expression.searchVal ? e.expression.searchVal.toISOString() : e.expression.searchVal]];
        }, []));
    } else if (this.column.dataType === GridColumnDataType.Time) {
        this.filterValues = new Set<any>(this.expressionsList.reduce((arr, e) => {
            if (e.expression.condition.name === 'in') {
                return [
                    ...arr,
                    ...Array.from((e.expression.searchVal as Set<any>).values()).map(v => typeof v === 'string' ? v : new Date(v).toLocaleTimeString())
                ];
            }
            return [
                ...arr,
                ...[e.expression.searchVal ? e.expression.searchVal.toLocaleTimeString() : e.expression.searchVal]
            ];
        }, []));
    } else {
        this.filterValues = new Set<any>(this.expressionsList.reduce((arr, e) => {
            if (e.expression.condition.name === 'in') {
                return [...arr, ...Array.from((e.expression.searchVal as Set<any>).values())];
            }
            return [...arr, ...[e.expression.searchVal]];
        }, []));
    }
}

// Builds the final sorted list shown in the dropdown, prepending the special
// "Blanks" and "Select All" rows as needed.
private generateListData() {
    this.listData = new Array<FilterListItem>();
    const shouldUpdateSelection = this.areExpressionsSelectable() && this.areExpressionsValuesInTheList();
    if (this.column.dataType === GridColumnDataType.Boolean) {
        this.addBooleanItems();
    } else {
        this.addItems(shouldUpdateSelection);
    }
    this.listData = this.column.sortStrategy.sort(this.listData, 'value', SortingDirection.Asc, this.column.sortingIgnoreCase,
        (obj, key) => {
            let resolvedValue = obj[key];
            // Time items sort by time-of-day only, normalized onto today's date.
            if (this.column.dataType === GridColumnDataType.Time) {
                resolvedValue = new Date().setHours(
                    resolvedValue.getHours(),
                    resolvedValue.getMinutes(),
                    resolvedValue.getSeconds(),
                    resolvedValue.getMilliseconds());
            }
            return resolvedValue;
        });
    if (this.containsNullOrEmpty) {
        this.addBlanksItem(shouldUpdateSelection);
    }
    if (this.listData.length > 0) {
        this.addSelectAllItem();
    }
    // Guard against running CD on a destroyed view (private flag; no public API).
    if (!(this.cdr as any).destroyed) {
        this.cdr.detectChanges();
    }
    this.listDataLoaded.emit();
}

// Copies the grid's filtering tree up to (but excluding) this column's own
// subtree, so the value list reflects all *other* columns' filters.
// NOTE(review): 'break' also drops operands after this column's subtree —
// presumably intentional given operand ordering; confirm against the tree builder.
private getColumnFilterExpressionsTree() {
    const gridExpressionsTree: IFilteringExpressionsTree = this.grid.filteringExpressionsTree;
    const expressionsTree = new FilteringExpressionsTree(gridExpressionsTree.operator, gridExpressionsTree.fieldName);
    for (const operand of gridExpressionsTree.filteringOperands) {
        if (operand instanceof FilteringExpressionsTree) {
            const columnExprTree = operand as FilteringExpressionsTree;
            if (columnExprTree.fieldName === this.column.field) {
                break;
            }
        }
        expressionsTree.filteringOperands.push(operand);
    }
    return expressionsTree;
}

// Builds true/false list items; selection mirrors any active 'true'/'false'
// conditions. Null/empty values only flag the "Blanks" row.
private addBooleanItems() {
    this.selectAllSelected = true;
    this.selectAllIndeterminate = false;
    this.uniqueValues.forEach(element => {
        const filterListItem = new FilterListItem();
        if (element !== undefined && element !== null && element !== '') {
            if (this.column.filteringExpressionsTree) {
                if (element === true && this.expressionsList.find(exp => exp.expression.condition.name === 'true')) {
                    filterListItem.isSelected = true;
                    filterListItem.isFiltered = true;
                    this.selectAllIndeterminate = true;
                } else if (element === false && this.expressionsList.find(exp => exp.expression.condition.name === 'false')) {
                    filterListItem.isSelected = true;
                    filterListItem.isFiltered = true;
                    this.selectAllIndeterminate = true;
                } else {
                    filterListItem.isSelected = false;
                    filterListItem.isFiltered = false;
                }
            } else {
                filterListItem.isSelected = true;
                filterListItem.isFiltered = true;
            }
            filterListItem.value = element;
            filterListItem.label = element ? this.grid.resourceStrings.igx_grid_filter_true : this.grid.resourceStrings.igx_grid_filter_false;
            filterListItem.indeterminate = false;
            this.listData.push(filterListItem);
        } else {
            this.containsNullOrEmpty = true;
        }
    });
}

// Builds list items for non-boolean columns. When the active expressions are
// list-representable, item selection mirrors filterValues; otherwise the whole
// "Select All" is cleared.
private addItems(shouldUpdateSelection: boolean) {
    this.selectAllSelected = true;
    this.containsNullOrEmpty = false;
    this.selectAllIndeterminate = false;
    const applyFormatter = !this.shouldFormatValues();
    this.uniqueValues.forEach(element => {
        // Date columns always arrive as {label, value} pairs, hence the second clause.
        const hasValue = (element !== undefined && element !== null && element !== ''
            && this.column.dataType !== GridColumnDataType.Date)
            || !!(element && element.label);
        if (hasValue) {
            const filterListItem = new FilterListItem();
            filterListItem.isSelected = true;
            filterListItem.isFiltered = true;
            if (this.column.filteringExpressionsTree) {
                filterListItem.isSelected = false;
                filterListItem.isFiltered = false;
                if (shouldUpdateSelection) {
                    const value = this.getExpressionValue(element);
                    if (this.filterValues.has(value)) {
                        filterListItem.isSelected = true;
                        filterListItem.isFiltered = true;
                    }
                    this.selectAllIndeterminate = true;
                } else {
                    this.selectAllSelected = false;
                }
            }
            filterListItem.value = this.getFilterItemValue(element);
            filterListItem.label = this.getFilterItemLabel(element, applyFormatter);
            filterListItem.indeterminate = false;
            this.listData.push(filterListItem);
        }
    });
    // Any skipped (valueless) uniques mean a "Blanks" row is needed.
    this.containsNullOrEmpty = this.uniqueValues.length > this.listData.length;
}

// Prepends the special "Select All" row reflecting the aggregate selection state.
private addSelectAllItem() {
    const selectAll = new FilterListItem();
    selectAll.isSelected = this.selectAllSelected;
    selectAll.value = this.grid.resourceStrings.igx_grid_excel_select_all;
    selectAll.label = this.grid.resourceStrings.igx_grid_excel_select_all;
    selectAll.indeterminate = this.selectAllIndeterminate;
    selectAll.isSpecial = true;
    selectAll.isFiltered = this.selectAllSelected;
    this.listData.unshift(selectAll);
}

// Prepends the special "Blanks" row; selected when a null filter value is active
// (or unconditionally when no filter exists yet).
private addBlanksItem(shouldUpdateSelection) {
    const blanks = new FilterListItem();
    if (this.column.filteringExpressionsTree) {
        if (shouldUpdateSelection) {
            if (this.filterValues.has(null)) {
                blanks.isSelected = true;
                blanks.isFiltered = true;
            } else {
                blanks.isSelected = false;
                blanks.isFiltered = false;
            }
        }
    } else {
        blanks.isSelected = true;
        blanks.isFiltered = true;
    }
    blanks.value = null;
    blanks.label = this.grid.resourceStrings.igx_grid_excel_blanks;
    blanks.indeterminate = false;
    blanks.isSpecial = true;
    blanks.isBlanks = true;
    this.listData.unshift(blanks);
}

// Produces the display label for a list item: prefers a precomputed label, then
// the column formatter (when applyFormatter), then the matching grid pipe with
// the column's pipeArgs and the grid locale.
private getFilterItemLabel(element: any, applyFormatter: boolean = true, data?: any) {
    if (this.column.dataType === GridColumnDataType.Date || this.column.dataType === GridColumnDataType.Time
        || this.column.dataType === GridColumnDataType.DateTime) {
        return element && element.label ? element.label :
            this.column.formatter ?
                applyFormatter ? this.column.formatter(element, data) : element :
                this.grid.datePipe.transform(element, this.column.pipeArgs.format, this.column.pipeArgs.timezone, this.grid.locale);
    }
    if (this.column.dataType === GridColumnDataType.Number) {
        return this.column.formatter ?
            applyFormatter ? this.column.formatter(element, data) : element :
            this.grid.decimalPipe.transform(element, this.column.pipeArgs.digitsInfo, this.grid.locale);
    }
    if (this.column.dataType === GridColumnDataType.Currency) {
        return this.column.formatter ?
            applyFormatter ? this.column.formatter(element, data) : element :
            this.grid.currencyPipe.transform(element,
                this.column.pipeArgs.currencyCode ? this.column.pipeArgs.currencyCode : getLocaleCurrencyCode(this.grid.locale),
                this.column.pipeArgs.display, this.column.pipeArgs.digitsInfo, this.grid.locale);
    }
    if (this.column.dataType === GridColumnDataType.Percent) {
        return this.column.formatter ?
            applyFormatter ? this.column.formatter(element, data) : element :
            this.grid.percentPipe.transform(element, this.column.pipeArgs.digitsInfo, this.grid.locale);
    }
    return this.column.formatter && applyFormatter ? this.column.formatter(element, data) : element;
}

// Unwraps the raw value stored on a list item (Date columns carry {label, value}).
private getFilterItemValue(element: any) {
    if (this.column.dataType === GridColumnDataType.Date) {
        element = parseDate(element.value);
    }
    return element;
}

// Normalizes a unique value to the same key form used by generateFilterValues,
// so Set.has() comparisons line up per data type.
private getExpressionValue(element: any): string {
    let value;
    if (this.column.dataType === GridColumnDataType.Date) {
        value = element && element.value ? new Date(element.value).toISOString() : element.value;
    } else if (this.column.dataType === GridColumnDataType.DateTime) {
        value = element ? new Date(element).toISOString() : element;
    } else if (this.column.dataType === GridColumnDataType.Time) {
        value = element ? new Date(element).toLocaleTimeString() : element;
    } else {
        value = element;
    }
    return value;
}
}
the_stack
Component Name : Amexio listbox Component Selector : <amexio-listbox> Component Description : Simple list box which allows user to select one of more items from list based on configuration. User can provide custom template to change look and feel. */ import { AfterViewInit, Component, ContentChild, EventEmitter, HostListener, Input, OnDestroy, OnInit, Output, Renderer2, TemplateRef, } from '@angular/core'; import { CommonDataService } from '../../services/data/common.data.service'; import { LifeCycleBaseComponent } from '../../base/lifecycle.base.component'; @Component({ selector: 'amexio-listbox', templateUrl: './listbox.component.html', }) export class AmexioListBoxComponent extends LifeCycleBaseComponent implements AfterViewInit, OnInit, OnDestroy { private componentLoaded: boolean; contextMenuStyle: any; /* Properties name : enable-checkbox datatype : boolean version : 4.0 onwards default : none description : Enables checkbox for each row, this allows user for multi selection. */ @Input('enable-checkbox') enablecheckbox: boolean; /* Properties name : header datatype : string version : 4.0 onwards default : none description : Heading for ListBox. */ @Input() header: string; /* Properties name : enable-header datatype : boolean version : 4.2.4 onwards default : true description : User can disabled header of listbox to false.. */ @Input('enable-header') enableHeader = true; /* Properties name : search-placeholder datatype : string version : 4.0 onwards default : none description : place-holder for searchbox. */ @Input('search-placeholder') searchplaceholder: string; /* Properties name : filter datatype : boolean version : 4.0 onwards default : none description : Enables user to filter data based on 'display-field' configured. */ @Input() filter: boolean; /* Properties name : data datatype : any version : 4.0 onwards default : none description : Local Data binding. 
*/ _data: any; @Input('data') set data(value: any[]) { this._data = value; if (this.componentLoaded) { this.updateComponent(); } } get data(): any[] { return this._data; } /* Properties name : http-url datatype : string version : 4.0 onwards default : none description : REST url for fetching data. */ @Input('http-url') httpurl: string; /* Properties name : data-reader datatype : string version : 4.0 onwards default : none description : Key in JSON Datasource for records. */ @Input('data-reader') datareader: string; /* Properties name : http-method datatype : string version : 4.0 onwards default : none description : Type of HTTP call, POST,GET etc. */ @Input('http-method') httpmethod: string; /* Properties name : display-field datatype : string version : 4.0 onwards default : none description : Key in JSON for display particular column from data. */ @Input('display-field') displayfield: string; /* Properties name : height datatype : any version : 4.0 onwards default : none description : height for ListBox. */ @Input() height: any; /* Events name : selectedRows datatype : none version : none default : none description : It will fire only on selection of checkbox and gives you selected record data. */ @Output() selectedRows: any = new EventEmitter<any>(); /* Events name : onRowClick datatype : none version : none default : none description : It will gives you row clicked data. */ @Output() onRowClick: any = new EventEmitter<any>(); /* Properties name : border datatype : any version : 4.2 onwards default : none description : Border for listbox, default style is 1px solid #ced4da. */ @Input() border: any; /* Properties name : context-menu datatype : string version : 5.0.1 onwards default : description : Context Menu provides the list of menus on right click. */ @Input('context-menu') contextmenu: any[]; @Input('icon') icon: string; /* Events name : rightClick datatype : none version : 5.0.1 default : none description : It will gives you row clicked data. 
*/ @Output() rightClick: any = new EventEmitter<any>(); @Output() onIconClick: any = new EventEmitter<any>(); @ContentChild('amexioBodyTmpl') bodyTemplate: TemplateRef<any>; viewData: any[]; orgData: any[]; filterText = ''; selectAll = false; response: any; selectedData: any[]; previousData: any; maskloader = true; ishoverselected = true; mouseLocation: { left: number; top: number } = { left: 0, top: 0 }; contextMenuFlag: boolean; posixUp: boolean; rightClickRowData: any; activedescendant = 'aria-activedescendant'; listId: string; componentId: string; a: any; flag = false; prevlistindex = -1; listindex = -1; documentClickListener: any; globalClickListenFunc: () => void; tempData: any[]; constructor(public dataService: CommonDataService, private renderer: Renderer2) { super(); this.filter = false; this.enablecheckbox = false; this.selectedData = []; this.searchplaceholder = 'Search'; this.flag = true; } ngOnInit() { if (this.httpmethod && this.httpurl) { this.dataService.fetchData(this.httpurl, this.httpmethod).subscribe((response) => { this.response = response; }, (error) => { }, () => { this.setData(this.response); }); } else if (this.data) { this.previousData = JSON.parse(JSON.stringify(this.data)); this.setData(this.data); } this.componentLoaded = true; this.componentId = 'listbox' + window.crypto.getRandomValues(new Uint32Array(1))[0]; this.listenListboxOutClick(); } listenListboxOutClick() { this.documentClickListener = this.renderer .listen('document', 'click', (event: any) => { if (this.viewData && this.viewData.length > 0) { this.viewData.forEach((element: any, index: number) => { if (this.prevlistindex !== -1 && this.viewData[this.prevlistindex].hasOwnProperty('ishoverselected') && this.viewData[this.prevlistindex]['ishoverselected'] === true) { this.viewData[this.prevlistindex]['ishoverselected'] = false; this.prevlistindex = -1; this.listindex = -1; } }); } }); } onArrowdown() { if (this.prevlistindex > -1) { 
this.viewData[this.prevlistindex]['ishoverselected'] = false; } this.listindex++; this.prevlistindex = this.listindex; if (this.listindex >= this.viewData.length) { this.listindex = 0; this.prevlistindex = 0; } this.viewData[this.listindex]['ishoverselected'] = true; if (this.viewData[this.listindex]['ishoverselected']) { const divid = document.getElementById(this.componentId); divid.setAttribute(this.activedescendant, this.viewData[this.listindex].index); } } onArrowUp() { if (this.prevlistindex > -1) { this.viewData[this.prevlistindex]['ishoverselected'] = false; } this.prevlistindex--; if (this.prevlistindex === -1) { this.prevlistindex = this.viewData.length - 1; this.listindex = -1; } this.viewData[this.prevlistindex]['ishoverselected'] = true; if (this.viewData[this.prevlistindex]['ishoverselected']) { const divid = document.getElementById(this.componentId); divid.setAttribute(this.activedescendant, this.viewData[this.prevlistindex].index); } if (this.prevlistindex === 0) { this.listindex = 0; } } onEnterPress() { this.viewData.forEach((element, index) => { if (element.ishoverselected === true) { if (element.isSelected === true) { element.isSelected = false; } else { element.isSelected = true; } } }); } updateComponent() { if (JSON.stringify(this.previousData) !== JSON.stringify(this.data)) { this.previousData = JSON.parse(JSON.stringify(this.data)); this.setData(this.data); } } setData(httpResponse: any) { let responsedata = httpResponse; if (this.datareader) { const dr = this.datareader.split('.'); for (const ir of dr) { responsedata = responsedata[ir]; } } else { responsedata = httpResponse; } this.viewData = responsedata; this.setSelectedFlag(this.viewData); this.onSelectClick(this.viewData); this.orgData = JSON.parse(JSON.stringify(this.viewData)); } onSelectClick(viewRows: any) { this.viewData.forEach((elem: any) => { elem['onClickFlag'] = false; }); } setSelectedFlag(viewRows: any) { viewRows.forEach((row: any, index: number) => { if 
(!row.hasOwnProperty('isSelected')) { row['isSelected'] = false; } row['index'] = 'listbox' + window.crypto.getRandomValues(new Uint32Array(1))[0] + index; }); this.maskloader = false; } filterData() { const tData = JSON.parse(JSON.stringify(this.orgData)); const nodes = this.searchTree(tData, this.filterText); this.viewData = nodes; } searchTree(data: any[], matchingTitle: string) { const disp = this.displayfield; return data.filter(function f(node) { if (node[disp] && node[disp].toLowerCase().startsWith(matchingTitle.toLowerCase())) { return true; } if (node.children) { return (node.children = node.children.filter(f)).length; } }); // return res; } selectedCheckBox(rowData: any) { rowData.isSelected = !rowData.isSelected; this.selectedData = []; this.viewData.forEach((node) => { if (node.isSelected) { this.selectedData.push(node); } }); if (this.filter) { this.checkSelectedFlag(rowData); } const tempData = JSON.parse(JSON.stringify(rowData)); delete tempData['index']; delete tempData['onClickFlag']; this.selectedRows.emit(tempData); } checkSelectedFlag(rowData: any) { this.orgData.forEach((orgObj) => { if (rowData.index === orgObj.index) { orgObj.isSelected = rowData.isSelected; } }); } selectAllRecord() { this.selectedData = []; this.selectAll = !this.selectAll; if (this.selectAll) { this.viewData.forEach((node) => { node.isSelected = true; }); this.selectedData = this.viewData; } else { this.viewData.forEach((node) => { node.isSelected = false; }); } this.selectedRows.emit(this.selectedData); } onClick(data: any) { if (!this.enablecheckbox) { this.viewData.forEach((elem: any) => { elem.onClickFlag = false; }); this.viewData.forEach((ele: any) => { if (ele.index === data.index) { ele.onClickFlag = true; } }); } const tempData = JSON.parse(JSON.stringify(data)); delete tempData['index']; delete tempData['onClickFlag']; delete tempData['isSelected']; this.onRowClick.emit(tempData); } ngAfterViewInit() { } loadContextMenu(event: any, row: any, id: any) { if 
(this.contextmenu && this.contextmenu.length > 0) { this.tempSelectedFlag(this.viewData); this.mouseLocation.left = event.clientX; this.mouseLocation.top = event.clientY; row.isSelected = true; this.getContextMenu(); this.posixUp = this.getListPosition(id); event.preventDefault(); event.stopPropagation(); this.rightClickRowData = row; } } // getcontextmenu getContextMenu() { if (this.contextmenu && this.contextmenu.length > 0) { this.contextMenuFlag = true; this.addListner(); } } tempSelectedFlag(rows: any) { rows.forEach((row: any) => { if (row.isSelected) { row.isSelected = false; } }); } getListPosition(elementRef: any) { const height = 240; if ((window.screen.height - elementRef.getBoundingClientRect().bottom) < height) { return true; } else { return false; } } rightClickDataEmit(Data: any) { this.rightClick.emit(Data); } addListner() { this.globalClickListenFunc = this.renderer.listen('document', 'click', (e: any) => { this.contextMenuFlag = false; if (!this.contextMenuFlag) { this.removeListner(); } }); } removeListner() { if (this.globalClickListenFunc) { this.globalClickListenFunc(); } } ngOnDestroy(): void { this.removeListner(); } onClickIcon() { this.onIconClick.emit(); } }
the_stack
// Unit tests for verifyPlugin / validPluginName. Every filesystem touch
// (fs-extra) and JSON read (amplify-cli-core JSONUtilities) is mocked, so each
// test scripts the exact sequence of pathExists/stat/readJson results the
// verifier will observe.
// NOTE(review): fs-extra is used as jest.Mocked without a visible
// jest.mock('fs-extra') call here — presumably auto-mocked via a __mocks__
// folder or jest config; confirm before relying on it.
import { verifyPlugin, validPluginName } from '../../plugin-helpers/verify-plugin';
import { JSONUtilities } from 'amplify-cli-core';
import * as fs from 'fs-extra';
import * as path from 'path';
import { PluginVerificationError, PluginVerificationResult } from '../../domain/plugin-verification-result';
import { PluginManifest } from '../../domain/plugin-manifest';
import { AmplifyEvent } from '../../domain/amplify-event';

// Stand-in for the core plugin manifest; its command names are reserved and
// may not be reused as plugin names.
const corePluginJson = {
    name: 'core',
    type: 'core',
    commands: [
        'categories',
        'configure',
        'console',
        'delete',
        'env',
        'help',
        'init',
        'logout',
        'migrate',
        'plugin',
        'publish',
        'push',
        'pull',
        'run',
        'status',
        'uninstall',
        'upgrade',
        'version',
    ],
    commandAliases: {
        h: 'help',
        serve: 'run',
        ls: 'status',
    },
};

jest.mock('amplify-cli-core', () => ({
    JSONUtilities: {
        readJson: jest.fn(),
        writeJson: jest.fn(),
    },
}));

const fsMock = fs as jest.Mocked<typeof fs>;

describe('verify-plugin', () => {
    describe('verifyPlugin', () => {
        beforeEach(() => {
            // Reset call history/queued results between tests; the mock itself persists.
            const readJsonMock = JSONUtilities.readJson as jest.MockedFunction<typeof JSONUtilities.readJson>;
            readJsonMock.mockClear();
        });

        it('returns PluginDirPathNotExist error when specify not exist path', async () => {
            fsMock.pathExists.mockImplementation(() => Promise.resolve(false));

            const result = await verifyPlugin(path.join('path', 'to', 'plugin'));
            expect(result).toEqual(new PluginVerificationResult(false, PluginVerificationError.PluginDirPathNotExist));
        });

        it('returns PluginDirPathNotExist error when specify non directory path', async () => {
            fsMock.pathExists.mockImplementation(() => Promise.resolve(true));
            const stat = {
                isDirectory: jest.fn().mockReturnValue(false),
            };
            fsMock.stat.mockResolvedValue(stat as any);

            const result = await verifyPlugin(path.join('path', 'to', 'plugin'));
            expect(result).toEqual(new PluginVerificationResult(false, PluginVerificationError.PluginDirPathNotExist));
        });

        it('returns InvalidNodePackage error when specify package.json not exists directory path', async () => {
            fsMock.pathExists.mockImplementation(() => Promise.resolve(true));
            const stat = {
                isDirectory: jest.fn().mockReturnValue(true),
            };
            fsMock.stat.mockResolvedValue(stat as any);
            const readJsonMock = JSONUtilities.readJson as jest.MockedFunction<typeof JSONUtilities.readJson>;
            // read package.json
            const error = new Error('package.json is not exists.');
            readJsonMock.mockImplementationOnce(() => {
                throw error;
            });

            const result = await verifyPlugin(path.join('path', 'to', 'plugin'));
            expect(result).toEqual(new PluginVerificationResult(false, PluginVerificationError.InvalidNodePackage, error));
        });

        it('returns MissingManifest error when amplify-plugin.json is not exists.', async () => {
            // stat package.json
            fsMock.pathExists.mockImplementationOnce(() => Promise.resolve(true));
            const stat = {
                isDirectory: jest.fn().mockReturnValue(true),
            };
            fsMock.stat.mockResolvedValueOnce(stat as any);
            const readJsonMock = JSONUtilities.readJson as jest.MockedFunction<typeof JSONUtilities.readJson>;
            // read package.json
            const packageJson = {};
            readJsonMock.mockReturnValueOnce(packageJson);
            // stat amplify-plugin.json — missing
            fsMock.pathExists.mockImplementationOnce(() => Promise.resolve(false));

            const result = await verifyPlugin(path.join('path', 'to', 'plugin'));
            const expected = new PluginVerificationResult(false, PluginVerificationError.MissingManifest, undefined, packageJson);
            expect(result).toEqual(expected);
        });

        it('returns MissingManifest error when amplify-plugin.json is not file.', async () => {
            // stat package.json
            fsMock.pathExists.mockImplementationOnce(() => Promise.resolve(true));
            const stat = {
                isDirectory: jest.fn().mockReturnValue(true),
            };
            fsMock.stat.mockResolvedValueOnce(stat as any);
            const readJsonMock = JSONUtilities.readJson as jest.MockedFunction<typeof JSONUtilities.readJson>;
            // read package.json
            const packageJson = {};
            readJsonMock.mockReturnValueOnce(packageJson);
            // stat amplify-plugin.json — exists but is not a regular file
            fsMock.pathExists.mockImplementationOnce(() => Promise.resolve(true));
            const statManifest = {
                isFile: jest.fn().mockReturnValue(false),
            };
            fsMock.stat.mockResolvedValueOnce(statManifest as any);

            const result = await verifyPlugin(path.join('path', 'to', 'plugin'));
            const expected = new PluginVerificationResult(false, PluginVerificationError.MissingManifest, undefined, packageJson);
            expect(result).toEqual(expected);
        });

        it('returns InvalidManifest error when amplify-plugin.json is not json file.', async () => {
            // stat package.json
            fsMock.pathExists.mockImplementationOnce(() => Promise.resolve(true));
            const stat = {
                isDirectory: jest.fn().mockReturnValue(true),
            };
            fsMock.stat.mockResolvedValueOnce(stat as any);
            const readJsonMock = JSONUtilities.readJson as jest.MockedFunction<typeof JSONUtilities.readJson>;
            // read package.json
            const packageJson = {};
            readJsonMock.mockReturnValueOnce(packageJson);
            // stat amplify-plugin.json
            fsMock.pathExists.mockImplementationOnce(() => Promise.resolve(true));
            const statManifest = {
                isFile: jest.fn().mockReturnValue(true),
            };
            fsMock.stat.mockResolvedValueOnce(statManifest as any);
            // read amplify-plugin.json — parsing fails
            const error = new Error('amplify-plugin.json is not json file.');
            readJsonMock.mockImplementationOnce(() => {
                throw error;
            });

            const result = await verifyPlugin(path.join('path', 'to', 'plugin'));
            const expected = new PluginVerificationResult(false, PluginVerificationError.InvalidManifest, error, packageJson);
            expect(result).toEqual(expected);
        });

        it('returns InvalidManifest error when plugin name is invalid', async () => {
            // stat package.json
            fsMock.pathExists.mockImplementationOnce(() => Promise.resolve(true));
            const stat = {
                isDirectory: jest.fn().mockReturnValue(true),
            };
            fsMock.stat.mockResolvedValueOnce(stat as any);
            const readJsonMock = JSONUtilities.readJson as jest.MockedFunction<typeof JSONUtilities.readJson>;
            // read plugin package.json
            const packageJson = {};
            readJsonMock.mockReturnValueOnce(packageJson);
            // stat amplify-plugin.json
            fsMock.pathExists.mockImplementationOnce(() => Promise.resolve(true));
            const statManifest = {
                isFile: jest.fn().mockReturnValue(true),
            };
            fsMock.stat.mockResolvedValueOnce(statManifest as any);
            // read amplify-plugin.json
            const amplifyPluginJson = {
                name: 'categories', // invalid plugin name
            };
            readJsonMock.mockReturnValueOnce(amplifyPluginJson);
            // read core package.json
            readJsonMock.mockReturnValueOnce(corePluginJson);

            const result = await verifyPlugin(path.join('path', 'to', 'plugin'));
            const expected = new PluginVerificationResult(
                false,
                PluginVerificationError.InvalidManifest,
                'Amplify CLI core command names can not be used as plugin name',
                packageJson,
            );
            expect(result).toEqual(expected);
        });

        it('returns MissingHandleAmplifyEventMethod error when plugin has invalid handle methods', async () => {
            // stat package.json
            fsMock.pathExists.mockImplementationOnce(() => Promise.resolve(true));
            const stat = {
                isDirectory: jest.fn().mockReturnValue(true),
            };
            fsMock.stat.mockResolvedValueOnce(stat as any);
            const readJsonMock = JSONUtilities.readJson as jest.MockedFunction<typeof JSONUtilities.readJson>;
            // read plugin package.json
            const packageJson = {};
            readJsonMock.mockReturnValueOnce(packageJson);
            // stat amplify-plugin.json
            fsMock.pathExists.mockImplementationOnce(() => Promise.resolve(true));
            const statManifest = {
                isFile: jest.fn().mockReturnValue(true),
            };
            fsMock.stat.mockResolvedValueOnce(statManifest as any);
            // read amplify-plugin.json — declares PreInit but the fixture module
            // lacks the handler method
            const amplifyPluginJson: PluginManifest = {
                name: 'dynamodb-export', // valid plugin name
                type: 'util',
                commands: ['version', 'help'],
                eventHandlers: [AmplifyEvent.PreInit],
            };
            readJsonMock.mockReturnValueOnce(amplifyPluginJson);
            // read core package.json
            readJsonMock.mockReturnValueOnce(corePluginJson);

            const result = await verifyPlugin(path.join(__dirname, '..', '..', '..', '__mocks__', 'invalid-plugin'));
            const expected = new PluginVerificationResult(
                false,
                PluginVerificationError.MissingHandleAmplifyEventMethod,
                undefined,
                packageJson,
                amplifyPluginJson,
            );
            expect(result).toEqual(expected);
        });

        it('returns that verified is true when plugin pass all verifications', async () => {
            // stat package.json
            fsMock.pathExists.mockImplementationOnce(() => Promise.resolve(true));
            const stat = {
                isDirectory: jest.fn().mockReturnValue(true),
            };
            fsMock.stat.mockResolvedValueOnce(stat as any);
            const readJsonMock = JSONUtilities.readJson as jest.MockedFunction<typeof JSONUtilities.readJson>;
            // read plugin package.json
            const packageJson = {};
            readJsonMock.mockReturnValueOnce(packageJson);
            // stat amplify-plugin.json
            fsMock.pathExists.mockImplementationOnce(() => Promise.resolve(true));
            const statManifest = {
                isFile: jest.fn().mockReturnValue(true),
            };
            fsMock.stat.mockResolvedValueOnce(statManifest as any);
            // read amplify-plugin.json
            const amplifyPluginJson: PluginManifest = {
                name: 'dynamodb-export', // valid plugin name
                type: 'util',
                commands: ['version', 'help'],
                eventHandlers: [AmplifyEvent.PreInit],
            };
            readJsonMock.mockReturnValueOnce(amplifyPluginJson);
            // read core package.json
            readJsonMock.mockReturnValueOnce(corePluginJson);

            const result = await verifyPlugin(path.join(__dirname, '..', '..', '..', '__mocks__', 'valid-plugin'));
            const expected = new PluginVerificationResult(true, undefined, undefined, packageJson, amplifyPluginJson);
            expect(result).toEqual(expected);
        });

        it('returns that verified is true when plugin has no event handlers', async () => {
            // stat package.json
            fsMock.pathExists.mockImplementationOnce(() => Promise.resolve(true));
            const stat = {
                isDirectory: jest.fn().mockReturnValue(true),
            };
            fsMock.stat.mockResolvedValueOnce(stat as any);
            const readJsonMock = JSONUtilities.readJson as jest.MockedFunction<typeof JSONUtilities.readJson>;
            // read plugin package.json
            const packageJson = {};
            readJsonMock.mockReturnValueOnce(packageJson);
            // stat amplify-plugin.json
            fsMock.pathExists.mockImplementationOnce(() => Promise.resolve(true));
            const statManifest = {
                isFile: jest.fn().mockReturnValue(true),
            };
            fsMock.stat.mockResolvedValueOnce(statManifest as any);
            // read amplify-plugin.json — empty eventHandlers means nothing to verify
            const amplifyPluginJson: PluginManifest = {
                name: 'dynamodb-export', // valid plugin name
                type: 'util',
                commands: ['version', 'help'],
                eventHandlers: [],
            };
            readJsonMock.mockReturnValueOnce(amplifyPluginJson);
            // read core package.json
            readJsonMock.mockReturnValueOnce(corePluginJson);

            const result = await verifyPlugin(path.join(__dirname, '..', '..', '..', '__mocks__', 'non-event-handlers-plugin'));
            const expected = new PluginVerificationResult(true, undefined, undefined, packageJson, amplifyPluginJson);
            expect(result).toEqual(expected);
        });
    });

    describe('validPluginName', () => {
        beforeEach(() => {
            // Every name check reads the core manifest to get the reserved names.
            const readJsonMock = JSONUtilities.readJson as jest.MockedFunction<typeof JSONUtilities.readJson>;
            readJsonMock.mockReturnValue(corePluginJson);
        });

        it('returns result that isValid is true when specify valid plugin name', async () => {
            const result = await validPluginName('dynamo-export');
            expect(result).toEqual({ isValid: true });
        });

        it('returns result that isValid is false when specify invalid plugin name', async () => {
            const result = await validPluginName('categories');
            expect(result).toEqual({
                isValid: false,
                message: 'Amplify CLI core command names can not be used as plugin name',
            });
        });
    });
});
the_stack
import Cookie from 'cookie'; import RealUserAgents from '@double-agent/real-user-agents'; import IRequestContext from '@double-agent/collect/interfaces/IRequestContext'; import { MainDomain, SubDomain, CrossDomain } from '@double-agent/collect'; import Plugin, { IPluginPage } from '@double-agent/collect/lib/Plugin'; import Document from '@double-agent/collect/lib/Document'; import { cleanDomains, DomainType } from '@double-agent/collect/lib/DomainUtils'; import IPlugin from '@double-agent/collect/interfaces/IPlugin'; import { CookieGetter, CookieSetter, ICreatedCookies, ICollectedCookies, ICookieGetter, ICookieSetter, IProfileData, } from './interfaces/IProfile'; export default class HttpCookiesPlugin extends Plugin { public initialize() { this.registerRoute('allHttp1', '/start', this.start); this.registerRoute( 'allHttp1', '/saveLoadAssetsAndReadFromJs', this.saveLoadAssetsAndReadFromJs, ); this.registerRoute('allHttp1', '/saveFromJs', this.saveFromJs); this.registerRoute('allHttp1', '/test.css', this.saveFromCss); this.registerRoute('allHttp1', '/redirectToNextPage', this.redirectToNextPage); this.registerRoute('allHttp1', '/saveAndRedirectToNextPage', this.saveAndRedirectToNextPage); this.registerRoute('allHttp1', '/setAndRedirectToNextPage', this.setAndRedirectToNextPage); this.registerRoute('allHttp1', '/set', this.set); this.registerRoute('allHttp1', '/save', this.save); const pages: IPluginPage[] = []; ['http', 'https'].forEach(protocol => { // set cookies on server, then set cookies on client, then read cookies on client, load test.css, load subsequent page let data = { cookieGroup: 'SameDomain' }; pages.push( { route: this.routes[protocol]['/start'], domain: MainDomain, clickNext: true, data }, { route: this.routes[protocol]['/saveLoadAssetsAndReadFromJs'], domain: MainDomain, clickNext: true, data, }, // test.css // saveFromJs ); // try setting cookies on cross-domain during redirect data = { cookieGroup: 'CrossDomainRedirect' }; pages.push( { route: 
this.routes[protocol]['/redirectToNextPage'], domain: MainDomain, isRedirect: true, }, { route: this.routes[protocol]['/setAndRedirectToNextPage'], domain: CrossDomain, isRedirect: true, data, }, { route: this.routes[protocol]['/saveAndRedirectToNextPage'], domain: MainDomain, isRedirect: true, data, }, ); // try setting cookies on sub-domain during redirect data = { cookieGroup: 'SubDomainRedirect' }; pages.push( { route: this.routes[protocol]['/setAndRedirectToNextPage'], domain: SubDomain, isRedirect: true, data, }, { route: this.routes[protocol]['/save'], domain: MainDomain, clickNext: true, data }, ); data = { cookieGroup: 'SubDomain' }; pages.push( { route: this.routes[protocol]['/set'], domain: SubDomain, clickNext: true, data }, { route: this.routes[protocol]['/save'], domain: MainDomain, clickNext: true, data }, ); data = { cookieGroup: 'CrossDomain' }; pages.push( // { route: this.routes[protocol]['/set'], domain: CrossDomain, clickNext: true, data }, { route: this.routes[protocol]['/save'], domain: MainDomain, data }, ); }); this.registerPages(...pages); } public changePluginOrder(plugins: IPlugin[]) { plugins.splice(plugins.indexOf(this), 1); plugins.push(this); } private start(ctx: IRequestContext) { const document = new Document(ctx); const cookieGroup = ctx.page.data?.cookieGroup; const prefix = `${ctx.server.protocol}-${cookieGroup}`; const jsCookieToSet = `${prefix}--JsCookies=0`; const cookiesToSet = createCookies(ctx); document.addNextPageClick(); document.injectBodyTag(`<script type="text/javascript"> (function() { document.cookie = '${jsCookieToSet}'; })(); </script>`); ctx.res.setHeader('Set-Cookie', cookiesToSet); ctx.res.end(document.html); this.saveCreatedCookiesToProfile(cookiesToSet, ctx); this.saveCreatedCookiesToProfile([jsCookieToSet], ctx, { setter: 'JsScript' }); } private saveLoadAssetsAndReadFromJs(ctx: IRequestContext) { const document = new Document(ctx); document.addNextPageClick(); document.injectHeadTag( `<link 
rel="stylesheet" type="text/css" href="${ctx.buildUrl('/test.css')}" />`, ); document.injectBodyTag(`<script type="text/javascript"> (function() { const promise = fetch("${ctx.buildUrl('/saveFromJs')}", { method: 'POST', body: JSON.stringify({ cookies: document.cookie }), headers: { 'Content-Type': 'application/json' }, }); window.pageQueue.push(promise); })(); </script>`); this.saveCollectedCookiesToProfile(collectCookies(ctx), ctx); ctx.res.end(document.html); } private saveFromJs(ctx: IRequestContext) { const cookies = Cookie.parse( (ctx.requestDetails.bodyJson as any).cookies ?? '', ) as ICollectedCookies; this.saveCollectedCookiesToProfile(cookies, ctx, { getter: 'JsScript', group: 'SameDomain' }); ctx.res.end(); } private saveFromCss(ctx: IRequestContext) { const cookies = collectCookies(ctx); this.saveCollectedCookiesToProfile(cookies, ctx, { getter: 'HttpAssetHeader', group: 'SameDomain', }); ctx.res.end(''); } private redirectToNextPage(ctx: IRequestContext) { ctx.res.writeHead(302, { location: ctx.nextPageLink }); ctx.res.end(); } private saveAndRedirectToNextPage(ctx: IRequestContext) { this.saveCollectedCookiesToProfile(collectCookies(ctx), ctx); ctx.res.writeHead(302, { location: ctx.nextPageLink }); ctx.res.end(); } private setAndRedirectToNextPage(ctx: IRequestContext) { const cookiesToSet = createCookies(ctx); ctx.res.setHeader('Set-Cookie', cookiesToSet); ctx.res.writeHead(302, { location: ctx.nextPageLink }); ctx.res.end(); this.saveCreatedCookiesToProfile(cookiesToSet, ctx); } private set(ctx: IRequestContext) { const document = new Document(ctx); const cookiesToSet = createCookies(ctx); document.addNextPageClick(); ctx.res.setHeader('Set-Cookie', cookiesToSet); ctx.res.end(document.html); this.saveCreatedCookiesToProfile(cookiesToSet, ctx); } private save(ctx: IRequestContext) { const document = new Document(ctx); document.addNextPageClick(); this.saveCollectedCookiesToProfile(collectCookies(ctx), ctx); ctx.res.end(document.html); } private 
saveCreatedCookiesToProfile( cookies: ICreatedCookies, ctx: IRequestContext, extraData: IExtraSaveData = {}, ) { const setter = extraData.setter || CookieSetter.HttpHeader; const group = extraData.group || ctx.page.data?.cookieGroup; const httpProtocol = ctx.server.protocol; const profileData = ctx.session.getPluginProfileData<IProfileData>(this, []); const cleanedCookies = cookies.map(x => cleanDomains(x)); profileData.push({ group, setter, httpProtocol, cookies: cleanedCookies, url: ctx.requestDetails.url, }); ctx.session.savePluginProfileData<IProfileData>(this, profileData, { keepInMemory: true }); } private saveCollectedCookiesToProfile( allCookies: ICollectedCookies, ctx: IRequestContext, extraData: IExtraSaveData = {}, ) { const getter = extraData.getter || CookieGetter.HttpHeader; const group = extraData.group || ctx.page.data?.cookieGroup; const httpProtocol = ctx.server.protocol; const cookies = filterCookies(allCookies, httpProtocol, group); const profileData = ctx.session.getPluginProfileData<IProfileData>(this, []); profileData.push({ group, getter, httpProtocol, cookies, url: ctx.requestDetails.url }); ctx.session.savePluginProfileData<IProfileData>(this, profileData, { keepInMemory: true }); } } /////// ///////////////////////////////////////////////////////// function createCookies(ctx: IRequestContext) { const domainType = ctx.requestDetails.domainType; const cookieGroup = ctx.page.data?.cookieGroup; const prefix = `${ctx.server.protocol}-${cookieGroup}`; const userAgent = RealUserAgents.findByString(ctx.session.expectedUserAgentString); const isChrome80 = userAgent?.browserId.startsWith('chrome-80'); const cookies = [ `${prefix}--Basic=0`, `${prefix}--ToBeExpired=start;`, `${prefix}--ToBeExpired=start; expires=Thu, 01 Jan 1970 00:00:00 GMT`, `${prefix}--Secure=0; Secure`, `${prefix}--HttpOnly=0; HttpOnly`, `${prefix}--Expired=0; expires=Thu, 01 Jan 1970 00:00:00 GMT`, `${prefix}--SameSiteLax=0; SameSite=Lax`, `${prefix}--SameSiteLax-Secure=0; 
SameSite=Lax; Secure`, `${prefix}--SameSiteStrict=0; SameSite=Strict`, `${prefix}--SameSiteStrict-Secure=0; SameSite=Strict; Secure`, `${prefix}--SameSiteNone=0; SameSite=None`, `${prefix}--RootPath-Secure=0; Secure; Path=/`, ]; if (!isChrome80) { // chrome 80 starts a/b testing for sending ONLY SameSite=None cookies that are "Secure" to cross-site cookies.push(`${prefix}--SameSiteNone-Secure=0; SameSite=None; Secure`); } if ([DomainType.MainDomain, DomainType.SubDomain].includes(domainType)) { cookies.push( `${prefix}--HttpOnly-MainDomain=0; HttpOnly; Domain=${MainDomain}`, `${prefix}--MainDomain-SameSiteNone=0; SameSite=None; Domain=${MainDomain}`, `${prefix}--MainDomain-Secure-SameSiteLax=0; Secure; SameSite=Lax; Domain=${MainDomain}`, `${prefix}--MainDomain-Secure-SameSiteStrict=0; Secure; SameSite=Strict; Domain=${MainDomain}`, ); if (!isChrome80) { // chrome 80 starts a/b testing for sending ONLY SameSite=None cookies that are "Secure" to cross-site cookies.push( `${prefix}--MainDomain-Secure-SameSiteNone=0; Secure; SameSite=None; Domain=${MainDomain}`, ); } } return cookies; } function collectCookies(ctx) { return Cookie.parse(ctx.req.headers.cookie ?? ''); } function filterCookies( cookies: ICollectedCookies, httpProtocol: string, cookieGroup?: string, ): ICollectedCookies { const prefix = `${httpProtocol}-${cookieGroup}--`; const filteredCookies: ICollectedCookies = {}; for (const [name, value] of Object.entries(cookies)) { if (name.startsWith(prefix)) { filteredCookies[name] = value; } } return filteredCookies; } interface IExtraSaveData { group?: string; getter?: ICookieGetter; setter?: ICookieSetter; }
the_stack
import { Expr, query as q } from 'faunadb'; import { DocumentAuthAccount } from '~/types/document'; import { FactoryContext } from '~/types/factory/factory.context'; import { FactoryUser } from '~/types/factory/factory.user'; import { PAGINATION_SIZE_MAX } from '~/consts'; import { action } from '~/factory/api/action'; import { credential } from '~/factory/api/credential'; import { document } from '~/factory/api/document'; import { indexes } from '~/factory/api/indexes'; import { users } from '~/factory/api/users'; import { session } from '~/factory/api/session'; import { BiotaCollectionName } from '~/factory/constructors/collection'; import { ContextProp } from '~/factory/constructors/context'; import { ThrowError } from '~/factory/constructors/error'; import { MethodDispatch, Query } from '~/factory/constructors/method'; import { ResultData } from '~/factory/constructors/result'; import { BiotaRoleName } from '~/factory/constructors/role'; import { BiotaFunctionName } from '~/factory/constructors/udfunction'; // tslint:disable-next-line: only-arrow-functions export const user: FactoryContext<FactoryUser> = function (context): FactoryUser { // tslint:disable-next-line: only-arrow-functions return (idOrRef = null) => { const ref = q.If(q.IsDoc(idOrRef), idOrRef, q.Ref(q.Collection(BiotaCollectionName('users')), idOrRef)); const refExists = (refExpr: Expr) => { return q.If(q.Not(q.Exists(q.Var('ref'))), ThrowError(q.Var('ctx'), "Reference doesn't exists", { ref: refExpr }), true); }; return { ...document(context, { prefix: 'User' })(ref), login(email, password) { // #improve: add expirationDuration const inputs = { email, password }; // ---- const query = Query( { userRef: ResultData(users(q.Var('ctx')).getByAuthEmail(q.Var('email'))), userIsValid: q.If( q.IsDoc(q.Var('userRef')), true, ThrowError(q.Var('ctx'), "Couldn't find the user", { email: q.Var('email') }), ), identified_user: q.Identify(ContextProp(q.Var('ctx'), 'identity'), q.Var('password')), 
is_identified_user: q.If(q.Var('identified_user'), true, ThrowError(q.Var('ctx'), 'User email or password is wrong')), session: ResultData(session(q.Var('ctx'))().start(null, q.Var('userRef'))), // #improve: add expirationDuration action: action(q.Var('ctx'))('login', q.Var('userRef')).log(), }, q.Var('session'), q.Var('action'), ); // ---- const offline = 'factory.user.login'; const online = { name: BiotaFunctionName('UserLogin'), role: null }; return MethodDispatch({ context, inputs, query })(offline, online); }, logout(everywhere) { const inputs = { everywhere }; // ---- const query = Query( { doc: q.If( q.Or(ContextProp(q.Var('ctx'), 'hasSession'), ContextProp(q.Var('ctx'), 'hasIdentity')), q.If( q.Var('everywhere'), q.Let( { logging_out: q.Map( q.Paginate( ResultData( indexes(q.Var('ctx')).searchQuery(q.Collection(BiotaCollectionName('user_sessions')), { '_membership.owner': ContextProp(q.Var('ctx'), 'identity'), }), ), { size: PAGINATION_SIZE_MAX }, ), q.Lambda(['session'], document(q.Var('ctx'))(q.Var('session')).delete()), ), action: action(q.Var('ctx'))('logout_everywhere', ContextProp(q.Var('ctx'), 'identity')).log(), }, q.Var('logging_out'), ), q.Let( { logging_out: ResultData(document(q.Var('ctx'))(ContextProp(q.Var('ctx'), 'hasSession')).delete()), action: action(q.Var('ctx'))('logout', ContextProp(q.Var('ctx'), 'identity')).log(), }, q.Var('logging_out'), ), ), ThrowError(q.Var('ctx'), 'Context has no identity or session', q.Var('ctx')), ), }, q.Var('doc'), ); // ---- const offline = 'factory.user.logout'; const online = { name: BiotaFunctionName('UserLogout'), role: null }; return MethodDispatch({ context, inputs, query })(offline, online); }, changePassword(currentPassword, password) { const inputs = { currentPassword, password }; // ---- const query = Query( { doc: q.If( ContextProp(q.Var('ctx'), 'hasIdentity'), ResultData( credential(q.Var('ctx'))(ContextProp(q.Var('ctx'), 'identity')).update(q.Var('currentPassword'), q.Var('password')), ), 
ThrowError(q.Var('ctx'), "Can't change password without identity", { identity: ContextProp(q.Var('ctx'), 'identity') }), ), }, q.Var('doc'), ); // ---- const offline = 'factory.user.changePassword'; const online = { name: BiotaFunctionName('UserChangePassword'), role: null }; return MethodDispatch({ context, inputs, query })(offline, online); }, loginWithAuthAccount(account) { // #improve: add expirationDuration const inputs = { account }; // ---- const query = Query( { accountValid: q.If( q.And(q.IsString(q.Select('id', q.Var('account'), null)), q.IsString(q.Select('provider', q.Var('account'), null))), true, ThrowError(q.Var('ctx'), "Auth Account isn't valid", { account: q.Var('account') }), ), userRef: ResultData(users(q.Var('ctx')).getByAuthAccount(q.Var('account'))), userIsValid: q.If( q.IsDoc(q.Var('userRef')), true, ThrowError(q.Var('ctx'), "Could'nt find the user", { account: q.Var('account') }), ), session: ResultData(session(q.Var('ctx'))().start(null, q.Var('userRef'))), // #improve: add expirationDuration }, q.Var('session'), ); // ---- const offline = 'factory.user.loginWithAuthAccount'; const online = { name: BiotaFunctionName('UserLoginWithAuthAccount'), role: null }; return MethodDispatch({ context, inputs, query })(offline, online); }, register(email, password, data) { const inputs = { email, password, data }; // ---- const query = Query( { userRef: q.Select('ref', ResultData(user(q.Var('ctx'))().insert(q.Var('data'))), null), user_email: ResultData(user(q.Var('ctx'))(q.Var('userRef')).auth.email.set(q.Var('email'))), user_owner: ResultData(user(q.Var('ctx'))(q.Var('userRef')).membership.owner.set(q.Var('userRef'))), user_user_role: ResultData( user(q.Var('ctx'))(q.Var('userRef')) .membership.role(q.Role(BiotaRoleName('user'))) .set(), ), user_credentials: ResultData(credential(q.Var('ctx'))(q.Var('userRef')).insert(q.Var('password'))), session: ResultData(session(q.Var('ctx'))().start(null, q.Var('userRef'))), // #improve: add expirationDuration 
action: action(q.Var('ctx'))('register', q.Var('userRef')).log(), }, q.Var('session'), q.Var('action'), ); // ---- const offline = 'factory.user.register'; const online = { name: BiotaFunctionName('UserRegister'), role: null }; return MethodDispatch({ context, inputs, query })(offline, online); }, registerWithAuthAccount(account) { const inputs = { account }; // ---- const query = Query( { userRef: q.Select('ref', ResultData(user(q.Var('ctx'))().insert({})), null), user_auth_account: ResultData(user(q.Var('ctx'))(q.Var('userRef')).auth.accounts.set(q.Var('account') as DocumentAuthAccount)), user_owner: ResultData(user(q.Var('ctx'))(q.Var('userRef')).membership.owner.set(q.Var('userRef'))), user_user_role: ResultData( user(q.Var('ctx'))(q.Var('userRef')) .membership.role(q.Role(BiotaRoleName('user'))) .set(), ), session: ResultData(session(q.Var('ctx'))().start(null, q.Var('userRef'))), action: action(q.Var('ctx'))('register', q.Var('userRef')).log(), }, q.Var('session'), q.Var('action'), ); // ---- const offline = 'factory.user.registerWithAuthAccount'; const online = { name: BiotaFunctionName('UserRegisterWithAuthAccount'), role: null }; return MethodDispatch({ context, inputs, query })(offline, online); }, auth: { email: { set(email) { const inputs = { ref, email }; // ---- const query = Query( { doc: q.If( q.IsString(q.Var('email')), ResultData( user(q.Var('ctx'))(q.Var('ref')).upsert({ _auth: { email: q.Var('email'), }, }), ), false, ), }, q.Var('doc'), ); // ---- const offline = 'factory.user.auth.email.set'; const online = { name: BiotaFunctionName('UserAuthEmailSet'), role: null }; return MethodDispatch({ context, inputs, query })(offline, online); }, remove() { const inputs = { ref }; // ---- const query = Query( { doc: ResultData( user(q.Var('ctx'))(q.Var('ref')).upsert({ _auth: { email: null, }, }), ), }, q.Var('doc'), ); // ---- const offline = 'factory.user.auth.email.remove'; const online = { name: BiotaFunctionName('UserAuthEmailRemove'), role: null 
}; return MethodDispatch({ context, inputs, query })(offline, online); }, }, accounts: { distinct(account) { const inputs = { ref, account }; // ---- const query = Query( { provider: q.Select('provider', q.Var('account'), null), accountId: q.Select('id', q.Var('account'), null), current_accounts: q.Select(['data', '_auth', 'accounts'], q.Get(q.Var('ref')), []), same_current_account: q.Filter( q.Var('current_accounts'), q.Lambda( 'ca', q.And( q.Equals(q.Select('provider', q.Var('ca')), q.Var('provider')), q.Equals(q.Select('id', q.Var('ca')), q.Var('accountId')), ), ), ), current_accounts_without_new: q.Filter( q.Var('current_accounts'), q.Lambda( 'ca', q.Not( q.And( q.Equals(q.Select('provider', q.Var('ca')), q.Var('provider')), q.Equals(q.Select('id', q.Var('ca')), q.Var('accountId')), ), ), ), ), new_account: q.Merge(q.Select(0, q.Var('same_current_account'), {}), q.Var('account')), new_accounts: q.Append(q.Var('current_accounts_without_new'), [q.Var('new_account')]), }, q.Var('new_accounts'), ); // ---- const offline = 'factory.user.auth.accounts.distinct'; const online = { name: BiotaFunctionName('UserAuthAccountsDistinct'), role: null }; return MethodDispatch({ context, inputs, query })(offline, online); }, difference(provider, accountId) { const inputs = { ref, provider, accountId }; // ---- const query = Query( { current_accounts: q.Select(['data', '_auth', 'accounts'], q.Get(q.Var('ref')), []), filtered_accounts: q.Filter( q.Var('current_accounts'), q.Lambda( 'ca', q.Not( q.And( q.Equals(q.Select('provider', q.Var('ca')), q.Var('provider')), q.Equals(q.Select('id', q.Var('ca')), q.Var('accountId')), ), ), ), ), }, q.Var('filtered_accounts'), ); // ---- const offline = 'factory.user.auth.accounts.difference'; const online = { name: BiotaFunctionName('UserAuthAccountsDifference'), role: null }; return MethodDispatch({ context, inputs, query })(offline, online); }, set(account) { const inputs = { ref, account }; // ---- const query = Query( { doc: ResultData( 
user(q.Var('ctx'))(q.Var('ref')).upsert({ _auth: { accounts: user(q.Var('ctx'))(q.Var('ref')).auth.accounts.distinct(q.Var('account') as DocumentAuthAccount), }, }), ), }, q.Var('doc'), ); // ---- const offline = 'factory.user.auth.accounts.set'; const online = { name: BiotaFunctionName('UserAuthAccountsSet'), role: null }; return MethodDispatch({ context, inputs, query })(offline, online); }, remove(provider, accountId) { const inputs = { ref, provider, accountId }; // ---- const query = Query( { doc: ResultData( user(q.Var('ctx'))(q.Var('ref')).upsert({ _auth: { accounts: user(q.Var('ctx'))(q.Var('ref')).auth.accounts.difference(q.Var('provider'), q.Var('accountId')), }, }), ), }, q.Var('doc'), ); // ---- const offline = 'factory.user.auth.accounts.remove'; const online = { name: BiotaFunctionName('UserAuthAccountsRemove'), role: null }; return MethodDispatch({ context, inputs, query })(offline, online); }, }, }, }; }; };
the_stack
import { Timeline } from './Timeline'; import { TweenProps, EaseMethod, getEaseFromConfig, KeyframeData } from './Tween'; import { utils } from './utils'; import { sound } from './sound'; import { AnimateContainer } from './Container'; import { AnimateDisplayObject } from './DisplayObject'; import { Ticker } from '@pixi/ticker'; import { settings } from '@pixi/settings'; import { Graphics } from '@pixi/graphics'; import { Sprite } from '@pixi/sprite'; import { IDestroyOptions } from '@pixi/display'; const SharedTicker = Ticker.shared; export interface MovieClipOptions { /** * The default playback mode is independent (0). Child movieclips are given a different value as subordinate objects. */ mode?: number; /** * The starting frame. Default is 0. */ startPosition?: number; /** * If playback is looped. Default is true. */ loop?: boolean; /** * The frame labels map - label to frames */ labels?: LabelMap; /** * The duration of the clip. If no duration is provided, length is automatically determined. */ duration?: number; /** * The framerate to use for an independent mode MovieClip. Default is 24. */ framerate?: number; } export interface FrameLabel { label: string; position: number; } export interface LabelMap { [label: string]: number; } export type FrameAction = (this: MovieClip) => void; type TimedChildTimeline = boolean[] & {target?: AnimateDisplayObject}; /** * Provide timeline playback of movieclip */ export class MovieClip extends AnimateContainer { /** * The MovieClip will advance independently of its parent, even if its parent is paused. * This is the default mode. */ public static readonly INDEPENDENT = 0; /** * The MovieClip will only display a single frame (as determined by the startPosition property). */ public static readonly SINGLE_FRAME = 1; /** * The MovieClip will be advanced only when its parent advances and will be synched to the position of * the parent MovieClip. 
*/ public static readonly SYNCHED = 2; /** * The default framerate if none is specified or there's not parent clip with a framerate. */ public static readonly DEFAULT_FRAMERATE = 24; /** * Controls how this MovieClip advances its time. Must be one of 0 (INDEPENDENT), 1 (SINGLE_FRAME), or 2 (SYNCHED). * See each constant for a description of the behaviour. */ public mode: number; /** * Specifies what the first frame to play in this movieclip, or the only frame to display if mode is SINGLE_FRAME. */ public startPosition: number; /** * Indicates whether this MovieClip should loop when it reaches the end of its timeline. */ public loop: boolean; /** * The current frame of the movieclip. * @readOnly */ public currentFrame: number; /** * The collection of private labels */ private _labels: FrameLabel[]; /** * The collection of private labels */ private _labelDict: LabelMap; /** * If true, this movieclip will animate automatically whenever it is on the stage. */ public selfAdvance: boolean; /** * If true, the MovieClip's position will not advance when ticked. */ public paused: boolean; /** * If true, actions in this MovieClip's tweens will be run when the playhead advances. */ public actionsEnabled: boolean; /** * If true, the MovieClip will automatically be reset to its first frame whenever the timeline adds * it back onto the display list. This only applies to MovieClip instances with mode=INDEPENDENT. * <br><br> * For example, if you had a character animation with a 'body' child MovieClip instance * with different costumes on each frame, you could set body.autoReset = false, so that * you can manually change the frame it is on, without worrying that it will be reset * automatically. */ public autoReset: boolean; /** * Offset from parent frame for a synched movieclip. */ private _synchOffset: number; /** * Previous position that this movieclip was stopped on. 
*/ private _prevPos: number; /** * Note - changed from default: When the MovieClip is framerate independent, this is the time * elapsed from frame 0 in seconds. */ private _t: number; /** * By default MovieClip instances advance one frame per tick. Specifying a framerate for the MovieClip * will cause it to advance based on elapsed time between ticks as appropriate to maintain the target * framerate. */ protected _framerate: number; /** * The total time in seconds for the animation. This is changed when setting the framerate. */ private _duration: number; /** * The total duration in frames for the animation. */ private _totalFrames: number; /** * Standard tween timelines for all objects. Each element in the _timelines array * is a Timeline object - an array of tweens for one target, in order of occurrence. */ protected _timelines: Timeline[]; /** * Array of child timelines denoting if a child is actively a child of this movieclip * on any given frame. Each element in the _timedChildTimelines is an array with a 'target' * property, and is an array of boolean values indexed by frame. * @private */ public _timedChildTimelines: TimedChildTimeline[]; /** * Array to depth sort timed children */ protected _depthSorted: AnimateDisplayObject[]; /** * Array of frame scripts, indexed by frame. */ protected _actions: FrameAction[][]; /** * Optional callback fired before timeline is updated. * Can be used to clamp or update the currentFrame. * @private */ public _beforeUpdate: (target: MovieClip) => (() => void|null); /** * Internal property used to control child MovieClips relative to parents. 
*/ private parentStartPosition: number; /** * @param options The options object */ constructor(options?: MovieClipOptions); /** * @param mode The playback mode default is independent (0), * @param startPosition The starting frame * @param loop If playback is looped * @param labels The frame labels map of label to frames * @param duration The duration, if no duration is provided, auto determines length * @param framerate The framerate to use for independent mode */ constructor(mode?: number, duration?: number, loop?: boolean, framerate?: number, labels?: LabelMap); constructor(options?: MovieClipOptions|number, duration?: number, loop?: boolean, framerate?: number, labels?: LabelMap) { super(); // Default options options = options === undefined ? {} : options; // Options can also be the mode if (typeof options === 'number') { options = { mode: options || MovieClip.INDEPENDENT, duration: duration || 0, loop: loop === undefined ? true : loop, labels: labels || {}, framerate: framerate || 0, startPosition: 0, }; } else { // Apply defaults to options options = Object.assign({ mode: MovieClip.INDEPENDENT, startPosition: 0, loop: true, labels: {}, duration: 0, framerate: 0, }, options); } this.mode = options.mode; this.startPosition = options.startPosition; this.loop = !!options.loop; this.currentFrame = 0; this._labels = []; this._labelDict = options.labels; if (options.labels) { for (const name in options.labels) { const label = { label: name, position: options.labels[name], }; this._labels.push(label); } this._labels.sort((a, b) => a.position - b.position); } this.selfAdvance = true; this.paused = false; this.actionsEnabled = true; this.autoReset = true; this._synchOffset = 0; this._prevPos = -1; // TODO: evaluate using a ._reset Boolean prop instead of -1. 
this._t = 0; this._framerate = options.framerate; this._duration = 0; this._totalFrames = options.duration; this._timelines = []; this._timedChildTimelines = []; this._depthSorted = []; this._actions = []; this._beforeUpdate = null; this.parentStartPosition = 0; if (this.mode === MovieClip.INDEPENDENT) { this._tickListener = this._tickListener.bind(this); this._onAdded = this._onAdded.bind(this); this._onRemoved = this._onRemoved.bind(this); this.on('added', this._onAdded); this.on('removed', this._onRemoved); } if (options.framerate) { this.framerate = options.framerate; } // save often used methods on the instance so that they can be fetched slightly faster // than if they had to be fetched from the prototype /* eslint-disable no-self-assign */ this.advance = this.advance; this._updateTimeline = this._updateTimeline; this._setTimelinePosition = this._setTimelinePosition; this._goto = this._goto; /* eslint-enable no-self-assign */ } private _onAdded(): void { if (!this._framerate) { this.framerate = this.parentFramerate; } SharedTicker.add(this._tickListener, null); } private _tickListener(tickerDeltaTime: number): void { if (this.paused || !this.selfAdvance) { // see if the movieclip needs to be updated even though it isn't animating if (this._prevPos < 0) { this._goto(this.currentFrame); } return; } const seconds = tickerDeltaTime / settings.TARGET_FPMS / 1000; this.advance(seconds); } private _onRemoved(): void { SharedTicker.remove(this._tickListener, null); } /** * Returns an array of objects with label and position (aka frame) properties, sorted by position. */ public get labels(): FrameLabel[] { return this._labels; } /** * Returns a dictionary of labels where key is the label and value is the frame. */ public get labelsMap(): LabelMap { return this._labelDict; } /** * Returns the name of the label on or immediately before the current frame. 
*/ public get currentLabel(): string|null { const labels = this._labels; let current: string = null; for (let i = 0, len = labels.length; i < len; ++i) { if (labels[i].position <= this.currentFrame) { current = labels[i].label; } else { break; } } return current; } /** * When the MovieClip is framerate independent, this is the time elapsed from frame 0 in seconds. */ public get elapsedTime(): number { return this._t; } public set elapsedTime(value) { this._t = value; } /** * By default MovieClip instances advance one frame per tick. Specifying a framerate for the * MovieClip will cause it to advance based on elapsed time between ticks as appropriate to * maintain the target framerate. * * For example, if a MovieClip with a framerate of 10 is placed on a Stage being updated at * 40fps, then the MovieClip advance roughly one frame every 4 ticks. This will not be exact, * because the time between each tick vary slightly between frames. * * This feature is dependent on the tick event object (or an object with an appropriate 'delta' property) being * passed into {{#crossLink 'Stage/update'}}{{/crossLink}}. */ public get framerate(): number { return this._framerate; } public set framerate(value) { if (value > 0) { if (this._framerate) { // recalculate time based on difference between new and old framerate: this._t *= this._framerate / value; } else { this._t = this.currentFrame / value; } this._framerate = value; this._duration = value ? this._totalFrames / value : 0; } else { this._t = this._framerate = this._duration = 0; } } /** * Get the total number of frames (duration) of this MovieClip */ public get totalFrames(): number { return this._totalFrames; } /** * Extend the timeline to the last frame. 
*/ private _autoExtend(endFrame: number): void { if (this._totalFrames < endFrame) { this._totalFrames = endFrame; } } /** * Convert values of properties */ private _parseProperties(properties: TweenProps & {t?: string|number; v?: number|boolean}): void { // Convert any string colors to uints if (typeof properties.t === 'string') { properties.t = utils.hexToUint(properties.t); } else if (typeof properties.v === 'number') { properties.v = !!properties.v; } } /** * Get a timeline for a child, synced timeline. */ private _getChildTimeline(instance: AnimateDisplayObject): Timeline { for (let i = this._timelines.length - 1; i >= 0; --i) { if (this._timelines[i].target === instance) { return this._timelines[i]; } } const timeline = Timeline.create(instance); this._timelines.push(timeline); return timeline; } /** * Add mask or masks */ public addTimedMask(instance: AnimateDisplayObject, keyframes: {[frame: number]: Graphics|Sprite}): this { for (const i in keyframes) { this.addKeyframe(instance, { m: keyframes[i], }, parseInt(i, 10)); } // Set the initial position/add this._setTimelinePosition(this.currentFrame, this.currentFrame, true); return this; } /** * Shortcut alias for `addTimedMask` */ public am = this.addTimedMask; /** * Shortcut alias for `addTween` */ public tw = this.addTween; /** * Add a tween to the clip * @param instance The clip to tween * @param properties The property or property to tween * @param startFrame The frame to start tweening * @param duration Number of frames to tween. If 0, then the properties are set with no tweening. * @param ease An optional easing function that takes the tween time from 0-1. 
*/ public addTween(instance: AnimateDisplayObject, properties: TweenProps, startFrame: number, duration?: number, ease?: EaseMethod): this { const timeline = this._getChildTimeline(instance); this._parseProperties(properties); timeline.addTween(properties, startFrame, duration, ease); this._autoExtend(startFrame + duration); return this; } /** * Add a tween to the clip * @param instance The clip to tween * @param properties The property or property to tween * @param startFrame The frame to start tweening */ public addKeyframe(instance: AnimateDisplayObject, properties: KeyframeData, startFrame: number): this { const timeline = this._getChildTimeline(instance); const { tw } = properties; // remove tw property just so that it doesn't mess anything up or confuse anyone doing debugging delete properties.tw; this._parseProperties(properties); // add keyframe - note that even if we add a tween immediately afterwards, we want to // add this keyframe in order to make sure the starting properties are set timeline.addKeyframe(properties, startFrame); this._autoExtend(startFrame); // Add a tween if present in the keyframe data if (tw) { this.addTween(instance, tw.p, startFrame, tw.d, getEaseFromConfig(tw.e)); } return this; } /** * Alias for method `addTimedChild` */ public at = this.addTimedChild; /** * Add a child to show for a certain number of frames before automatic removal. * @param instance The clip to show * @param startFrame The starting frame * @param duration The number of frames to display the child before removing it. 
* @param keyframes The collection of static keyframes to add */ public addTimedChild(instance: AnimateDisplayObject, startFrame: number, duration?: number, keyframes?: string|{[frame: number]: TweenProps}): this { if (startFrame === undefined) // jshint ignore:line { startFrame = 0; } if (duration === undefined || duration < 1) // jshint ignore:line { duration = this._totalFrames || 1; } // Add the starting offset for synced movie clips if (instance instanceof MovieClip && instance.mode === MovieClip.SYNCHED) { (instance as MovieClip).parentStartPosition = startFrame; } // add tweening info about this child's presence on stage // when the child is (re)added, if it has 'autoReset' set to true, then it // should be set back to frame 0 let timeline: TimedChildTimeline; // get existing timeline for (let i = this._timedChildTimelines.length - 1; i >= 0; --i) { if (this._timedChildTimelines[i].target === instance) { timeline = this._timedChildTimelines[i]; break; } } // if there wasn't one, make a new one if (!timeline) { timeline = []; timeline.target = instance; this._timedChildTimelines.push(timeline); } // Fill the timeline with keyframe booleans utils.fillFrames(timeline, startFrame, duration); // Update the total frames if the instance extends our current // total frames for this movieclip if (this._totalFrames < startFrame + duration) { this._totalFrames = startFrame + duration; } // Add the collection of keyframes if (keyframes) { if (typeof keyframes === 'string') { keyframes = utils.deserializeKeyframes(keyframes); } for (const i in keyframes) { this.addKeyframe(instance, keyframes[i], parseInt(i, 10)); } this._getChildTimeline(instance) // subtract 1 from duration because we are using 0 based frame indices // and duration is calculated as total frames .extendLastFrame(startFrame + duration - 1); } // Set the initial position/add this._setTimelinePosition(startFrame, this.currentFrame, true); return this; } /** * Short cut for `addAction` */ public aa = 
this.addAction; /** * Handle frame actions, callback is bound to the instance of the MovieClip. * @param callback The clip call on a certain frame * @param startFrame The starting frame index or label */ public addAction(callback: FrameAction, startFrame: number|string): this { if (typeof startFrame === 'string') { const index = this._labelDict[startFrame]; if (index === undefined) { throw new Error(`The label '${startFrame}' does not exist on this timeline`); } startFrame = index; } const actions = this._actions; // ensure that the movieclip timeline is long enough to support the target frame if (actions.length <= startFrame) { actions.length = startFrame + 1; } if (this._totalFrames < startFrame) { this._totalFrames = startFrame; } // add the action if (actions[startFrame]) { actions[startFrame].push(callback); } else { actions[startFrame] = [callback]; } return this; } /** * Short cut for `playSound` */ public ps = this.playSound; /** * Handle sounds. * @method PIXI.animate.MovieClip#playSound * @param {String} alias The name of the Sound * @param {Boolean} [loop=false] The loop property of the sound */ public playSound(alias: string, loop?: boolean): this { sound.emit('play', alias, !!loop, this); return this; } /** * Sets paused to false. */ play(): void { this.paused = false; } /** * Sets paused to true. */ stop(): void { this.paused = true; } /** * Advances this movie clip to the specified position or label and sets paused to false. * @param positionOrLabel The animation name or frame number to go to. */ public gotoAndPlay(positionOrLabel: string|number): void { this.paused = false; this._goto(positionOrLabel); } /** * Advances this movie clip to the specified position or label and sets paused to true. * @param positionOrLabel The animation or frame name to go to. */ public gotoAndStop(positionOrLabel: string|number): void { this.paused = true; this._goto(positionOrLabel); } /** * Get the close parent with a valid framerate. 
If no parent, returns the default framerate. */ public get parentFramerate(): number { // eslint-disable-next-line @typescript-eslint/no-this-alias let o: MovieClip = this; let fps = o._framerate; while ((o = o.parent as MovieClip) && !fps) { if (o.mode === MovieClip.INDEPENDENT) { fps = o._framerate; } } return fps || MovieClip.DEFAULT_FRAMERATE; } /** * Advances the playhead. This occurs automatically each tick by default. * @param time The amount of time in seconds to advance by. Only applicable if framerate is set. */ public advance(time?: number): void { // Handle any other cases where starting to play // and no framerate has been set yet if (!this._framerate) { this.framerate = this.parentFramerate; } if (time) { this._t += time; } if (this._t > this._duration) { this._t = this.loop ? this._t % this._duration : this._duration; } // add a tiny amount to account for potential floating point errors this.currentFrame = Math.floor((this._t * this._framerate) + 0.00000001); // final error checking if (this.currentFrame >= this._totalFrames) { this.currentFrame = this._totalFrames - 1; } let afterUpdateOnce; if (this._beforeUpdate) { afterUpdateOnce = this._beforeUpdate(this); } // update all tweens & actions in the timeline this._updateTimeline(); // Do the animator callback here if (afterUpdateOnce) { afterUpdateOnce(); } } /** * @param positionOrLabel The animation name or frame number to go to. */ protected _goto(positionOrLabel: string|number): void { const pos = typeof positionOrLabel === 'string' ? 
this._labelDict[positionOrLabel] : positionOrLabel; if (pos === undefined) // jshint ignore:line { return; } // prevent _updateTimeline from overwriting the new position because of a reset: this._prevPos = NaN; this.currentFrame = pos; // Handle the case where trying to play but haven't // added to the stage yet if (!this._framerate) { this.framerate = this.parentFramerate; } // update the elapsed time if a time based movieclip if (this._framerate > 0) { this._t = pos / this._framerate; } else { this._t = 0; } this._updateTimeline(); } /** * Reset the movieclip to the first frame (without advancing the timeline). */ private _reset(): void { this._prevPos = -1; this._t = 0; this.currentFrame = 0; } /** * Update timeline position according to playback, performing actions and updating children. * @private */ public _updateTimeline(): void { const synched = this.mode !== MovieClip.INDEPENDENT; if (synched) { this.currentFrame = this.startPosition + (this.mode === MovieClip.SINGLE_FRAME ? 0 : this._synchOffset); if (this.currentFrame >= this._totalFrames) { this.currentFrame %= this._totalFrames; } } if (this._prevPos === this.currentFrame) { return; } // update timeline position, ignoring actions if this is a graphic. this._setTimelinePosition(this._prevPos, this.currentFrame, synched ? false : this.actionsEnabled); this._prevPos = this.currentFrame; } /** * Set the timeline position */ protected _setTimelinePosition(startFrame: number, currentFrame: number, doActions: boolean): void { if (startFrame !== currentFrame && doActions) { let startPos: number; if (isNaN(startFrame)) { startPos = currentFrame; } else { startPos = (startFrame >= this._totalFrames - 1 ? 
0 : startFrame + 1); } // generate actionFrames on the way const actionFrames: number[] = []; // loop if (currentFrame < startPos) { for (let i = startPos; i < this._actions.length; ++i) { if (this._actions[i]) { actionFrames.push(i); } } for (let i = 0; i <= currentFrame; ++i) { if (this._actions[i]) { actionFrames.push(i); } } } // no loop else { for (let i = startPos; i <= currentFrame; ++i) { if (this._actions[i]) { actionFrames.push(i); } } } if (actionFrames.length) { const oldCurrentFrame = this.currentFrame; for (let i = 0; i < actionFrames.length; ++i) { const frame = actionFrames[i]; this._setTimelinePosition(frame, frame, true); // _goto is called OR last frame reached if (this.currentFrame !== oldCurrentFrame || frame === currentFrame) { return; } // stop is called else if (this.paused) { this.currentFrame = frame; return; } } } } // handle all tweens const _timelines = this._timelines; for (let i = _timelines.length - 1; i >= 0; --i) { const timeline = _timelines[i]; for (let j = 0, length = timeline.length; j < length; ++j) { const tween = timeline[j]; // if the tween contains part of the timeline that we are travelling through if (currentFrame >= tween.startFrame && currentFrame <= tween.endFrame) { // set the position within that tween // and break the loop to move onto the next timeline tween.setPosition(currentFrame); break; } } } const timedChildTimelines = this._timedChildTimelines; const depthSorted = this._depthSorted; for (let i = 0, length = timedChildTimelines.length; i < length; ++i) { const target = timedChildTimelines[i].target; const shouldBeChild = timedChildTimelines[i][currentFrame]; // if child should be on stage and is not: if (shouldBeChild) { // Add to the depthSorted object so we can // check that items are property drawn later depthSorted.push(target); if (target.parent !== this) { // add the target if it's not there already this.addChild(target); if (target instanceof MovieClip && target.mode === MovieClip.INDEPENDENT && 
target.autoReset) { target._reset(); } } } else if (!shouldBeChild && target.parent === this) { this.removeChild(target); } } // Properly depth sort the children for (let i = 0, length = depthSorted.length; i < length; i++) { const target = depthSorted[i]; const currentIndex = this.children.indexOf(target); if (currentIndex !== i) { this.addChildAt(target, i); } } // Clear the temporary depth sorting array depthSorted.length = 0; // go through all children and update synched movieclips that are not single frames const children = this.children; for (let i = 0, length = children.length; i < length; ++i) { const child = children[i]; if (child instanceof MovieClip && child.mode === MovieClip.SYNCHED) { child._synchOffset = currentFrame - child.parentStartPosition; child._updateTimeline(); } } // handle actions if (doActions && this._actions && this._actions[currentFrame]) { const frameActions = this._actions[currentFrame]; for (let j = 0; j < frameActions.length; ++j) { frameActions[j].call(this); } } } destroy(options?: IDestroyOptions|boolean): void { if (this._tickListener) { SharedTicker.remove(this._tickListener, null); this._tickListener = null; } const hiddenChildren = []; const timelines = this._timelines; if (timelines) { for (let i = 0; i < timelines.length; i++) { const timeline = timelines[i]; hiddenChildren.push(timeline.target); timeline.destroy(); } } const childTimelines = this._timedChildTimelines; if (childTimelines) { for (let i = 0; i < childTimelines.length; i++) { const timeline = childTimelines[i]; if (hiddenChildren.indexOf(timeline.target) < 0) { hiddenChildren.push(timeline.target); } timeline.length = 0; } } // Destroy all the children for (let i = 0; i < hiddenChildren.length; i++) { // Don't destroy children in the display list if (this.children.indexOf(hiddenChildren[i]) < 0) { hiddenChildren[i].destroy(options as IDestroyOptions); } } hiddenChildren.length = 0; this._actions = null; this._timelines = null; this._depthSorted = null; 
this._timedChildTimelines = null; this._beforeUpdate = null; this._labels = null; this._labelDict = null; super.destroy(options as IDestroyOptions); } }
the_stack
import { ArraySchema, MapSchema, Schema, type } from '@colyseus/schema'; import { Bullet, Game, Monster, Player, Prop } from '../entities'; import { Collisions, Constants, Entities, Geometry, Maps, Maths, Models, Tiled, Types } from '@tosios/common'; export class GameState extends Schema { @type(Game) public game: Game; @type({ map: Player }) public players: MapSchema<Player> = new MapSchema<Player>(); @type({ map: Monster }) public monsters: MapSchema<Monster> = new MapSchema<Monster>(); @type([Prop]) public props: ArraySchema<Prop> = new ArraySchema<Prop>(); @type([Bullet]) public bullets: ArraySchema<Bullet> = new ArraySchema<Bullet>(); private map: Entities.Map; private walls: Collisions.TreeCollider; private spawners: Geometry.RectangleBody[] = []; private actions: Models.ActionJSON[] = []; private onMessage: (message: Models.MessageJSON) => void; // // Init // constructor( roomName: string, mapName: string, maxPlayers: number, mode: Types.GameMode, onMessage: (message: Models.MessageJSON) => void, ) { super(); // Game this.game = new Game({ roomName, mapName, maxPlayers, mode, onWaitingStart: this.handleWaitingStart, onLobbyStart: this.handleLobbyStart, onGameStart: this.handleGameStart, onGameEnd: this.handleGameEnd, }); // Map this.initializeMap(mapName); // Callback this.onMessage = onMessage; } // // Updates // update() { this.updateGame(); this.updatePlayers(); this.updateMonsters(); this.updateBullets(); } private updateGame() { this.game.update(this.players); } private updatePlayers() { let action: Models.ActionJSON; while (this.actions.length > 0) { action = this.actions.shift(); switch (action.type) { case 'move': this.playerMove(action.playerId, action.ts, action.value); break; case 'rotate': this.playerRotate(action.playerId, action.ts, action.value.rotation); break; case 'shoot': this.playerShoot(action.playerId, action.ts, action.value.angle); break; default: break; } } } private updateMonsters() { this.monsters.forEach((monster, monsterId) => { 
this.monsterUpdate(monsterId); }); } private updateBullets() { for (let i: number = 0; i < this.bullets.length; i++) { this.bulletUpdate(i); } } // // Game: State changes // private handleWaitingStart = () => { this.setPlayersActive(false); this.onMessage({ type: 'waiting', from: 'server', ts: Date.now(), params: {}, }); }; private handleLobbyStart = () => { this.setPlayersActive(false); }; private handleGameStart = () => { if (this.game.mode === 'team deathmatch') { this.setPlayersTeamsRandomly(); } this.setPlayersPositionRandomly(); this.setPlayersActive(true); this.propsAdd(Constants.FLASKS_COUNT); this.monstersAdd(Constants.MONSTERS_COUNT); this.onMessage({ type: 'start', from: 'server', ts: Date.now(), params: {}, }); }; private handleGameEnd = (message?: Models.MessageJSON) => { if (message) { this.onMessage(message); } this.propsClear(); this.monstersClear(); this.onMessage({ type: 'stop', from: 'server', ts: Date.now(), params: {}, }); }; // // Map // initializeMap = (mapName: string) => { const data = Maps.List[mapName]; const tiledMap = new Tiled.Map(data, Constants.TILE_SIZE); // Set the map boundaries this.map = new Entities.Map(tiledMap.widthInPixels, tiledMap.heightInPixels); // Create a R-Tree for walls this.walls = new Collisions.TreeCollider(); tiledMap.collisions.forEach((tile) => { if (tile.tileId > 0) { this.walls.insert({ minX: tile.minX, minY: tile.minY, maxX: tile.maxX, maxY: tile.maxY, collider: tile.type, }); } }); // Create spawners tiledMap.spawners.forEach((tile) => { if (tile.tileId > 0) { this.spawners.push(new Geometry.RectangleBody(tile.minX, tile.minY, tile.maxX, tile.maxY)); } }); }; // // Players: single // playerAdd(id: string, name: string) { const spawner = this.getSpawnerRandomly(); const player = new Player( id, spawner.x + Constants.PLAYER_SIZE / 2, spawner.y + Constants.PLAYER_SIZE / 2, Constants.PLAYER_SIZE / 2, 0, Constants.PLAYER_MAX_LIVES, name || id, ); // Add the user to the "red" team by default if (this.game.mode 
=== 'team deathmatch') { player.setTeam('Red'); } this.players.set(id, player); // Broadcast message to other players this.onMessage({ type: 'joined', from: 'server', ts: Date.now(), params: { name: this.players.get(id).name, }, }); } playerPushAction(action: Models.ActionJSON) { this.actions.push(action); } private playerMove(id: string, ts: number, dir: Geometry.Vector2) { const player = this.players.get(id); if (!player || dir.empty) { return; } player.move(dir.x, dir.y, Constants.PLAYER_SPEED); // Collisions: Map const clampedPosition = this.map.clampCircle(player.body); player.setPosition(clampedPosition.x, clampedPosition.y); // Collisions: Walls const correctedPosition = this.walls.correctWithCircle(player.body); player.setPosition(correctedPosition.x, correctedPosition.y); // Acknoledge last treated action player.ack = ts; // Collisions: Props if (!player.isAlive) { return; } let prop: Prop; for (let i: number = 0; i < this.props.length; i++) { prop = this.props[i]; if (!prop.active) { continue; } if (Collisions.circleToCircle(player.body, prop.body)) { switch (prop.type) { case 'potion-red': if (!player.isFullLives) { prop.active = false; player.heal(); } break; default: break; } } } } private playerRotate(id: string, ts: number, rotation: number) { const player = this.players.get(id); if (!player) { return; } player.setRotation(rotation); } private playerShoot(id: string, ts: number, angle: number) { const player = this.players.get(id); if (!player || !player.isAlive || this.game.state !== 'game') { return; } // Check if player can shoot const delta = ts - player.lastShootAt; if (player.lastShootAt && delta < Constants.BULLET_RATE) { return; } player.lastShootAt = ts; // Make the bullet start at the staff const bulletX = player.x + Math.cos(angle) * Constants.PLAYER_WEAPON_SIZE; const bulletY = player.y + Math.sin(angle) * Constants.PLAYER_WEAPON_SIZE; // Recycle bullets if some are unused to prevent instantiating too many const index = 
this.bullets.findIndex((bullet) => !bullet.active); if (index === -1) { this.bullets.push( new Bullet(id, player.team, bulletX, bulletY, Constants.BULLET_SIZE, angle, player.color, Date.now()), ); } else { this.bullets[index].reset( id, player.team, bulletX, bulletY, Constants.BULLET_SIZE, angle, player.color, Date.now(), ); } } private playerUpdateKills(playerId: string) { const player = this.players.get(playerId); if (!player) { return; } player.setKills(player.kills + 1); } playerRemove(id: string) { this.onMessage({ type: 'left', from: 'server', ts: Date.now(), params: { name: this.players.get(id).name, }, }); this.players.delete(id); } // // Players: multiple // private setPlayersActive(active: boolean) { this.players.forEach((player) => { player.setLives(active ? player.maxLives : 0); }); } private setPlayersPositionRandomly() { let spawner: Geometry.RectangleBody; this.players.forEach((player) => { spawner = this.getSpawnerRandomly(); player.setPosition(spawner.x + Constants.PLAYER_SIZE / 2, spawner.y + Constants.PLAYER_SIZE / 2); player.ack = 0; }); } private getPositionRandomly( body: Geometry.CircleBody, snapToGrid: boolean, withCollisions: boolean, ): Geometry.CircleBody { body.x = Maths.getRandomInt(Constants.TILE_SIZE, this.map.width - Constants.TILE_SIZE); body.y = Maths.getRandomInt(Constants.TILE_SIZE, this.map.height - Constants.TILE_SIZE); // Should we compute collisions? 
if (withCollisions) { while (this.walls.collidesWithCircle(body)) { body.x = Maths.getRandomInt(Constants.TILE_SIZE, this.map.width - Constants.TILE_SIZE); body.y = Maths.getRandomInt(Constants.TILE_SIZE, this.map.height - Constants.TILE_SIZE); } } // We want the items to snap to the grid if (snapToGrid) { body.x += Maths.snapPosition(body.x, Constants.TILE_SIZE); body.y += Maths.snapPosition(body.y, Constants.TILE_SIZE); } return body; } private setPlayersTeamsRandomly() { const playersIds = Maths.shuffleArray(Array.from(this.players.keys())); const minimumPlayersPerTeam = Math.floor(playersIds.length / 2); const rest = playersIds.length % 2; for (let i = 0; i < playersIds.length; i++) { const playerId = playersIds[i]; const player = this.players.get(playerId); const isBlueTeam = i < minimumPlayersPerTeam + rest; player.setTeam(isBlueTeam ? 'Blue' : 'Red'); } } private getSpawnerRandomly(): Geometry.RectangleBody { return this.spawners[Maths.getRandomInt(0, this.spawners.length - 1)]; } // // Monsters // private monstersAdd = (count: number) => { for (let i = 0; i < count; i++) { const body = this.getPositionRandomly( new Geometry.CircleBody(0, 0, Constants.MONSTER_SIZE / 2), false, false, ); const monster = new Monster( body.x, body.y, body.width / 2, this.map.width, this.map.height, Constants.MONSTER_LIVES, ); this.monsters.set(Maths.getRandomInt(0, 1000).toString(), monster); } }; private monsterUpdate = (id: string) => { const monster = this.monsters.get(id); if (!monster || !monster.isAlive) { return; } // Update monster monster.update(this.players); // Collisions: Players this.players.forEach((player) => { // Check if the monster can hurt the player if (!player.isAlive || !monster.canAttack || !Collisions.circleToCircle(monster.body, player.body)) { return; } monster.attack(); player.hurt(); if (!player.isAlive) { this.onMessage({ type: 'killed', from: 'server', ts: Date.now(), params: { killerName: 'A bat', killedName: player.name, }, }); } }); }; private 
monsterRemove = (id: string) => { this.monsters.delete(id); }; private monstersClear = () => { const monstersIds = Array.from(this.monsters.keys()); monstersIds.forEach(this.monsterRemove); }; // // Bullets // private bulletUpdate(bulletId: number) { const bullet = this.bullets[bulletId]; if (!bullet || !bullet.active) { return; } bullet.move(Constants.BULLET_SPEED); // Collisions: Players this.players.forEach((player) => { // Check if the bullet can hurt the player if ( !player.canBulletHurt(bullet.playerId, bullet.team) || !Collisions.circleToCircle(bullet.body, player.body) ) { return; } bullet.active = false; player.hurt(); if (!player.isAlive) { this.onMessage({ type: 'killed', from: 'server', ts: Date.now(), params: { killerName: this.players[bullet.playerId].name, killedName: player.name, }, }); this.playerUpdateKills(bullet.playerId); } }); // Collisions: Monsters this.monsters.forEach((monster, monsterId) => { // Check if the bullet can hurt the player if (!Collisions.circleToCircle(bullet.body, monster.body)) { return; } bullet.active = false; monster.hurt(); if (!monster.isAlive) { this.monsterRemove(monsterId); } }); // Collisions: Walls if (this.walls.collidesWithCircle(bullet.body, 'half')) { bullet.active = false; return; } // Collisions: Map if (this.map.isCircleOutside(bullet.body)) { bullet.active = false; } } // // Props // private propsAdd(count: number) { for (let i = 0; i < count; i++) { const body = this.getPositionRandomly(new Geometry.CircleBody(0, 0, Constants.FLASK_SIZE / 2), false, true); const prop = new Prop('potion-red', body.x, body.y, body.radius); this.props.push(prop); } } private propsClear() { if (!this.props) { return; } while (this.props.length > 0) { this.props.pop(); } } }
the_stack
import * as pulumi from "@pulumi/pulumi"; import { input as inputs, output as outputs, enums } from "../types"; import * as utilities from "../utilities"; /** * Resource for managing QuickSight Data Source * * ## Example Usage * * ```typescript * import * as pulumi from "@pulumi/pulumi"; * import * as aws from "@pulumi/aws"; * * const defaultDataSource = new aws.quicksight.DataSource("default", { * dataSourceId: "example-id", * parameters: { * s3: { * manifestFileLocation: { * bucket: "my-bucket", * key: "path/to/manifest.json", * }, * }, * }, * type: "S3", * }); * ``` * * ## Import * * A QuickSight data source can be imported using the AWS account ID, and data source ID name separated by a slash (`/`) e.g. * * ```sh * $ pulumi import aws:quicksight/dataSource:DataSource example 123456789123/my-data-source-id * ``` */ export class DataSource extends pulumi.CustomResource { /** * Get an existing DataSource resource's state with the given name, ID, and optional extra * properties used to qualify the lookup. * * @param name The _unique_ name of the resulting resource. * @param id The _unique_ provider ID of the resource to lookup. * @param state Any extra arguments used during the lookup. * @param opts Optional settings to control the behavior of the CustomResource. */ public static get(name: string, id: pulumi.Input<pulumi.ID>, state?: DataSourceState, opts?: pulumi.CustomResourceOptions): DataSource { return new DataSource(name, <any>state, { ...opts, id: id }); } /** @internal */ public static readonly __pulumiType = 'aws:quicksight/dataSource:DataSource'; /** * Returns true if the given object is an instance of DataSource. This is designed to work even * when multiple copies of the Pulumi SDK have been loaded into the same process. 
*/ public static isInstance(obj: any): obj is DataSource { if (obj === undefined || obj === null) { return false; } return obj['__pulumiType'] === DataSource.__pulumiType; } /** * Amazon Resource Name (ARN) of the data source */ public /*out*/ readonly arn!: pulumi.Output<string>; /** * The ID for the AWS account that the data source is in. Currently, you use the ID for the AWS account that contains your Amazon QuickSight account. */ public readonly awsAccountId!: pulumi.Output<string>; /** * The credentials Amazon QuickSight uses to connect to your underlying source. Currently, only credentials based on user name and password are supported. See Credentials below for more details. */ public readonly credentials!: pulumi.Output<outputs.quicksight.DataSourceCredentials | undefined>; /** * An identifier for the data source. */ public readonly dataSourceId!: pulumi.Output<string>; /** * A name for the data source, maximum of 128 characters. */ public readonly name!: pulumi.Output<string>; /** * The parameters used to connect to this data source (exactly one). */ public readonly parameters!: pulumi.Output<outputs.quicksight.DataSourceParameters>; /** * A set of resource permissions on the data source. Maximum of 64 items. See Permission below for more details. */ public readonly permissions!: pulumi.Output<outputs.quicksight.DataSourcePermission[] | undefined>; /** * Secure Socket Layer (SSL) properties that apply when Amazon QuickSight connects to your underlying source. See SSL Properties below for more details. */ public readonly sslProperties!: pulumi.Output<outputs.quicksight.DataSourceSslProperties | undefined>; /** * Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://www.terraform.io/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level. 
*/ public readonly tags!: pulumi.Output<{[key: string]: string} | undefined>; /** * A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://www.terraform.io/docs/providers/aws/index.html#default_tags-configuration-block). */ public readonly tagsAll!: pulumi.Output<{[key: string]: string}>; /** * The type of the data source. See the [AWS Documentation](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_CreateDataSource.html#QS-CreateDataSource-request-Type) for the complete list of valid values. */ public readonly type!: pulumi.Output<string>; /** * Use this parameter only when you want Amazon QuickSight to use a VPC connection when connecting to your underlying source. See VPC Connection Properties below for more details. */ public readonly vpcConnectionProperties!: pulumi.Output<outputs.quicksight.DataSourceVpcConnectionProperties | undefined>; /** * Create a DataSource resource with the given unique name, arguments, and options. * * @param name The _unique_ name of the resource. * @param args The arguments to use to populate this resource's properties. * @param opts A bag of options that control this resource's behavior. */ constructor(name: string, args: DataSourceArgs, opts?: pulumi.CustomResourceOptions) constructor(name: string, argsOrState?: DataSourceArgs | DataSourceState, opts?: pulumi.CustomResourceOptions) { let inputs: pulumi.Inputs = {}; opts = opts || {}; if (opts.id) { const state = argsOrState as DataSourceState | undefined; inputs["arn"] = state ? state.arn : undefined; inputs["awsAccountId"] = state ? state.awsAccountId : undefined; inputs["credentials"] = state ? state.credentials : undefined; inputs["dataSourceId"] = state ? state.dataSourceId : undefined; inputs["name"] = state ? state.name : undefined; inputs["parameters"] = state ? state.parameters : undefined; inputs["permissions"] = state ? state.permissions : undefined; inputs["sslProperties"] = state ? 
state.sslProperties : undefined;
            inputs["tags"] = state ? state.tags : undefined;
            inputs["tagsAll"] = state ? state.tagsAll : undefined;
            inputs["type"] = state ? state.type : undefined;
            inputs["vpcConnectionProperties"] = state ? state.vpcConnectionProperties : undefined;
        } else {
            const args = argsOrState as DataSourceArgs | undefined;
            // Required properties are only enforced when creating a new resource;
            // opts.urn is set when looking up an existing resource, in which case
            // the engine supplies the values.
            if ((!args || args.dataSourceId === undefined) && !opts.urn) {
                throw new Error("Missing required property 'dataSourceId'");
            }
            if ((!args || args.parameters === undefined) && !opts.urn) {
                throw new Error("Missing required property 'parameters'");
            }
            if ((!args || args.type === undefined) && !opts.urn) {
                throw new Error("Missing required property 'type'");
            }
            inputs["awsAccountId"] = args ? args.awsAccountId : undefined;
            inputs["credentials"] = args ? args.credentials : undefined;
            inputs["dataSourceId"] = args ? args.dataSourceId : undefined;
            inputs["name"] = args ? args.name : undefined;
            inputs["parameters"] = args ? args.parameters : undefined;
            inputs["permissions"] = args ? args.permissions : undefined;
            inputs["sslProperties"] = args ? args.sslProperties : undefined;
            inputs["tags"] = args ? args.tags : undefined;
            inputs["tagsAll"] = args ? args.tagsAll : undefined;
            inputs["type"] = args ? args.type : undefined;
            inputs["vpcConnectionProperties"] = args ? args.vpcConnectionProperties : undefined;
            // arn is an output-only property: always computed by the provider.
            inputs["arn"] = undefined /*out*/;
        }
        if (!opts.version) {
            opts = pulumi.mergeOptions(opts, { version: utilities.getVersion()});
        }
        super(DataSource.__pulumiType, name, inputs, opts);
    }
}

/**
 * Input properties used for looking up and filtering DataSource resources.
 */
export interface DataSourceState {
    /**
     * Amazon Resource Name (ARN) of the data source
     */
    arn?: pulumi.Input<string>;
    /**
     * The ID for the AWS account that the data source is in. Currently, you use the ID for the AWS account that contains your Amazon QuickSight account.
     */
    awsAccountId?: pulumi.Input<string>;
    /**
     * The credentials Amazon QuickSight uses to connect to your underlying source. Currently, only credentials based on user name and password are supported. See Credentials below for more details.
     */
    credentials?: pulumi.Input<inputs.quicksight.DataSourceCredentials>;
    /**
     * An identifier for the data source.
     */
    dataSourceId?: pulumi.Input<string>;
    /**
     * A name for the data source, maximum of 128 characters.
     */
    name?: pulumi.Input<string>;
    /**
     * The parameters used to connect to this data source (exactly one).
     */
    parameters?: pulumi.Input<inputs.quicksight.DataSourceParameters>;
    /**
     * A set of resource permissions on the data source. Maximum of 64 items. See Permission below for more details.
     */
    permissions?: pulumi.Input<pulumi.Input<inputs.quicksight.DataSourcePermission>[]>;
    /**
     * Secure Socket Layer (SSL) properties that apply when Amazon QuickSight connects to your underlying source. See SSL Properties below for more details.
     */
    sslProperties?: pulumi.Input<inputs.quicksight.DataSourceSslProperties>;
    /**
     * Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://www.terraform.io/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
     */
    tags?: pulumi.Input<{[key: string]: pulumi.Input<string>}>;
    /**
     * A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://www.terraform.io/docs/providers/aws/index.html#default_tags-configuration-block).
     */
    tagsAll?: pulumi.Input<{[key: string]: pulumi.Input<string>}>;
    /**
     * The type of the data source. See the [AWS Documentation](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_CreateDataSource.html#QS-CreateDataSource-request-Type) for the complete list of valid values.
     */
    type?: pulumi.Input<string>;
    /**
     * Use this parameter only when you want Amazon QuickSight to use a VPC connection when connecting to your underlying source. See VPC Connection Properties below for more details.
     */
    vpcConnectionProperties?: pulumi.Input<inputs.quicksight.DataSourceVpcConnectionProperties>;
}

/**
 * The set of arguments for constructing a DataSource resource.
 * Unlike DataSourceState, `dataSourceId`, `parameters` and `type` are required
 * here (enforced at runtime in the constructor above), and the output-only
 * `arn` property is absent.
 */
export interface DataSourceArgs {
    /**
     * The ID for the AWS account that the data source is in. Currently, you use the ID for the AWS account that contains your Amazon QuickSight account.
     */
    awsAccountId?: pulumi.Input<string>;
    /**
     * The credentials Amazon QuickSight uses to connect to your underlying source. Currently, only credentials based on user name and password are supported. See Credentials below for more details.
     */
    credentials?: pulumi.Input<inputs.quicksight.DataSourceCredentials>;
    /**
     * An identifier for the data source.
     */
    dataSourceId: pulumi.Input<string>;
    /**
     * A name for the data source, maximum of 128 characters.
     */
    name?: pulumi.Input<string>;
    /**
     * The parameters used to connect to this data source (exactly one).
     */
    parameters: pulumi.Input<inputs.quicksight.DataSourceParameters>;
    /**
     * A set of resource permissions on the data source. Maximum of 64 items. See Permission below for more details.
     */
    permissions?: pulumi.Input<pulumi.Input<inputs.quicksight.DataSourcePermission>[]>;
    /**
     * Secure Socket Layer (SSL) properties that apply when Amazon QuickSight connects to your underlying source. See SSL Properties below for more details.
     */
    sslProperties?: pulumi.Input<inputs.quicksight.DataSourceSslProperties>;
    /**
     * Key-value map of resource tags. If configured with a provider [`defaultTags` configuration block](https://www.terraform.io/docs/providers/aws/index.html#default_tags-configuration-block) present, tags with matching keys will overwrite those defined at the provider-level.
     */
    tags?: pulumi.Input<{[key: string]: pulumi.Input<string>}>;
    /**
     * A map of tags assigned to the resource, including those inherited from the provider [`defaultTags` configuration block](https://www.terraform.io/docs/providers/aws/index.html#default_tags-configuration-block).
     */
    tagsAll?: pulumi.Input<{[key: string]: pulumi.Input<string>}>;
    /**
     * The type of the data source. See the [AWS Documentation](https://docs.aws.amazon.com/quicksight/latest/APIReference/API_CreateDataSource.html#QS-CreateDataSource-request-Type) for the complete list of valid values.
     */
    type: pulumi.Input<string>;
    /**
     * Use this parameter only when you want Amazon QuickSight to use a VPC connection when connecting to your underlying source. See VPC Connection Properties below for more details.
     */
    vpcConnectionProperties?: pulumi.Input<inputs.quicksight.DataSourceVpcConnectionProperties>;
}
// the_stack — concatenation artifact: the Pulumi-generated SDK code above and the AVR timer test suite below are two unrelated files joined here.
import { CPU } from '../cpu/cpu';
import { asmProgram, TestProgramRunner } from '../utils/test-utils';
import { AVRIOPort, PinOverrideMode, portBConfig, portDConfig } from './gpio';
import { AVRTimer, timer0Config, timer1Config, timer2Config } from './timer';

// Unit tests for the simulated AVR timer peripheral (Timer0 / Timer1 / Timer2).
// Each test advances the simulated clock by assigning `cpu.cycles` and calling
// `cpu.tick()`, then inspects timer registers and interrupt flags.
// Register addresses below are data-space addresses; the T0 constant is noted
// as PD4 on the ATmega328p, and one test overrides the config for ATtiny85.

// CPU registers
const R1 = 1;
const R17 = 17;
const R18 = 18;
const R19 = 19;
const R20 = 20;
const R21 = 21;
const R22 = 22;
const SREG = 95;

// Timer 0 Registers
const TIFR0 = 0x35;
const TCCR0A = 0x44;
const TCCR0B = 0x45;
const TCNT0 = 0x46;
const OCR0A = 0x47;
const OCR0B = 0x48;
const TIMSK0 = 0x6e;
const TIMSK1 = 0x6f;

// Timer 1 Registers
const TIFR1 = 0x36;
const TCCR1A = 0x80;
const TCCR1B = 0x81;
const TCCR1C = 0x82;
const TCNT1 = 0x84;
const TCNT1H = 0x85;
const ICR1 = 0x86;
const ICR1H = 0x87;
const OCR1A = 0x88;
const OCR1AH = 0x89;
const OCR1B = 0x8a;
const OCR1C = 0x8c;
const OCR1CH = 0x8d;

// Timer 2 Registers
const TCCR2B = 0xb1;
const TCNT2 = 0xb2;

// Register bit names
const TOV0 = 1;
const TOV1 = 1;
const OCIE0A = 2;
const OCIE0B = 4;
const TOIE0 = 1;
const OCF0A = 2;
const OCF0B = 4;
const OCF1A = 1 << 1;
const OCF1B = 1 << 2;
const OCF1C = 1 << 3;
const WGM00 = 1;
const WGM10 = 1;
const WGM01 = 2;
const WGM11 = 2;
const WGM12 = 8;
const WGM13 = 16;
const CS00 = 1;
const CS01 = 2;
const CS02 = 4;
const CS10 = 1;
const CS21 = 2;
const CS22 = 4;
const COM0B1 = 1 << 5;
const COM1C0 = 1 << 2;
const FOC0B = 1 << 6;
const FOC1C = 1 << 5;
const T0 = 4; // PD4 on ATmega328p

// opcodes
const nopOpCode = '0000';

describe('timer', () => {
  it('should update timer every tick when prescaler is 1', () => {
    const cpu = new CPU(new Uint16Array(0x1000));
    new AVRTimer(cpu, timer0Config);
    cpu.writeData(TCCR0B, CS00); // Set prescaler to 1
    cpu.cycles = 1;
    cpu.tick();
    cpu.cycles = 2;
    cpu.tick();
    const tcnt = cpu.readData(TCNT0);
    expect(tcnt).toEqual(1);
  });

  it('should update timer every 64 ticks when prescaler is 3', () => {
    const cpu = new CPU(new Uint16Array(0x1000));
    new AVRTimer(cpu, timer0Config);
    cpu.writeData(TCCR0B, CS01 | CS00); // Set prescaler to 64
    cpu.cycles = 1;
    cpu.tick();
    cpu.cycles = 1 + 64;
    cpu.tick();
    const tcnt = cpu.readData(TCNT0);
    expect(tcnt).toEqual(1);
  });

  it('should not update timer if it has been disabled', () => {
    const cpu = new CPU(new Uint16Array(0x1000));
    new AVRTimer(cpu, timer0Config);
    cpu.writeData(TCCR0B, 0); // No prescaler (timer disabled)
    cpu.cycles = 1;
    cpu.tick();
    cpu.cycles = 100000;
    cpu.tick();
    const tcnt = cpu.readData(TCNT0);
    expect(tcnt).toEqual(0); // TCNT should stay 0
  });

  it('should set the TOV flag when timer wraps above TOP value', () => {
    const cpu = new CPU(new Uint16Array(0x1000));
    new AVRTimer(cpu, timer0Config);
    cpu.writeData(TCNT0, 0xff);
    cpu.writeData(TCCR0B, CS00); // Set prescaler to 1
    cpu.cycles = 1;
    cpu.tick();
    expect(cpu.readData(TCNT0)).toEqual(0xff);
    expect(cpu.data[TIFR0] & TOV0).toEqual(0);
    cpu.cycles++;
    cpu.tick();
    expect(cpu.readData(TCNT0)).toEqual(0);
    expect(cpu.data[TIFR0] & TOV0).toEqual(TOV0);
  });

  it('should set the TOV if timer overflows past TOP without reaching TOP', () => {
    const cpu = new CPU(new Uint16Array(0x1000));
    new AVRTimer(cpu, timer0Config);
    cpu.writeData(TCNT0, 0xfe);
    cpu.writeData(TCCR0B, CS00); // Set prescaler to 1
    cpu.cycles = 1;
    cpu.tick();
    expect(cpu.readData(TCNT0)).toEqual(0xfe);
    // Jump 4 cycles at once: the counter must wrap (0xfe + 4 = 0x102 -> 0x2)
    // and still report the overflow.
    cpu.cycles += 4;
    cpu.tick();
    expect(cpu.readData(TCNT0)).toEqual(0x2);
    expect(cpu.data[TIFR0] & TOV0).toEqual(TOV0);
  });

  it('should clear the TOV flag when writing 1 to the TOV bit, and not trigger the interrupt', () => {
    const cpu = new CPU(new Uint16Array(0x1000));
    new AVRTimer(cpu, timer0Config);
    cpu.writeData(TCNT0, 0xff);
    cpu.writeData(TCCR0B, CS00); // Set prescaler to 1
    cpu.cycles = 1;
    cpu.tick();
    cpu.cycles = 2;
    cpu.tick();
    expect(cpu.data[TIFR0] & TOV0).toEqual(TOV0);
    // Writing a 1 to a flag bit clears it (standard AVR interrupt-flag behavior).
    cpu.writeData(TIFR0, TOV0);
    expect(cpu.data[TIFR0] & TOV0).toEqual(0);
  });

  it('should set TOV if timer overflows in FAST PWM mode', () => {
    const cpu = new CPU(new Uint16Array(0x1000));
    new AVRTimer(cpu, timer0Config);
    cpu.writeData(TCNT0, 0xff);
    cpu.writeData(TCCR0B, CS00); // Set prescaler to 1
    cpu.cycles = 1;
    cpu.tick();
    cpu.writeData(OCR0A, 0x7f);
    cpu.writeData(TCCR0A, WGM01 | WGM00); // WGM: Fast PWM
    cpu.cycles = 2;
    cpu.tick();
    const tcnt = cpu.readData(TCNT0);
    expect(tcnt).toEqual(0);
    expect(cpu.data[TIFR0] & TOV0).toEqual(TOV0);
  });

  it('should generate an overflow interrupt if timer overflows and interrupts enabled', () => {
    const cpu = new CPU(new Uint16Array(0x1000));
    new AVRTimer(cpu, timer0Config);
    cpu.writeData(TCNT0, 0xff);
    cpu.writeData(TCCR0B, CS00); // Set prescaler to 1
    cpu.cycles = 1;
    cpu.tick();
    cpu.writeData(TIMSK0, TOIE0);
    cpu.data[SREG] = 0x80; // SREG: I-------
    cpu.cycles = 2;
    cpu.tick();
    const tcnt = cpu.readData(TCNT0);
    expect(tcnt).toEqual(2); // TCNT should be 2 (one tick above + 2 cycles for interrupt)
    expect(cpu.data[TIFR0] & TOV0).toEqual(0);
    expect(cpu.pc).toEqual(0x20);
    expect(cpu.cycles).toEqual(4);
  });

  it('should support overriding TIFR/TOV and TIMSK/TOIE bits (issue #64)', () => {
    const cpu = new CPU(new Uint16Array(0x1000));
    new AVRTimer(cpu, {
      ...timer0Config,
      // The following values correspond ATtiny85 config:
      TOV: 2,
      OCFA: 2,
      OCFB: 8,
      TOIE: 2,
      OCIEA: 16,
      OCIEB: 8,
    });
    cpu.writeData(TCNT0, 0xff);
    cpu.writeData(TCCR0B, CS00); // Set prescaler to 1
    cpu.cycles = 1;
    cpu.tick();
    cpu.writeData(TIMSK0, 2);
    cpu.data[SREG] = 0x80; // SREG: I-------
    cpu.cycles = 2;
    cpu.tick();
    const tcnt = cpu.readData(TCNT0);
    expect(tcnt).toEqual(2); // TCNT should be 2 (one tick above + 2 cycles for interrupt)
    expect(cpu.data[TIFR0] & 2).toEqual(0);
    expect(cpu.pc).toEqual(0x20);
    expect(cpu.cycles).toEqual(4);
  });

  it('should not generate an overflow interrupt when global interrupts disabled', () => {
    const cpu = new CPU(new Uint16Array(0x1000));
    new AVRTimer(cpu, timer0Config);
    cpu.writeData(TCNT0, 0xff);
    cpu.writeData(TCCR0B, CS00); // Set prescaler to 1
    cpu.cycles = 1;
    cpu.tick();
    cpu.data[TIMSK0] = TOIE0;
    cpu.data[SREG] = 0x0; // SREG: --------
    cpu.cycles = 2;
    cpu.tick();
    // Flag is still raised, but no interrupt was taken (pc unchanged).
    expect(cpu.data[TIFR0] & TOV0).toEqual(TOV0);
    expect(cpu.pc).toEqual(0);
    expect(cpu.cycles).toEqual(2);
  });

  it('should not generate an overflow interrupt when TOIE0 is clear', () => {
    const cpu = new CPU(new Uint16Array(0x1000));
    new AVRTimer(cpu, timer0Config);
    cpu.writeData(TCNT0, 0xff);
    cpu.writeData(TCCR0B, CS00); // Set prescaler to 1
    cpu.cycles = 1;
    cpu.tick();
    cpu.data[TIMSK0] = 0;
    cpu.data[SREG] = 0x80; // SREG: I-------
    cpu.cycles = 2;
    cpu.tick();
    expect(cpu.data[TIFR0] & TOV0).toEqual(TOV0);
    expect(cpu.pc).toEqual(0);
    expect(cpu.cycles).toEqual(2);
  });

  it('should set OCF0A/B flags when OCRA/B == 0 and the timer equals to OCRA (issue #74)', () => {
    const cpu = new CPU(new Uint16Array(0x1000));
    new AVRTimer(cpu, timer0Config);
    cpu.writeData(TCNT0, 0xff);
    cpu.writeData(OCR0A, 0x0);
    cpu.writeData(OCR0B, 0x0);
    cpu.writeData(TCCR0A, 0x0); // WGM: Normal
    cpu.writeData(TCCR0B, CS00); // Set prescaler to 1
    cpu.cycles = 1;
    cpu.tick();
    cpu.cycles = 2;
    cpu.tick();
    expect(cpu.readData(TCNT0)).toEqual(0);
    expect(cpu.data[TIFR0] & (OCF0A | OCF0B)).toEqual(OCF0A | OCF0B);
    expect(cpu.pc).toEqual(0);
    expect(cpu.cycles).toEqual(2);
  });

  it('should set the OCF1A flag when OCR1A == 120 and the timer overflowed past 120 in WGM mode 15 (issue #94)', () => {
    const cpu = new CPU(new Uint16Array(0x1000));
    new AVRTimer(cpu, timer1Config);
    cpu.writeData(TCNT1, 118);
    cpu.writeData(OCR1A, 120);
    cpu.writeData(OCR1B, 4); // To avoid getting the OCF1B flag set
    cpu.writeData(TCCR1A, WGM10 | WGM11); // WGM: Fast PWM
    cpu.writeData(TCCR1B, WGM12 | WGM13 | CS10); // Set prescaler to 1
    cpu.cycles = 1;
    cpu.tick();
    cpu.cycles = 5;
    cpu.tick();
    expect(cpu.readData(TCNT1)).toEqual(1);
    expect(cpu.data[TIFR1] & (OCF1A | OCF1B)).toEqual(OCF1A);
    expect(cpu.pc).toEqual(0);
    expect(cpu.cycles).toEqual(5);
  });

  it('should set OCF0A flag when timer equals OCRA', () => {
    const cpu = new CPU(new Uint16Array(0x1000));
    new AVRTimer(cpu, timer0Config);
    cpu.writeData(TCNT0, 0x10);
    cpu.writeData(OCR0A, 0x11);
    cpu.writeData(TCCR0A, 0x0); // WGM: Normal
    cpu.writeData(TCCR0B, CS00); // Set prescaler to 1
    cpu.cycles = 1;
    cpu.tick();
    cpu.cycles = 2;
    cpu.tick();
    expect(cpu.data[TIFR0]).toEqual(OCF0A);
    expect(cpu.pc).toEqual(0);
    expect(cpu.cycles).toEqual(2);
  });

  it('should reset the counter in CTC mode if it equals to OCRA', () => {
    const cpu = new CPU(new Uint16Array(0x1000));
    new AVRTimer(cpu, timer0Config);
    cpu.writeData(TCNT0, 0x10);
    cpu.writeData(OCR0A, 0x11);
    cpu.writeData(TCCR0A, WGM01); // WGM: CTC
    cpu.writeData(TCCR0B, CS00); // Set prescaler to 1
    cpu.cycles = 1;
    cpu.tick();
    cpu.cycles = 3;
    cpu.tick();
    const tcnt = cpu.readData(TCNT0);
    expect(tcnt).toEqual(0);
    expect(cpu.pc).toEqual(0);
    expect(cpu.cycles).toEqual(3);
  });

  it('should not set the TOV bit when TOP < MAX in CTC mode (issue #75)', () => {
    const cpu = new CPU(new Uint16Array(0x1000));
    new AVRTimer(cpu, timer0Config);
    cpu.writeData(TCNT0, 0x1e);
    cpu.writeData(OCR0A, 0x1f);
    cpu.writeData(TCCR0A, WGM01); // WGM: CTC
    cpu.writeData(TCCR0B, CS00); // Set prescaler to 1
    cpu.cycles = 1;
    cpu.tick();
    cpu.cycles++;
    cpu.tick();
    cpu.cycles++;
    cpu.tick();
    const tcnt = cpu.readData(TCNT0);
    expect(tcnt).toEqual(0);
    expect(cpu.data[TIFR0] & TOV0).toEqual(0); // TOV0 clear
  });

  it('should set the TOV bit when TOP == MAX in CTC mode (issue #75)', () => {
    const cpu = new CPU(new Uint16Array(0x1000));
    new AVRTimer(cpu, timer0Config);
    cpu.writeData(TCNT0, 0xfe);
    cpu.writeData(OCR0A, 0xff);
    cpu.writeData(TCCR0A, WGM01); // WGM: CTC
    cpu.writeData(TCCR0B, CS00); // Set prescaler to 1
    cpu.cycles = 1;
    cpu.tick();
    cpu.cycles++;
    cpu.tick();
    expect(cpu.readData(TCNT0)).toEqual(0xff);
    expect(cpu.data[TIFR0] & TOV0).toEqual(0); // TOV clear
    cpu.cycles++;
    cpu.tick();
    expect(cpu.readData(TCNT0)).toEqual(0);
    expect(cpu.data[TIFR0] & TOV0).toEqual(TOV0); // TOV set
  });

  it('should not set the TOV bit twice on overflow (issue #80)', () => {
    const cpu = new CPU(new Uint16Array(0x1000));
    new AVRTimer(cpu, timer0Config);
    cpu.writeData(TCNT0, 0xfe);
    cpu.writeData(OCR0A, 0xff);
    cpu.writeData(TCCR0A, WGM01); // WGM: CTC
    cpu.writeData(TCCR0B, CS00); // Set prescaler to 1
    cpu.cycles = 1;
    cpu.tick();
    cpu.cycles++;
    cpu.tick();
    expect(cpu.readData(TCNT0)).toEqual(0xff);
    expect(cpu.data[TIFR0] & TOV0).toEqual(0); // TOV clear
    cpu.cycles++;
    cpu.tick();
    expect(cpu.readData(TCNT0)).toEqual(0);
    expect(cpu.data[TIFR0] & TOV0).toEqual(TOV0); // TOV set
  });

  it('should set OCF0B flag when timer equals OCRB', () => {
    const cpu = new CPU(new Uint16Array(0x1000));
    new AVRTimer(cpu, timer0Config);
    cpu.writeData(TCNT0, 0x10);
    cpu.writeData(OCR0B, 0x11);
    cpu.writeData(TCCR0A, 0x0); // WGM: (Normal)
    cpu.writeData(TCCR0B, CS00); // Set prescaler to 1
    cpu.cycles = 1;
    cpu.tick();
    cpu.cycles = 2;
    cpu.tick();
    expect(cpu.data[TIFR0]).toEqual(OCF0B);
    expect(cpu.pc).toEqual(0);
    expect(cpu.cycles).toEqual(2);
  });

  it('should generate Timer Compare A interrupt when TCNT0 == TCNTA', () => {
    const cpu = new CPU(new Uint16Array(0x1000));
    new AVRTimer(cpu, timer0Config);
    cpu.writeData(TCNT0, 0x20);
    cpu.writeData(OCR0A, 0x21);
    cpu.writeData(TCCR0B, CS00); // Set prescaler to 1
    cpu.writeData(TIMSK0, OCIE0A);
    cpu.writeData(95, 0x80); // SREG: I-------
    cpu.cycles = 1;
    cpu.tick();
    cpu.cycles = 2;
    cpu.tick();
    const tcnt = cpu.readData(TCNT0);
    expect(tcnt).toEqual(0x23); // TCNT should be 0x23 (one tick above + 2 cycles for interrupt)
    expect(cpu.data[TIFR0] & OCF0A).toEqual(0);
    expect(cpu.pc).toEqual(0x1c);
    expect(cpu.cycles).toEqual(4);
  });

  it('should not generate Timer Compare A interrupt when OCIEA is disabled', () => {
    const cpu = new CPU(new Uint16Array(0x1000));
    new AVRTimer(cpu, timer0Config);
    cpu.writeData(TCNT0, 0x20);
    cpu.writeData(OCR0A, 0x21);
    cpu.writeData(TCCR0B, CS00); // Set prescaler to 1
    cpu.writeData(TIMSK0, 0);
    cpu.writeData(95, 0x80); // SREG: I-------
    cpu.cycles = 1;
    cpu.tick();
    cpu.cycles = 2;
    cpu.tick();
    const tcnt = cpu.readData(TCNT0);
    expect(tcnt).toEqual(0x21);
    expect(cpu.pc).toEqual(0);
    expect(cpu.cycles).toEqual(2);
  });

  it('should generate Timer Compare B interrupt when TCNT0 == TCNTB', () => {
    const cpu = new CPU(new Uint16Array(0x1000));
    new AVRTimer(cpu, timer0Config);
    cpu.writeData(TCNT0, 0x20);
    cpu.writeData(OCR0B, 0x21);
    cpu.writeData(TCCR0B, CS00); // Set prescaler to 1
    cpu.writeData(TIMSK0, OCIE0B);
    cpu.writeData(95, 0x80); // SREG: I-------
    cpu.cycles = 1;
    cpu.tick();
    cpu.cycles = 2;
    cpu.tick();
    const tcnt = cpu.readData(TCNT0);
    expect(tcnt).toEqual(0x23); // TCNT should be 0x23 (0x23 + 2 cycles for interrupt)
    expect(cpu.data[TIFR0] & OCF0B).toEqual(0);
    expect(cpu.pc).toEqual(0x1e);
    expect(cpu.cycles).toEqual(4);
  });

  it('should not increment TCNT on the same cycle of TCNT write (issue #36)', () => {
    // At the end of this short program, R17 should contain 0x31. Verified against
    // a physical ATmega328p.
    const { program, instructionCount } = asmProgram(`
    LDI r16, 0x1 ; TCCR0B = 1 << CS00;
    OUT 0x25, r16
    LDI r16, 0x30 ; TCNT0 <- 0x30
    OUT 0x26, r16
    NOP
    IN r17, 0x26 ; r17 <- TCNT
  `);
    const cpu = new CPU(program);
    new AVRTimer(cpu, timer0Config);
    const runner = new TestProgramRunner(cpu);
    runner.runInstructions(instructionCount);
    expect(cpu.data[R17]).toEqual(0x31);
  });

  it('timer2 should count every 256 ticks when prescaler is 6 (issue #5)', () => {
    const cpu = new CPU(new Uint16Array(0x1000));
    new AVRTimer(cpu, timer2Config);
    cpu.writeData(TCCR2B, CS22 | CS21); // Set prescaler to 256
    cpu.cycles = 1;
    cpu.tick();
    cpu.cycles = 1 + 511;
    cpu.tick();
    expect(cpu.readData(TCNT2)).toEqual(1);
    cpu.cycles = 1 + 512;
    cpu.tick();
    expect(cpu.readData(TCNT2)).toEqual(2);
  });

  it('should update TCNT as it is being read by a 2-cycle instruction (issue #40)', () => {
    const { program, instructionCount } = asmProgram(`
    LDI r16, 0x1 ; TCCR0B = 1 << CS00
    OUT 0x25, r16
    LDI r16, 0x0 ; TCNT0 <- 0
    OUT 0x26, r16
    NOP
    LDS r1, 0x46 ; r1 <- TCNT0 (2 cycles)
  `);
    const cpu = new CPU(program);
    new AVRTimer(cpu, timer0Config);
    const runner = new TestProgramRunner(cpu);
    runner.runInstructions(instructionCount);
    expect(cpu.data[R1]).toEqual(2);
  });

  it('should not start counting before the prescaler is first set (issue #41)', () => {
    const { program, instructionCount } = asmProgram(`
    NOP
    NOP
    NOP
    NOP
    LDI r16, 0x1 ; TCCR2B = 1 << CS20;
    STS 0xb1, r16 ; Should start counting after this line
    NOP
    LDS r17, 0xb2 ; TCNT should equal 2 at this point
  `);
    const cpu = new CPU(program);
    new AVRTimer(cpu, timer2Config);
    const runner = new TestProgramRunner(cpu);
    runner.runInstructions(instructionCount);
    expect(cpu.readData(R17)).toEqual(2);
  });

  it('should not keep counting for one more instruction when the timer is disabled (issue #72)', () => {
    const { program, instructionCount } = asmProgram(`
    EOR r1, r1 ; r1 = 0;
    LDI r16, 0x1 ; TCCR2B = 1 << CS20;
    STS 0xb1, r16 ; Should start counting after this instruction,
    STS 0xb1, r1 ; and stop counting *after* this one.
    NOP
    LDS r17, 0xb2 ; TCNT2 should equal 2 at this point (not counting the NOP)
  `);
    const cpu = new CPU(program);
    new AVRTimer(cpu, timer2Config);
    const runner = new TestProgramRunner(cpu);
    runner.runInstructions(instructionCount);
    expect(cpu.readData(R17)).toEqual(2);
  });

  it('should clear OC0B pin when writing 1 to FOC0B', () => {
    const cpu = new CPU(new Uint16Array(0x1000));
    new AVRTimer(cpu, timer0Config);
    cpu.writeData(TCCR0A, COM0B1);
    // Listen to Port B's internal callback
    const portD = new AVRIOPort(cpu, portDConfig);
    const gpioCallback = jest.spyOn(portD, 'timerOverridePin');
    cpu.writeData(TCCR0B, FOC0B);
    expect(gpioCallback).toHaveBeenCalledWith(5, PinOverrideMode.Clear);
  });

  describe('Fast PWM mode', () => {
    it('should set OC0A on Compare Match, clear on Bottom (issue #78)', () => {
      const { program, labels } = asmProgram(`
      LDI r16, 0xfc ; TCNT0 = 0xfc;
      OUT 0x26, r16
      LDI r16, 0xfe ; OCR0A = 0xfe;
      OUT 0x27, r16
      ; WGM: Fast PWM, enable OC0A mode 3 (set on Compare Match, clear on Bottom)
      LDI r16, 0xc3 ; TCCR0A = (1 << COM0A1) | (1 << COM0A0) | (1 << WGM01) | (1 << WGM00);
      OUT 0x24, r16
      LDI r16, 0x1 ; TCCR0B = 1 << CS00;
      OUT 0x25, r16
      NOP ; TCNT is now 0xfd
    beforeMatch:
      NOP ; TCNT is now 0xfe (Compare Match)
    afterMatch:
      NOP ; TCNT is now 0xff
    beforeBottom:
      NOP ; TCNT is now 0x00 (BOTTOM)
    afterBottom:
      NOP
    `);
      const cpu = new CPU(program);
      new AVRTimer(cpu, timer0Config);
      // Listen to Port D's internal callback
      const portD = new AVRIOPort(cpu, portDConfig);
      const gpioCallback = jest.spyOn(portD, 'timerOverridePin');
      const runner = new TestProgramRunner(cpu);
      runner.runToAddress(labels.beforeMatch);
      expect(cpu.readData(TCNT0)).toEqual(0xfd);
      expect(gpioCallback).toHaveBeenCalledTimes(1);
      expect(gpioCallback).toHaveBeenCalledWith(6, PinOverrideMode.Enable); // OC0A: Enable
      gpioCallback.mockClear();
      runner.runToAddress(labels.afterMatch);
      expect(cpu.readData(TCNT0)).toEqual(0xfe);
      expect(gpioCallback).toHaveBeenCalledTimes(1);
      expect(gpioCallback).toHaveBeenCalledWith(6, PinOverrideMode.Set); // OC0A: Set
      gpioCallback.mockClear();
      runner.runToAddress(labels.beforeBottom);
      expect(cpu.readData(TCNT0)).toEqual(0xff);
      expect(gpioCallback).toHaveBeenCalledTimes(0);
      gpioCallback.mockClear();
      runner.runToAddress(labels.afterBottom);
      expect(cpu.readData(TCNT0)).toEqual(0x0);
      expect(gpioCallback).toHaveBeenCalledTimes(1);
      expect(gpioCallback).toHaveBeenCalledWith(6, PinOverrideMode.Clear); // OC0A: Clear
    });

    it('should toggle OC0A on Compare Match when COM0An = 1 (issue #78)', () => {
      const { program, labels } = asmProgram(`
      LDI r16, 0xfc ; TCNT0 = 0xfc;
      OUT 0x26, r16
      LDI r16, 0xfe ; OCR0A = 0xfe;
      OUT 0x27, r16
      ; WGM: Fast PWM, enable OC0A mode 1 (Toggle)
      LDI r16, 0x43 ; TCCR0A = (1 << COM0A0) | (1 << WGM01) | (1 << WGM00);
      OUT 0x24, r16
      LDI r16, 0x09 ; TCCR0B = (1 << WGM02) | (1 << CS00);
      OUT 0x25, r16
      NOP ; TCNT is now 0xfd
    beforeMatch:
      NOP ; TCNT is now 0xfe (Compare Match, TOP)
    afterMatch:
      NOP ; TCNT is now 0
    afterOverflow:
      NOP
    `);
      const cpu = new CPU(program);
      new AVRTimer(cpu, timer0Config);
      // Listen to Port D's internal callback
      const portD = new AVRIOPort(cpu, portDConfig);
      const gpioCallback = jest.spyOn(portD, 'timerOverridePin');
      const runner = new TestProgramRunner(cpu);
      runner.runToAddress(labels.beforeMatch);
      expect(cpu.readData(TCNT0)).toEqual(0xfd);
      expect(gpioCallback).toHaveBeenCalledTimes(1);
      expect(gpioCallback).toHaveBeenCalledWith(6, PinOverrideMode.Enable); // OC0A: Enable
      gpioCallback.mockClear();
      runner.runToAddress(labels.afterMatch);
      expect(cpu.readData(TCNT0)).toEqual(0xfe);
      expect(gpioCallback).toHaveBeenCalledTimes(1);
      expect(gpioCallback).toHaveBeenCalledWith(6, PinOverrideMode.Toggle); // OC0A: Toggle
      gpioCallback.mockClear();
      runner.runToAddress(labels.afterOverflow);
      expect(cpu.readData(TCNT0)).toEqual(0);
      expect(gpioCallback).toHaveBeenCalledTimes(0);
    });

    it('should leave OC0A disconnected when COM0An = 1 and WGM02 = 0 (issue #78)', () => {
      const { program, labels } = asmProgram(`
      LDI r16, 0xfc ; TCNT0 = 0xfc;
      OUT 0x26, r16
      LDI r16, 0xfe ; OCR0A = 0xfe;
      OUT 0x27, r16
      ; WGM: Fast PWM mode 7, enable OC0A mode 1 (Toggle)
      LDI r16, 0x43 ; TCCR0A = (1 << COM0A0) | (1 << WGM01) | (1 << WGM00);
      OUT 0x24, r16
      LDI r16, 0x09 ; TCCR0B = (1 << WGM02) | (1 << CS00);
      OUT 0x25, r16
    beforeClearWGM02:
      LDI r16, 0x01 ; TCCR0B = (1 << CS00);
      OUT 0x25, r16
    afterClearWGM02:
      NOP
    `);
      const cpu = new CPU(program);
      new AVRTimer(cpu, timer0Config);
      // Listen to Port D's internal callback
      const portD = new AVRIOPort(cpu, portDConfig);
      const gpioCallback = jest.spyOn(portD, 'timerOverridePin');
      const runner = new TestProgramRunner(cpu);
      // First, run with the bit set and assert that the Pin Override was enabled (OC0A connected)
      runner.runToAddress(labels.beforeClearWGM02);
      expect(gpioCallback).toHaveBeenCalledTimes(1);
      expect(gpioCallback).toHaveBeenCalledWith(6, PinOverrideMode.Enable);
      gpioCallback.mockClear();
      // Now clear WGM02 and observe that Pin Override was disabled (OC0A disconnected)
      runner.runToAddress(labels.afterClearWGM02);
      expect(gpioCallback).toHaveBeenCalledTimes(1);
      expect(gpioCallback).toHaveBeenCalledWith(6, PinOverrideMode.None);
      gpioCallback.mockClear();
    });
  });

  describe('Phase-correct PWM mode', () => {
    it('should count up to TOP, down to 0, and then set TOV flag', () => {
      const { program, instructionCount } = asmProgram(`
      LDI r16, 0x3 ; OCR0A = 0x3; // <- TOP value
      OUT 0x27, r16
      ; Set waveform generation mode (WGM) to PWM, Phase Correct, top OCR0A
      LDI r16, 0x1 ; TCCR0A = 1 << WGM00;
      OUT 0x24, r16
      LDI r16, 0x9 ; TCCR0B = (1 << WGM02) | (1 << CS00);
      OUT 0x25, r16
      LDI r16, 0x2 ; TCNT0 = 0x2;
      OUT 0x26, r16
      IN r17, 0x26 ; TCNT0 will be 2
      IN r18, 0x26 ; TCNT0 will be 3
      IN r19, 0x26 ; TCNT0 will be 2
      IN r20, 0x26 ; TCNT0 will be 1
      IN r21, 0x26 ; TCNT0 will be 0
      IN r22, 0x26 ; TCNT0 will be 1 (end of test)
    `);
      const cpu = new CPU(program);
      new AVRTimer(cpu, timer0Config);
      const runner = new TestProgramRunner(cpu);
      runner.runInstructions(instructionCount);
      expect(cpu.readData(R17)).toEqual(2);
      expect(cpu.readData(R18)).toEqual(3);
      expect(cpu.readData(R19)).toEqual(2);
      expect(cpu.readData(R20)).toEqual(1);
      expect(cpu.readData(R21)).toEqual(0);
      expect(cpu.readData(R22)).toEqual(1);
      expect(cpu.data[TIFR0] & TOV0).toEqual(TOV0);
    });

    it('should clear OC0A when TCNT0=OCR0A and counting up', () => {
      const { program, lines, instructionCount } = asmProgram(`
      LDI r16, 0xfe ; OCR0A = 0xfe; // <- TOP value
      OUT 0x27, r16
      ; Set waveform generation mode (WGM) to PWM, Phase Correct
      LDI r16, 0x81 ; TCCR0A = (1 << COM0A1) | (1 << WGM00);
      OUT 0x24, r16
      LDI r16, 0x1 ; TCCR0B = (1 << CS00);
      OUT 0x25, r16
      LDI r16, 0xfd ; TCNT0 = 0xfd;
      OUT 0x26, r16
      NOP ; TCNT0 will be 0xfe
      NOP ; TCNT0 will be 0xff
      NOP ; TCNT0 will be 0xfe again (end of test)
    `);
      const cpu = new CPU(program);
      new AVRTimer(cpu, timer0Config);
      // Listen to Port D's internal callback
      const portD = new AVRIOPort(cpu, portDConfig);
      const gpioCallback = jest.spyOn(portD, 'timerOverridePin');
      // Run everything except the trailing NOPs, then single-step through them.
      const nopCount = lines.filter((line) => line.bytes == nopOpCode).length;
      const runner = new TestProgramRunner(cpu);
      runner.runInstructions(instructionCount - nopCount);
      expect(cpu.readData(TCNT0)).toEqual(0xfd);
      expect(gpioCallback).toHaveBeenCalledWith(6, PinOverrideMode.Enable);
      gpioCallback.mockClear();
      runner.runInstructions(1);
      expect(cpu.readData(TCNT0)).toEqual(0xfe);
      expect(gpioCallback).toHaveBeenCalledWith(6, PinOverrideMode.Clear);
      gpioCallback.mockClear();
      runner.runInstructions(1);
      expect(cpu.readData(TCNT0)).toEqual(0xff);
      expect(gpioCallback).not.toHaveBeenCalled();
      runner.runInstructions(1);
      expect(cpu.readData(TCNT0)).toEqual(0xfe);
      expect(gpioCallback).toHaveBeenCalledWith(6, PinOverrideMode.Set);
    });

    it('should toggle OC0A when TCNT0=OCR0A and COM0An=1 (issue #78)', () => {
      const { program, labels } = asmProgram(`
      LDI r16, 0xfe ; OCR0A = 0xfe; // <- TOP value
      OUT 0x27, r16
      ; Set waveform generation mode (WGM) to PWM, Phase Correct (mode 5)
      LDI r16, 0x41 ; TCCR0A = (1 << COM0A0) | (1 << WGM00);
      OUT 0x24, r16
      LDI r16, 0x09 ; TCCR0B = (1 << WGM02) | (1 << CS00);
      OUT 0x25, r16
      LDI r16, 0xfd ; TCNT0 = 0xfd;
      OUT 0x26, r16
    beforeMatch:
      NOP ; TCNT0 will be 0xfe
    afterMatch:
      NOP
    `);
      const cpu = new CPU(program);
      new AVRTimer(cpu, timer0Config);
      // Listen to Port D's internal callback
      const portD = new AVRIOPort(cpu, portDConfig);
      const gpioCallback = jest.spyOn(portD, 'timerOverridePin');
      const runner = new TestProgramRunner(cpu);
      runner.runToAddress(labels.beforeMatch);
      expect(cpu.readData(TCNT0)).toEqual(0xfd);
      expect(gpioCallback).toHaveBeenCalledWith(6, PinOverrideMode.Enable);
      gpioCallback.mockClear();
      runner.runToAddress(labels.afterMatch);
      expect(cpu.readData(TCNT0)).toEqual(0xfe);
      expect(gpioCallback).toHaveBeenCalledWith(6, PinOverrideMode.Toggle);
      gpioCallback.mockClear();
    });

    it('should leave OC0A disconnected TCNT0=OCR0A and COM0An=1 in WGM mode 1 (issue #78)', () => {
      const { program, instructionCount } = asmProgram(`
      LDI r16, 0xfe ; OCR0A = 0xfe; // <- TOP value
      OUT 0x27, r16
      ; Set waveform generation mode (WGM) to PWM, Phase Correct (mode 1)
      LDI r16, 0x41 ; TCCR0A = (1 << COM0A0) | (1 << WGM00);
      OUT 0x24, r16
      LDI r16, 0x01 ; TCCR0B = (1 << CS00);
      OUT 0x25, r16
      LDI r16, 0xfd ; TCNT0 = 0xfd;
      OUT 0x26, r16
    `);
      const cpu = new CPU(program);
      new AVRTimer(cpu, timer0Config);
      // Listen to Port D's internal callback
      const portD = new AVRIOPort(cpu, portDConfig);
      const gpioCallback = jest.spyOn(portD, 'timerOverridePin');
      const runner = new TestProgramRunner(cpu);
      runner.runInstructions(instructionCount);
      // Assert that the pin callback wasn't called (thus it's disconnected)
      expect(gpioCallback).not.toHaveBeenCalled();
    });

    it('should not miss Compare Match when executing multi-cycle instruction (issue #79)', () => {
      const { program, instructionCount } = asmProgram(`
      LDI r16, 0x10 ; OCR0A = 0x10; // <- TOP value
      OUT 0x27, r16
      ; Set waveform generation mode (WGM) to normal, enable OC0A (Set on match)
      LDI r16, 0xc0 ; TCCR0A = (1 << COM0A1) | (1 << COM0A0);
      OUT 0x24, r16
      LDI r16, 0x1 ; TCCR0B = (1 << CS00);
      OUT 0x25, r16
      LDI r16, 0xf ; TCNT0 = 0xf;
      OUT 0x26, r16
      RJMP 1 ; TCNT0 will be 0x11 (RJMP takes 2 cycles)
    `);
      const cpu = new CPU(program);
      new AVRTimer(cpu, timer0Config);
      // Listen to Port D's internal callback
      const portD = new AVRIOPort(cpu, portDConfig);
      const gpioCallback = jest.spyOn(portD, 'timerOverridePin');
      const runner = new TestProgramRunner(cpu);
      runner.runInstructions(instructionCount);
      expect(cpu.readData(TCNT0)).toEqual(0x11);
      expect(gpioCallback).toHaveBeenCalledWith(6, PinOverrideMode.Enable);
      // Verify that Compare Match has occured and set the OC0A pin (PD6 on ATmega328p)
      expect(gpioCallback).toHaveBeenCalledWith(6, PinOverrideMode.Set);
    });

    it('should only update OCR0A when TCNT0=TOP in PWM Phase Correct mode (issue #76)', () => {
      const { program, instructionCount } = asmProgram(`
      LDI r16, 0x4 ; OCR0A = 0x4;
      OUT 0x27, r16
      ; Set waveform generation mode (WGM) to PWM, Phase Correct
      LDI r16, 0x01 ; TCCR0A = (1 << WGM00);
      OUT 0x24, r16
      LDI r16, 0x09 ; TCCR0B = (1 << WGM02) | (1 << CS00);
      OUT 0x25, r16
      LDI r16, 0x0 ; TCNT0 = 0x0;
      OUT 0x26, r16
      LDI r16, 0x2 ; OCR0A = 0x2; // TCNT0 should read 0x0
      OUT 0x27, r16 ; // TCNT0 should read 0x1
      NOP ; // TCNT0 should read 0x2
      NOP ; // TCNT0 should read 0x3
      IN r17, 0x26 ; R17 = TCNT; // TCNT0 should read 0x4 (that's old OCR0A / TOP)
      NOP ; // TCNT0 should read 0x3
      NOP ; // TCNT0 should read 0x2
      NOP ; // TCNT0 should read 0x1
      NOP ; // TCNT0 should read 0x0
      NOP ; // TCNT0 should read 0x1
      NOP ; // TCNT0 should read 0x2
      IN r18, 0x26 ; R18 = TCNT; // TCNT0 should read 0x1
    `);
      const cpu = new CPU(program);
      new AVRTimer(cpu, timer0Config);
      const runner = new TestProgramRunner(cpu);
      runner.runInstructions(instructionCount);
      expect(cpu.readData(R17)).toEqual(0x4);
      expect(cpu.readData(R18)).toEqual(0x1);
    });
  });

  describe('16 bit timers', () => {
    it('should increment 16-bit TCNT by 1', () => {
      const cpu = new CPU(new Uint16Array(0x1000));
      new AVRTimer(cpu, timer1Config);
      cpu.writeData(TCNT1H, 0x22); // TCNT1 <- 0x2233
      cpu.writeData(TCNT1, 0x33); // ...
      const timerLow = cpu.readData(TCNT1);
      const timerHigh = cpu.readData(TCNT1H);
      expect((timerHigh << 8) | timerLow).toEqual(0x2233);
      cpu.writeData(TCCR1A, 0x0); // WGM: Normal
      cpu.writeData(TCCR1B, CS10); // Set prescaler to 1
      cpu.cycles = 1;
      cpu.tick();
      cpu.cycles = 2;
      cpu.tick();
      cpu.readData(TCNT1);
      expect(cpu.dataView.getUint16(TCNT1, true)).toEqual(0x2234); // TCNT1 should increment
    });

    it('should set OCF0A flag when timer equals OCRA (16 bit mode)', () => {
      const cpu = new CPU(new Uint16Array(0x1000));
      new AVRTimer(cpu, timer1Config);
      cpu.writeData(TCNT1H, 0x10); // TCNT1 <- 0x10ee
      cpu.writeData(TCNT1, 0xee); // ...
      cpu.writeData(OCR1AH, 0x10); // OCR1 <- 0x10ef
      cpu.writeData(OCR1A, 0xef); // ...
      cpu.writeData(TCCR1A, 0x0); // WGM: Normal
      cpu.writeData(TCCR1B, CS10); // Set prescaler to 1
      cpu.cycles = 1;
      cpu.tick();
      cpu.cycles = 2;
      cpu.tick();
      expect(cpu.data[TIFR1]).toEqual(OCF1A); // TIFR1 should have OCF1A bit on
      expect(cpu.pc).toEqual(0);
      expect(cpu.cycles).toEqual(2);
    });

    it('should set OCF1C flag when timer equals OCRC', () => {
      const cpu = new CPU(new Uint16Array(0x1000));
      // Local copies intentionally shadow the module-level constants.
      const OCR1C = 0x8c;
      const OCR1CH = 0x8d;
      const OCF1C = 1 << 3;
      new AVRTimer(cpu, {
        ...timer1Config,
        OCRC: OCR1C,
        OCFC: OCF1C,
      });
      cpu.writeData(TCNT1H, 0);
      cpu.writeData(TCNT1, 0x10);
      cpu.writeData(OCR1C, 0x11);
      cpu.writeData(OCR1CH, 0x11);
      cpu.writeData(TCCR1A, 0x0); // WGM: (Normal)
      cpu.writeData(TCCR1B, CS00); // Set prescaler to 1
      cpu.cycles = 1;
      cpu.tick();
      cpu.cycles = 2;
      cpu.tick();
      expect(cpu.data[TIFR1]).toEqual(OCF1C);
      expect(cpu.pc).toEqual(0);
      expect(cpu.cycles).toEqual(2);
    });

    it('should generate an overflow interrupt if timer overflows and interrupts enabled', () => {
      const cpu = new CPU(new Uint16Array(0x1000));
      new AVRTimer(cpu, timer1Config);
      cpu.writeData(TCCR1A, 0x3); // TCCR1A <- WGM10 | WGM11 (Fast PWM, 10-bit)
      cpu.writeData(TCCR1B, 0x9); // TCCR1B <- WGM12 | CS10
      cpu.writeData(TIMSK1, 0x1); // TIMSK1: TOIE1
      cpu.data[SREG] = 0x80; // SREG: I-------
      cpu.writeData(TCNT1H, 0x3); // TCNT1 <- 0x3ff
      cpu.cycles = 1;
      cpu.tick();
      cpu.writeData(TCNT1, 0xff); // ...
      cpu.cycles++; // This cycle shouldn't be counted
      cpu.tick();
      cpu.cycles++;
      cpu.tick(); // This is where we cause the overflow
      cpu.readData(TCNT1); // Refresh TCNT1
      expect(cpu.dataView.getUint16(TCNT1, true)).toEqual(2);
      expect(cpu.data[TIFR1] & TOV1).toEqual(0);
      expect(cpu.pc).toEqual(0x1a);
      expect(cpu.cycles).toEqual(5);
    });

    it('should reset the timer once it reaches ICR value in mode 12', () => {
      const cpu = new CPU(new Uint16Array(0x1000));
      new AVRTimer(cpu, timer1Config);
      cpu.writeData(TCNT1H, 0x50); // TCNT1 <- 0x500f
      cpu.writeData(TCNT1, 0x0f); // ...
      cpu.writeData(ICR1H, 0x50); // ICR1 <- 0x5010
      cpu.writeData(ICR1, 0x10); // ...
      cpu.writeData(TCCR1B, WGM13 | WGM12 | CS10); // Set prescaler to 1, WGM: CTC
      cpu.cycles = 1;
      cpu.tick();
      cpu.cycles = 3; // 2 cycles should increment timer twice, beyond ICR1
      cpu.tick();
      cpu.readData(TCNT1); // Refresh TCNT1
      expect(cpu.dataView.getUint16(TCNT1, true)).toEqual(0); // TCNT should be 0
      expect(cpu.data[TIFR1] & TOV1).toEqual(0);
      expect(cpu.cycles).toEqual(3);
    });

    it('should not update the high byte of TCNT if written after the low byte (issue #37)', () => {
      const cpu = new CPU(new Uint16Array(0x1000));
      new AVRTimer(cpu, timer1Config);
      // Writing low byte first: the temporary high-byte latch is not used,
      // so the later high-byte write has no effect on the counter.
      cpu.writeData(TCNT1, 0x22);
      cpu.writeData(TCNT1H, 0x55);
      cpu.cycles = 1;
      cpu.tick();
      const timerLow = cpu.readData(TCNT1);
      const timerHigh = cpu.readData(TCNT1H);
      expect((timerHigh << 8) | timerLow).toEqual(0x22);
    });

    it('reading from TCNT1H before TCNT1L should return old value (issue #37)', () => {
      const cpu = new CPU(new Uint16Array(0x1000));
      new AVRTimer(cpu, timer1Config);
      cpu.writeData(TCNT1H, 0xff);
      cpu.writeData(TCNT1, 0xff);
      cpu.writeData(TCCR1B, WGM12 | CS10); // Set prescaler to 1, WGM: CTC
      cpu.cycles = 1;
      cpu.tick();
      cpu.cycles = 2;
      cpu.tick();
      // We read the high byte before the low byte, so the high byte should still have
      // the previous value:
      const timerHigh = cpu.readData(TCNT1H);
      const timerLow = cpu.readData(TCNT1);
      expect((timerHigh << 8) | timerLow).toEqual(0xff00);
    });

    it('should toggle OC1B on Compare Match', () => {
      const { program, lines, instructionCount } = asmProgram(`
      ; Set waveform generation mode (WGM) to Normal, top 0xFFFF
      LDI r16, 0x10 ; TCCR1A = (1 << COM1B0);
      STS 0x80, r16
      LDI r16, 0x1 ; TCCR1B = (1 << CS00);
      STS 0x81, r16
      LDI r16, 0x0 ; OCR1BH = 0x0;
      STS 0x8B, r16
      LDI r16, 0x4a ; OCR1BL = 0x4a;
      STS 0x8A, r16
      LDI r16, 0x0 ; TCNT1H = 0x0;
      STS 0x85, r16
      LDI r16, 0x49 ; TCNT1L = 0x49;
      STS 0x84, r16
      NOP ; TCNT1 will be 0x49
      NOP ; TCNT1 will be 0x4a
    `);
      const cpu = new CPU(program);
      new AVRTimer(cpu, timer1Config);
      // Listen to Port B's internal callback
      const portB = new AVRIOPort(cpu, portBConfig);
      const gpioCallback = jest.spyOn(portB, 'timerOverridePin');
      const nopCount = lines.filter((line) => line.bytes == nopOpCode).length;
      const runner = new TestProgramRunner(cpu);
      runner.runInstructions(instructionCount - nopCount);
      expect(cpu.readData(TCNT1)).toEqual(0x49);
      expect(gpioCallback).toHaveBeenCalledWith(2, PinOverrideMode.Enable);
      gpioCallback.mockClear();
      runner.runInstructions(1);
      expect(cpu.readData(TCNT1)).toEqual(0x4a);
      expect(gpioCallback).toHaveBeenCalledWith(2, PinOverrideMode.Toggle);
    });

    it('should toggle OC1C on Compare Match', () => {
      const { program, lines, instructionCount } = asmProgram(`
      ; Set waveform generation mode (WGM) to Normal, top 0xFFFF
      LDI r16, 0x04 ; TCCR1A = (1 << COM1C0);
      STS ${TCCR1A}, r16
      LDI r16, 0x1 ; TCCR1B = (1 << CS00);
      STS ${TCCR1B}, r16
      LDI r16, 0x0 ; OCR1CH = 0x0;
      STS ${OCR1CH}, r16
      LDI r16, 0x4a ; OCR1C = 0x4a;
      STS ${OCR1C}, r16
      LDI r16, 0x0 ; TCNT1H = 0x0;
      STS ${TCNT1H}, r16
      LDI r16, 0x49 ; TCNT1 = 0x49;
      STS ${TCNT1}, r16
      NOP ; TCNT1 will be 0x49
      NOP ; TCNT1 will be 0x4a
    `);
      const cpu = new CPU(program);
      new AVRTimer(cpu, {
        ...timer1Config,
        OCRC: OCR1C,
        OCFC: OCF1C,
        compPortC: portBConfig.PORT,
        compPinC: 3,
      });
      // Listen to Port B's internal callback
      const portB = new AVRIOPort(cpu, portBConfig);
      const gpioCallback = jest.spyOn(portB, 'timerOverridePin');
      const nopCount = lines.filter((line) => line.bytes == nopOpCode).length;
      const runner = new TestProgramRunner(cpu);
      runner.runInstructions(instructionCount - nopCount);
      expect(cpu.readData(TCNT1)).toEqual(0x49);
      expect(gpioCallback).toHaveBeenCalledWith(3, PinOverrideMode.Enable);
      gpioCallback.mockClear();
      runner.runInstructions(1);
      expect(cpu.readData(TCNT1)).toEqual(0x4a);
      expect(gpioCallback).toHaveBeenCalledWith(3, PinOverrideMode.Toggle);
    });

    it('should toggle OC1C on when writing 1 to FOC1C', () => {
      const cpu = new CPU(new Uint16Array(0x1000));
      new 
AVRTimer(cpu, { ...timer1Config, OCRC: OCR1C, OCFC: OCF1C, compPortC: portBConfig.PORT, compPinC: 3, }); cpu.writeData(TCCR1A, COM1C0); // Listen to Port B's internal callback const portB = new AVRIOPort(cpu, portBConfig); const gpioCallback = jest.spyOn(portB, 'timerOverridePin'); cpu.writeData(TCCR1C, FOC1C); expect(gpioCallback).toHaveBeenCalledWith(3, PinOverrideMode.Toggle); }); it('should not toggle OC1C on when writing 1 to FOC1C in PWM mode', () => { const cpu = new CPU(new Uint16Array(0x1000)); new AVRTimer(cpu, { ...timer1Config, OCRC: OCR1C, OCFC: OCF1C, compPortC: portBConfig.PORT, compPinC: 3, }); cpu.writeData(TCCR1A, COM1C0 | WGM11); // Listen to Port B's internal callback const portB = new AVRIOPort(cpu, portBConfig); const gpioCallback = jest.spyOn(portB, 'timerOverridePin'); cpu.writeData(TCCR1C, FOC1C); expect(gpioCallback).not.toHaveBeenCalled(); }); it('should only update OCR1A when TCNT1=BOTTOM in PWM Phase/Frequency Correct mode (issue #76)', () => { const { program, instructionCount } = asmProgram(` LDI r16, 0x0 ; OCR1AH = 0x0; STS 0x89, r16 LDI r16, 0x4 ; OCR1AL = 0x4; STS 0x88, r16 ; Set waveform generation mode (WGM) to PWM Phase/Frequency Correct mode (9) LDI r16, 0x01 ; TCCR1A = (1 << WGM10); STS 0x80, r16 LDI r16, 0x11 ; TCCR1B = (1 << WGM13) | (1 << CS00); STS 0x81, r16 LDI r16, 0x0 ; TCNT1H = 0x0; STS 0x85, r16 LDI r16, 0x0 ; TCNT1L = 0x0; STS 0x84, r16 LDI r16, 0x8 ; OCR1AL = 0x8; // TCNT1 should read 0x0 STS 0x88, r16 ; // TCNT1 should read 0x2 (going up) LDS r17, 0x84 ; // TCNT1 should read 0x4 (going down) LDS r18, 0x84 ; // TCNT1 should read 0x2 (going down) NOP ; // TCNT1 should read 0x0 (going up) NOP ; // TCNT1 should read 0x1 (going up) NOP ; // TCNT1 should read 0x2 (going up) NOP ; // TCNT1 should read 0x3 (going up) NOP ; // TCNT1 should read 0x4 (going up) NOP ; // TCNT1 should read 0x5 (going up) LDS r19, 0x84 ; // TCNT1 should read 0x6 (going up) NOP ; // TCNT1 should read 0x8 (going up) LDS r20, 0x84 ; // TCNT1 should 
read 0x7 (going up) `); const cpu = new CPU(program); new AVRTimer(cpu, timer1Config); const runner = new TestProgramRunner(cpu); runner.runInstructions(instructionCount); expect(cpu.readData(R17)).toEqual(0x4); expect(cpu.readData(R18)).toEqual(0x2); expect(cpu.readData(R19)).toEqual(0x6); expect(cpu.readData(R20)).toEqual(0x7); }); }); describe('External clock', () => { it('should count on the falling edge of T0 when CS=110', () => { const cpu = new CPU(new Uint16Array(0x1000)); const port = new AVRIOPort(cpu, portDConfig); new AVRTimer(cpu, timer0Config); cpu.writeData(TCCR0B, CS02 | CS01); // Count on falling edge cpu.cycles = 1; cpu.tick(); port.setPin(T0, true); // Rising edge cpu.cycles = 2; cpu.tick(); expect(cpu.readData(TCNT0)).toEqual(0); port.setPin(T0, false); // Falling edge cpu.cycles = 3; cpu.tick(); expect(cpu.readData(TCNT0)).toEqual(1); }); it('should count on the rising edge of T0 when CS=111', () => { const cpu = new CPU(new Uint16Array(0x1000)); const port = new AVRIOPort(cpu, portDConfig); new AVRTimer(cpu, timer0Config); cpu.writeData(TCCR0B, CS02 | CS01 | CS00); // Count on rising edge cpu.cycles = 1; cpu.tick(); port.setPin(T0, true); // Rising edge cpu.cycles = 2; cpu.tick(); expect(cpu.readData(TCNT0)).toEqual(1); port.setPin(T0, false); // Falling edge cpu.cycles = 3; cpu.tick(); expect(cpu.readData(TCNT0)).toEqual(1); }); }); });
the_stack
import {} from "mocha"; import { expect } from "chai"; import supertest from "supertest"; import express from "express"; import _ from "lodash"; import casual from "casual"; import moment from "moment"; import { buildDataset, buildNDatasets } from "../utils/builders"; import { Dataset, SearchResult, PeriodOfTime } from "../../model"; export default function testFilterByDate( app: () => express.Application, buildDatasetIndex: (datasets: Dataset[]) => Promise<void> ) { describe("by date", () => { let datesByMonth: { datasets: Dataset[]; earliest: moment.Moment; latest: moment.Moment; }[]; describe("for datasets with a closed date range", async () => { before(async () => { datesByMonth = _.range(0, 12).map((month) => { const monthMoment = moment().utc().year(2019).month(month); const earliest = monthMoment.clone().startOf("month"); const latest = monthMoment.clone().endOf("month"); return { datasets: _.range(1, 3).map(() => { const start = moment .unix( casual.integer( earliest.unix(), latest .clone() .subtract(1, "days") .unix() ) ) .utc(); const end = moment .unix( casual.integer(start.unix(), latest.unix()) ) .utc(); return buildDataset({ temporal: { start: { date: start.toISOString(), text: start.toString() }, end: { date: end.toISOString(), text: end.toString() } } as PeriodOfTime }); }), earliest, latest }; }); await buildDatasetIndex( _.flatMap(datesByMonth, ({ datasets }) => datasets) ); }); it("should only return results between dateTo and dateFrom if both are present", async () => { for (let { datasets, earliest, latest } of datesByMonth) { // console.log( // `/datasets?dateFrom=${earliest.format( // "YYYY-MM-DD" // )}&dateTo=${latest.format("YYYY-MM-DD")}` // ); await supertest(app()) .get( `/datasets?dateFrom=${earliest.toISOString()}&dateTo=${latest.toISOString()}` ) .expect(200) .expect((res) => { const body: SearchResult = res.body; const identifiers = body.dataSets.map( (dataset) => dataset.identifier ); expect(identifiers).to.have.same.members( 
datasets.map((ds) => ds.identifier) ); }); } }); it("should only return results after dateFrom if dateTo is not also present", async () => { for (let i = 0; i < datesByMonth.length; i++) { const { earliest } = datesByMonth[i]; const expectedIdentifiers = _(datesByMonth) .drop(i) .flatMap(({ datasets }) => datasets) .map((ds) => ds.identifier) .value(); await supertest(app()) .get( `/datasets?dateFrom=${earliest.toISOString()}&limit=${ expectedIdentifiers.length + 1 }` ) .expect(200) .expect((res) => { const body: SearchResult = res.body; expect( body.dataSets.map((ds) => ds.identifier) ).to.have.same.members(expectedIdentifiers); }); } }); it("should only return results before dateTo if dateFrom is not also present", async () => { for (let i = 0; i < datesByMonth.length; i++) { const { latest } = datesByMonth[i]; const expectedIdentifiers = _(datesByMonth) .take(i + 1) .flatMap(({ datasets }) => datasets) .map((ds) => ds.identifier) .value(); await supertest(app()) .get( `/datasets?dateTo=${latest.toISOString()}&limit=${ expectedIdentifiers.length + 1 }` ) .expect(200) .expect((res) => { const body: SearchResult = res.body; expect( body.dataSets.map((ds) => ds.identifier) ).to.have.same.members(expectedIdentifiers); }); } }); }); it("datasets should be retrievable by querying by their date", async () => { const datasets = buildNDatasets(100); await buildDatasetIndex(datasets); for (let dataset of datasets) { if ( dataset.temporal && (dataset.temporal.end || dataset.temporal.start) ) { const url = `/datasets?${ dataset.temporal.end?.date ? "&dateTo=" + encodeURIComponent( moment( dataset.temporal.end.date ).toISOString() ) : "" }${ dataset.temporal.start && dataset.temporal.start.date ? 
"&dateFrom=" + encodeURIComponent( moment( dataset.temporal.start.date ).toISOString() ) : "" }&limit=${datasets.length}`; await supertest(app()) .get(url) .expect(200) .expect((res) => { const body: SearchResult = res.body; expect( body.dataSets.map((ds) => ds.identifier) ).to.contain(dataset.identifier); }); } } }); describe("should understand dates in format:", () => { const testFormat = ( format: string, unit: moment.DurationInputArg2 ) => { it(format, async () => { const startDate = moment(casual.date(format), format); const datasets = [ buildDataset({ temporal: { end: { date: startDate .clone() .subtract(1, unit) .subtract(1, "ms") .toISOString() } } }), buildDataset({ temporal: { start: { date: startDate.toISOString() }, end: { date: startDate.toISOString() } } }), buildDataset({ temporal: { start: { date: startDate .clone() .add(1, unit) .add(1, "ms") .toISOString() } } }) ]; await buildDatasetIndex(datasets); await supertest(app()) .get( `/datasets?dateFrom=${startDate.format( format )}&dateTo=${startDate.format(format)}` ) .expect(200) .expect((res) => { const body: SearchResult = res.body; expect(body.dataSets.length).to.equal(1); expect(body.dataSets[0].identifier).to.equal( datasets[1].identifier ); }); }); }; testFormat("YY", "year"); testFormat("YYYY", "year"); testFormat("YYYY-MM", "month"); testFormat("YYYY-MM-DD", "day"); testFormat("YYYY-MM-DDTHH:mm", "minute"); testFormat("YYYY-MM-DDTHH:mm:ss", "second"); }); }); }
the_stack
* @module MapLayers */ import { BeEvent } from "@itwin/core-bentley"; import { Cartographic, ImageMapLayerSettings, ImageSource, ImageSourceFormat } from "@itwin/core-common"; import { IModelApp } from "../../IModelApp"; import { NotifyMessageDetails, OutputMessagePriority } from "../../NotificationManager"; import { getJson, request, RequestBasicCredentials, RequestOptions, Response } from "../../request/Request"; import { ScreenViewport } from "../../Viewport"; import { GeographicTilingScheme, ImageryMapTile, ImageryMapTileTree, MapCartoRectangle, MapLayerFeatureInfo, MapTilingScheme, QuadId, WebMercatorTilingScheme } from "../internal"; const tileImageSize = 256, untiledImageSize = 256; const doDebugToolTips = false; /** @internal */ export enum MapLayerImageryProviderStatus { Valid, RequireAuth, } /** Base class for map layer imagery providers. * @internal */ export abstract class MapLayerImageryProvider { protected _hasSuccessfullyFetchedTile = false; public status: MapLayerImageryProviderStatus = MapLayerImageryProviderStatus.Valid; public readonly onStatusChanged = new BeEvent<(provider: MapLayerImageryProvider) => void>(); private readonly _mercatorTilingScheme = new WebMercatorTilingScheme(); private readonly _geographicTilingScheme = new GeographicTilingScheme(); public get tileSize(): number { return this._usesCachedTiles ? 
tileImageSize : untiledImageSize; } public get maximumScreenSize() { return 2 * this.tileSize; } public get minimumZoomLevel(): number { return 0; } public get maximumZoomLevel(): number { return 22; } public get usesCachedTiles() { return this._usesCachedTiles; } public get mutualExclusiveSubLayer(): boolean { return false; } public get useGeographicTilingScheme() { return false;} public cartoRange?: MapCartoRectangle; protected get _filterByCartoRange() { return true; } constructor(protected readonly _settings: ImageMapLayerSettings, protected _usesCachedTiles: boolean) { this._mercatorTilingScheme = new WebMercatorTilingScheme(); this._geographicTilingScheme = new GeographicTilingScheme(2, 1, true); } public async initialize(): Promise<void> { this.loadTile(0, 0, 22).then((tileData: ImageSource | undefined) => { // eslint-disable-line @typescript-eslint/no-floating-promises if (tileData !== undefined) this._missingTileData = tileData.data as Uint8Array; }); } public abstract constructUrl(row: number, column: number, zoomLevel: number): Promise<string>; public get tilingScheme(): MapTilingScheme { return this.useGeographicTilingScheme ? this._geographicTilingScheme : this._mercatorTilingScheme; } public addLogoCards(_cards: HTMLTableElement, _viewport: ScreenViewport): void { } protected _missingTileData?: Uint8Array; public get transparentBackgroundString(): string { return this._settings.transparentBackground ? 
"true" : "false"; } protected async _areChildrenAvailable(_tile: ImageryMapTile): Promise<boolean> { return true; } public getPotentialChildIds(tile: ImageryMapTile): QuadId[] { const childLevel = tile.quadId.level + 1; return tile.quadId.getChildIds(this.tilingScheme.getNumberOfXChildrenAtLevel(childLevel), this.tilingScheme.getNumberOfYChildrenAtLevel(childLevel)); } protected _generateChildIds(tile: ImageryMapTile, resolveChildren: (childIds: QuadId[]) => void) { resolveChildren(this.getPotentialChildIds(tile)); } public generateChildIds(tile: ImageryMapTile, resolveChildren: (childIds: QuadId[]) => void) { if (tile.depth >= this.maximumZoomLevel || (undefined !== this.cartoRange && this._filterByCartoRange && !this.cartoRange.intersectsRange(tile.rectangle))) { tile.setLeaf(); return; } this._generateChildIds(tile, resolveChildren); } public async getToolTip(strings: string[], quadId: QuadId, _carto: Cartographic, tree: ImageryMapTileTree): Promise<void> { if (doDebugToolTips) { const range = quadId.getLatLongRange(tree.tilingScheme); strings.push(`QuadId: ${quadId.debugString}, Lat: ${range.low.x} - ${range.high.x} Long: ${range.low.y} - ${range.high.y}`); } } public async getFeatureInfo(featureInfos: MapLayerFeatureInfo[], _quadId: QuadId, _carto: Cartographic, _tree: ImageryMapTileTree): Promise<void> { // default implementation; simply return an empty feature info featureInfos.push({layerName: this._settings.name}); } protected getRequestAuthorization(): RequestBasicCredentials | undefined { return (this._settings.userName && this._settings.password) ? 
{ user: this._settings.userName, password: this._settings.password } : undefined; } protected getImageFromTileResponse(tileResponse: Response, zoomLevel: number) { const byteArray: Uint8Array = new Uint8Array(tileResponse.body); if (!byteArray || (byteArray.length === 0)) return undefined; if (this.matchesMissingTile(byteArray) && zoomLevel > 8) return undefined; let imageFormat: ImageSourceFormat; switch (tileResponse.header["content-type"]) { case "image/jpeg": imageFormat = ImageSourceFormat.Jpeg; break; case "image/png": imageFormat = ImageSourceFormat.Png; break; default: return undefined; } return new ImageSource(byteArray, imageFormat); } public setStatus(status: MapLayerImageryProviderStatus) { if (this.status !== status) { this.status = status; this.onStatusChanged.raiseEvent(this); } } public async makeTileRequest(url: string) { const tileRequestOptions: RequestOptions = { method: "GET", responseType: "arraybuffer" }; tileRequestOptions.auth = this.getRequestAuthorization(); return request(url, tileRequestOptions); } public async loadTile(row: number, column: number, zoomLevel: number): Promise<ImageSource | undefined> { try { const tileUrl: string = await this.constructUrl(row, column, zoomLevel); if (tileUrl.length === 0) return undefined; const tileResponse: Response = await this.makeTileRequest(tileUrl); if (!this._hasSuccessfullyFetchedTile) { this._hasSuccessfullyFetchedTile = true; } return this.getImageFromTileResponse(tileResponse, zoomLevel); } catch (error: any) { if (error?.status === 401) { this.setStatus(MapLayerImageryProviderStatus.RequireAuth); // Only report error to end-user if we were previously able to fetch tiles // and then encountered an error, otherwise I assume an error was already reported // through the source validation process. 
if (this._hasSuccessfullyFetchedTile) { const msg = IModelApp.localization.getLocalizedString("iModelJs:MapLayers.Messages.LoadTileTokenError", { layerName: this._settings.name }); IModelApp.notifications.outputMessage(new NotifyMessageDetails(OutputMessagePriority.Warning, msg)); } } return undefined; } } protected async toolTipFromUrl(strings: string[], url: string): Promise<void> { const requestOptions: RequestOptions = { method: "GET", responseType: "text", auth: this.getRequestAuthorization(), }; // spell-checker: disable-line try { const response: Response = await request(url, requestOptions); if (undefined !== response.text) { strings.push(response.text); } } catch { } } protected async toolTipFromJsonUrl(_strings: string[], url: string): Promise<void> { try { const json = await getJson(url); if (undefined !== json) { } } catch { } } public matchesMissingTile(tileData: Uint8Array): boolean { if (!this._missingTileData) return false; if (tileData.length !== this._missingTileData.length) return false; for (let i: number = 0; i < tileData.length; i += 10) { if (this._missingTileData[i] !== tileData[i]) { return false; } } return true; } // calculates the projected x cartesian coordinate in EPSG:3857from the longitude in EPSG:4326 (WGS84) public getEPSG3857X(longitude: number): number { return longitude * 20037508.34 / 180.0; } // calculates the projected y cartesian coordinate in EPSG:3857from the latitude in EPSG:4326 (WGS84) public getEPSG3857Y(latitude: number): number { const y = Math.log(Math.tan((90.0 + latitude) * Math.PI / 360.0)) / (Math.PI / 180.0); return y * 20037508.34 / 180.0; } // Map tile providers like Bing and Mapbox allow the URL to be constructed directory from the zoom level and tile coordinates. // However, WMS-based servers take a bounding box instead. This method can help get that bounding box from a tile. 
public getEPSG4326Extent(row: number, column: number, zoomLevel: number): { longitudeLeft: number, longitudeRight: number, latitudeTop: number, latitudeBottom: number } { const mapSize = 256 << zoomLevel; const leftGrid = 256 * column; const topGrid = 256 * row; const longitudeLeft = 360 * ((leftGrid / mapSize) - 0.5); const y0 = 0.5 - ((topGrid + 256) / mapSize); const latitudeBottom = 90.0 - 360.0 * Math.atan(Math.exp(-y0 * 2 * Math.PI)) / Math.PI; const longitudeRight = 360 * (((leftGrid + 256) / mapSize) - 0.5); const y1 = 0.5 - (topGrid / mapSize); const latitudeTop = 90.0 - 360.0 * Math.atan(Math.exp(-y1 * 2 * Math.PI)) / Math.PI; return { longitudeLeft, longitudeRight, latitudeTop, latitudeBottom }; } public getEPSG3857Extent(row: number, column: number, zoomLevel: number): { left: number, right: number, top: number, bottom: number } { const epsg4326Extent = this.getEPSG4326Extent(row, column, zoomLevel); const left = this.getEPSG3857X(epsg4326Extent.longitudeLeft); const right = this.getEPSG3857X(epsg4326Extent.longitudeRight); const bottom = this.getEPSG3857Y(epsg4326Extent.latitudeBottom); const top = this.getEPSG3857Y(epsg4326Extent.latitudeTop); return { left, right, bottom, top }; } public getEPSG3857ExtentString(row: number, column: number, zoomLevel: number) { const tileExtent = this.getEPSG3857Extent(row, column, zoomLevel); return `${tileExtent.left.toFixed(2)},${tileExtent.bottom.toFixed(2)},${tileExtent.right.toFixed(2)},${tileExtent.top.toFixed(2)}`; } public getEPSG4326ExtentString(row: number, column: number, zoomLevel: number, latLongAxisOrdering: boolean) { const tileExtent = this.getEPSG4326Extent(row, column, zoomLevel); if (latLongAxisOrdering) { return `${tileExtent.latitudeBottom.toFixed(8)},${tileExtent.longitudeLeft.toFixed(8)}, ${tileExtent.latitudeTop.toFixed(8)},${tileExtent.longitudeRight.toFixed(8)}`; } else { return `${tileExtent.longitudeLeft.toFixed(8)},${tileExtent.latitudeBottom.toFixed(8)}, 
${tileExtent.longitudeRight.toFixed(8)},${tileExtent.latitudeTop.toFixed(8)}`; } } }
the_stack
export const servers = { cleanup: [ { action: "DeleteDeliveryServices", route: "/deliveryservices", method: "delete", data: [ { route: "/deliveryservices/", getRequest: [ { route: "/deliveryservices", queryKey: "xmlId", queryValue: "servertds1", replace: "route" } ] }, { route: "/deliveryservices/", getRequest: [ { route: "/deliveryservices", queryKey: "xmlId", queryValue: "servertdsop1", replace: "route" } ] } ] }, { action: "DeleteServerCapabilities", route: "/server_capabilities", method: "delete", data: [ { route: "/server_capabilities?name=servertestcap1" }, { route: "/server_capabilities?name=servertestcapop1" } ] }, { action: "DeleteServers", route: "/servers", method: "delete", data: [ { route: "/servers/", getRequest: [ { route: "/servers", queryKey: "hostName", queryValue: "servertestcreate2", replace: "route" } ] }, { route: "/servers/", getRequest: [ { route: "/servers", queryKey: "hostName", queryValue: "servertestcreateop2", replace: "route" } ] }, { route: "/servers/", getRequest: [ { route: "/servers", queryKey: "hostName", queryValue: "servertestremove3", replace: "route" } ] }, { route: "/servers/", getRequest: [ { route: "/servers", queryKey: "hostName", queryValue: "servertestremoveop3", replace: "route" } ] } ] }, { action: "DeleteProfile", route: "/profiles", method: "delete", data: [ { route: "/profiles/", getRequest: [ { route: "/profiles", queryKey: "name", queryValue: "servertestprofiles1", replace: "route" } ] } ] }, { action: "DeletePhysLocations", route: "/phys_locations", method: "delete", data: [ { route: "/phys_locations/", getRequest: [ { route: "/phys_locations", queryKey: "name", queryValue: "TPPhysLocation2", replace: "route" } ] } ] }, { action: "DeleteRegions", route: "/regions", method: "delete", data: [ { route: "/regions?name=PhysTest" }, { route: "/regions?name=PhysTest2" } ] }, { action: "DeleteDivisions", route: "/divisions", method: "delete", data: [ { route: "/divisions/", getRequest: [ { route: "/divisions", queryKey: 
"name", queryValue: "PhysTest", replace: "route" } ] } ] }, { action: "DeleteCDN", route: "/cdns", method: "delete", data: [ { route: "/cdns/", getRequest: [ { route: "/cdns", queryKey: "name", queryValue: "servertestcdn1", replace: "route" } ] } ] } ], setup: [ { action: "CreateDivisions", route: "/divisions", method: "post", data: [ { name: "PhysTest" } ] }, { action: "CreateRegions", route: "/regions", method: "post", data: [ { name: "PhysTest", division: "4", divisionName: "PhysTest", getRequest: [ { route: "/divisions", queryKey: "name", queryValue: "PhysTest", replace: "division" } ] }, { name: "PhysTest2", division: "4", divisionName: "PhysTest", getRequest: [ { route: "/divisions", queryKey: "name", queryValue: "PhysTest", replace: "division" } ] } ] }, { action: "CreatePhysLocation", route: "/phys_locations", method: "post", data: [ { address: "Buckingham Palace", city: "London", comments: "Buckingham Palace", email: "steve.kingstone@royal.gsx.gov.uk", name: "TPPhysLocation2", phone: "0-843-816-6276", poc: "Her Majesty The Queen Elizabeth Alexandra Mary Windsor II", regionId: 3, shortName: "tpphys2", state: "NA", zip: "99999", getRequest: [ { route: "/regions", queryKey: "name", queryValue: "PhysTest", replace: "regionId" } ] } ] }, { action: "CreateServers", route: "/servers", method: "post", data: [ { cachegroupId: 0, cdnId: 0, domainName: "test.net", hostName: "servertestremove2", httpsPort: 443, iloIpAddress: "", iloIpGateway: "", iloIpNetmask: "", iloPassword: "", iloUsername: "", interfaces: [ { ipAddresses: [ { address: "::1", gateway: "::2", serviceAddress: true } ], maxBandwidth: null, monitor: true, mtu: 1500, name: "eth0" } ], interfaceMtu: 1500, interfaceName: "eth0", ip6Address: "::1", ip6Gateway: "::2", ipAddress: "0.0.0.1", ipGateway: "0.0.0.2", ipNetmask: "255.255.255.0", mgmtIpAddress: "", mgmtIpGateway: "", mgmtIpNetmask: "", offlineReason: "", physLocationId: 0, profileId: 0, routerHostName: "", routerPortName: "", statusId: 3, tcpPort: 
80, typeId: 12, updPending: false, getRequest: [ { route: "/phys_locations", queryKey: "name", queryValue: "TPPhysLocation2", replace: "physLocationId" }, { route: "/cdns", queryKey: "name", queryValue: "dummycdn", replace: "cdnId" }, { route: "/cachegroups", queryKey: "name", queryValue: "testCG", replace: "cachegroupId" }, { route: "/profiles", queryKey: "name", queryValue: "testProfile", replace: "profileId" } ] }, { cachegroupId: 0, cdnId: 0, domainName: "test.net", hostName: "servertestremoveop2", httpsPort: 443, iloIpAddress: "", iloIpGateway: "", iloIpNetmask: "", iloPassword: "", iloUsername: "", interfaces: [ { ipAddresses: [ { address: "::1", gateway: "::2", serviceAddress: true } ], maxBandwidth: null, monitor: true, mtu: 1500, name: "eth0" } ], interfaceMtu: 1500, interfaceName: "eth0", ip6Address: "::1", ip6Gateway: "::2", ipAddress: "0.0.0.1", ipGateway: "0.0.0.2", ipNetmask: "255.255.255.0", mgmtIpAddress: "", mgmtIpGateway: "", mgmtIpNetmask: "", offlineReason: "", physLocationId: 0, profileId: 0, routerHostName: "", routerPortName: "", statusId: 3, tcpPort: 80, typeId: 12, updPending: false, getRequest: [ { route: "/phys_locations", queryKey: "name", queryValue: "TPPhysLocation2", replace: "physLocationId" }, { route: "/cdns", queryKey: "name", queryValue: "dummycdn", replace: "cdnId" }, { route: "/cachegroups", queryKey: "name", queryValue: "testCG", replace: "cachegroupId" }, { route: "/profiles", queryKey: "name", queryValue: "testProfile", replace: "profileId" } ] }, { cachegroupId: 0, cdnId: 0, domainName: "test.net", hostName: "servertestremove3", httpsPort: 443, iloIpAddress: "", iloIpGateway: "", iloIpNetmask: "", iloPassword: "", iloUsername: "", interfaces: [ { ipAddresses: [ { address: "::1", gateway: "::2", serviceAddress: true } ], maxBandwidth: null, monitor: true, mtu: 1500, name: "eth0" } ], interfaceMtu: 1500, interfaceName: "eth0", ip6Address: "::1", ip6Gateway: "::2", ipAddress: "0.0.0.1", ipGateway: "0.0.0.2", ipNetmask: 
"255.255.255.0", mgmtIpAddress: "", mgmtIpGateway: "", mgmtIpNetmask: "", offlineReason: "", physLocationId: 0, profileId: 0, routerHostName: "", routerPortName: "", statusId: 3, tcpPort: 80, typeId: 11, updPending: false, getRequest: [ { route: "/phys_locations", queryKey: "name", queryValue: "TPPhysLocation2", replace: "physLocationId" }, { route: "/cdns", queryKey: "name", queryValue: "dummycdn", replace: "cdnId" }, { route: "/cachegroups", queryKey: "name", queryValue: "testCG", replace: "cachegroupId" }, { route: "/profiles", queryKey: "name", queryValue: "testProfile", replace: "profileId" } ] }, { cachegroupId: 0, cdnId: 0, domainName: "test.net", hostName: "servertestremoveop3", httpsPort: 443, iloIpAddress: "", iloIpGateway: "", iloIpNetmask: "", iloPassword: "", iloUsername: "", interfaces: [ { ipAddresses: [ { address: "::1", gateway: "::2", serviceAddress: true } ], maxBandwidth: null, monitor: true, mtu: 1500, name: "eth0" } ], interfaceMtu: 1500, interfaceName: "eth0", ip6Address: "::1", ip6Gateway: "::2", ipAddress: "0.0.0.1", ipGateway: "0.0.0.2", ipNetmask: "255.255.255.0", mgmtIpAddress: "", mgmtIpGateway: "", mgmtIpNetmask: "", offlineReason: "", physLocationId: 0, profileId: 0, routerHostName: "", routerPortName: "", statusId: 3, tcpPort: 80, typeId: 11, updPending: false, getRequest: [ { route: "/phys_locations", queryKey: "name", queryValue: "TPPhysLocation2", replace: "physLocationId" }, { route: "/cdns", queryKey: "name", queryValue: "dummycdn", replace: "cdnId" }, { route: "/cachegroups", queryKey: "name", queryValue: "testCG", replace: "cachegroupId" }, { route: "/profiles", queryKey: "name", queryValue: "testProfile", replace: "profileId" } ] } ] }, { action: "CreateServerCapabilities", route: "/server_capabilities", method: "post", data: [ { name: "servertestcap1" }, { name: "servertestcapop1" } ] }, { action: "CreateServerServerCapabilities", route: "/server_server_capabilities", method: "post", data: [ { serverId: 0, serverCapability: 
"servertestcap1", getRequest: [ { route: "/servers", queryKey: "hostName", queryValue: "servertestremove2", replace: "serverId" } ] }, { serverId: 0, serverCapability: "servertestcapop1", getRequest: [ { route: "/servers", queryKey: "hostName", queryValue: "servertestremoveop2", replace: "serverId" } ] } ] }, { action: "CreateDeliveryServices", route: "/deliveryservices", method: "post", data: [ { active: true, cdnId: 0, displayName: "servertestds1", dscp: 0, geoLimit: 0, geoProvider: 0, initialDispersion: 1, ipv6RoutingEnabled: true, logsEnabled: false, missLat: 41.881944, missLong: -87.627778, multiSiteOrigin: false, orgServerFqdn: "http://origin.infra.ciab.test", protocol: 0, qstringIgnore: 0, rangeRequestHandling: 0, regionalGeoBlocking: false, tenantId: 0, typeId: 1, xmlId: "servertds1", getRequest: [ { route: "/tenants", queryKey: "name", queryValue: "tenantSame", replace: "tenantId" }, { route: "/cdns", queryKey: "name", queryValue: "dummycdn", replace: "cdnId" } ] }, { active: true, cdnId: 0, displayName: "servertestdsop1", dscp: 0, geoLimit: 0, geoProvider: 0, initialDispersion: 1, ipv6RoutingEnabled: true, logsEnabled: false, missLat: 41.881944, missLong: -87.627778, multiSiteOrigin: false, orgServerFqdn: "http://origin.infra.ciab.test", protocol: 0, qstringIgnore: 0, rangeRequestHandling: 0, regionalGeoBlocking: false, tenantId: 0, typeId: 1, xmlId: "servertdsop1", getRequest: [ { route: "/tenants", queryKey: "name", queryValue: "tenantSame", replace: "tenantId" }, { route: "/cdns", queryKey: "name", queryValue: "dummycdn", replace: "cdnId" } ] } ] }, { action: "CreateDeliveryServiceServer", route: "/deliveryserviceserver", method: "post", data: [ { dsId: 120, replace: true, servers: [], getRequest: [ { route: "/servers", queryKey: "hostName", queryValue: "servertestremove3", replace: "servers", isArray: true }, { route: "/deliveryservices", queryKey: "xmlId", queryValue: "servertds1", replace: "dsId" } ] }, { dsId: 120, replace: true, servers: [], 
getRequest: [ { route: "/servers", queryKey: "hostName", queryValue: "servertestremoveop3", replace: "servers", isArray: true }, { route: "/deliveryservices", queryKey: "xmlId", queryValue: "servertdsop1", replace: "dsId" } ] } ] }, { action: "CreateCDN", route: "/cdns", method: "post", data: [ { name: "servertestcdn1", domainName: "svtestcdn1", dnssecEnabled: false } ] }, { action: "CreateProfile", route: "/profiles", method: "post", data: [ { name: "servertestprofiles1", description: "A test profile for API examples", cdn: 2, type: "UNK_PROFILE", routingDisabled: true, getRequest: [ { route: "/cdns", queryKey: "name", queryValue: "servertestcdn1", replace: "cdn" } ] } ] } ], tests: [ { logins: [ { description: "Admin Role", username: "TPAdmin", password: "pa$$word" } ], toggle:[ { description: "hide first table column", Name: "Cache Group" }, { description: "redisplay first table column", Name: "Cache Group" } ], add: [ { description: "create a Server", Status: "ONLINE", Hostname: "servertestcreate1", Domainname: "test.com", CDN: "dummycdn", CacheGroup: "testCG", Type: "EDGE", Profile: "testProfile", PhysLocation: "TPPhysLocation2", InterfaceName: "test", validationMessage: "Server created" }, { description: "create multiple Server", Status: "ONLINE", Hostname: "servertestcreate2", Domainname: "test.com", CDN: "dummycdn", CacheGroup: "testCG", Type: "EDGE", Profile: "testProfile", PhysLocation: "TPPhysLocation2", InterfaceName: "test", validationMessage: "Server created" } ], update: [ { description: "Validate cannot change the cdn of a Server when it is currently assigned to a Delivery Services in different CDN", Name: "servertestremove3", CDN: "servertestcdn1", Profile: "servertestprofiles1", validationMessage: "server cdn can not be updated when it is currently assigned to delivery services" }, { description: "change the cdn of a Server when it is currently not assign to any delivery service", Name: "servertestcreate1", CDN: "servertestcdn1", Profile: 
"servertestprofiles1", validationMessage: "Server updated" } ], remove: [ { description: "delete a Server", Name: "servertestcreate1", validationMessage: "Server deleted" }, { description: "delete a Server with Server Capabilities assigned", Name: "servertestremove2", validationMessage: "Server deleted" } ] }, { logins: [ { description: "ReadOnly Role", username: "TPReadOnly", password: "pa$$word" } ], toggle:[ { description: "hide first table column", Name: "Cache Group" }, { description: "redisplay first table column", Name: "Cache Group" } ], add: [ { description: "create a Server", Status: "ONLINE", Hostname: "servertcreatero", Domainname: "test.com", CDN: "dummycdn", CacheGroup: "testCG", Type: "EDGE", Profile: "testProfile", PhysLocation: "TPPhysLocation2", InterfaceName: "test", validationMessage: "Forbidden." } ], update: [ { description: "change the cdn of a Server", Name: "servertestcreate2", CDN: "servertestcdn1", Profile: "servertestprofiles1", validationMessage: "Forbidden." } ], remove: [ { description: "delete a Server", Name: "servertestcreate2", validationMessage: "Forbidden." 
} ] }, { logins: [ { description: "Operator Role", username: "TPOperator", password: "pa$$word" } ], toggle:[ { description: "hide first table column", Name: "Cache Group" }, { description: "redisplay first table column", Name: "Cache Group" } ], add: [ { description: "create a Server", Status: "ONLINE", Hostname: "servertestcreateop1", Domainname: "test.com", CDN: "dummycdn", CacheGroup: "testCG", Type: "EDGE", Profile: "testProfile", PhysLocation: "TPPhysLocation2", InterfaceName: "test", validationMessage: "Server created" }, { description: "create multiple Server", Status: "ONLINE", Hostname: "servertestcreateop2", Domainname: "test.com", CDN: "dummycdn", CacheGroup: "testCG", Type: "EDGE", Profile: "testProfile", PhysLocation: "TPPhysLocation2", InterfaceName: "test", validationMessage: "Server created" } ], update: [ { description: "Validate cannot change the cdn of a Server when it is currently assigned to a Delivery Services in different CDN", Name: "servertestremoveop3", CDN: "servertestcdn1", Profile: "servertestprofiles1", validationMessage: "server cdn can not be updated when it is currently assigned to delivery services" }, { description: "change the cdn of a Server when it is currently not assign to any delivery service", Name: "servertestcreateop1", CDN: "servertestcdn1", Profile: "servertestprofiles1", validationMessage: "Server updated" } ], remove: [ { description: "delete a Server", Name: "servertestcreateop1", validationMessage: "Server deleted" }, { description: "delete a Server with Server Capabilities assigned", Name: "servertestremoveop2", validationMessage: "Server deleted" } ] } ] };
the_stack
import config from '../config'; import bitcoinApi from './bitcoin/bitcoin-api-factory'; import logger from '../logger'; import memPool from './mempool'; import { BlockExtended, BlockSummary, PoolTag, TransactionExtended, TransactionStripped, TransactionMinerInfo } from '../mempool.interfaces'; import { Common } from './common'; import diskCache from './disk-cache'; import transactionUtils from './transaction-utils'; import bitcoinClient from './bitcoin/bitcoin-client'; import { IBitcoinApi } from './bitcoin/bitcoin-api.interface'; import { IEsploraApi } from './bitcoin/esplora-api.interface'; import poolsRepository from '../repositories/PoolsRepository'; import blocksRepository from '../repositories/BlocksRepository'; import loadingIndicators from './loading-indicators'; import BitcoinApi from './bitcoin/bitcoin-api'; import { prepareBlock } from '../utils/blocks-utils'; import BlocksRepository from '../repositories/BlocksRepository'; import HashratesRepository from '../repositories/HashratesRepository'; import indexer from '../indexer'; import fiatConversion from './fiat-conversion'; import RatesRepository from '../repositories/RatesRepository'; import poolsParser from './pools-parser'; class Blocks { private blocks: BlockExtended[] = []; private blockSummaries: BlockSummary[] = []; private currentBlockHeight = 0; private currentDifficulty = 0; private lastDifficultyAdjustmentTime = 0; private previousDifficultyRetarget = 0; private newBlockCallbacks: ((block: BlockExtended, txIds: string[], transactions: TransactionExtended[]) => void)[] = []; constructor() { } public getBlocks(): BlockExtended[] { return this.blocks; } public setBlocks(blocks: BlockExtended[]) { this.blocks = blocks; } public getBlockSummaries(): BlockSummary[] { return this.blockSummaries; } public setBlockSummaries(blockSummaries: BlockSummary[]) { this.blockSummaries = blockSummaries; } public setNewBlockCallback(fn: (block: BlockExtended, txIds: string[], transactions: TransactionExtended[]) 
=> void) { this.newBlockCallbacks.push(fn); } /** * Return the list of transaction for a block * @param blockHash * @param blockHeight * @param onlyCoinbase - Set to true if you only need the coinbase transaction * @returns Promise<TransactionExtended[]> */ private async $getTransactionsExtended( blockHash: string, blockHeight: number, onlyCoinbase: boolean, quiet: boolean = false, ): Promise<TransactionExtended[]> { const transactions: TransactionExtended[] = []; const txIds: string[] = await bitcoinApi.$getTxIdsForBlock(blockHash); const mempool = memPool.getMempool(); let transactionsFound = 0; let transactionsFetched = 0; for (let i = 0; i < txIds.length; i++) { if (mempool[txIds[i]]) { // We update blocks before the mempool (index.ts), therefore we can // optimize here by directly fetching txs in the "outdated" mempool transactions.push(mempool[txIds[i]]); transactionsFound++; } else if (config.MEMPOOL.BACKEND === 'esplora' || !memPool.hasPriority() || i === 0) { // Otherwise we fetch the tx data through backend services (esplora, electrum, core rpc...) if (!quiet && (i % (Math.round((txIds.length) / 10)) === 0 || i + 1 === txIds.length)) { // Avoid log spam logger.debug(`Indexing tx ${i + 1} of ${txIds.length} in block #${blockHeight}`); } try { const tx = await transactionUtils.$getTransactionExtended(txIds[i]); transactions.push(tx); transactionsFetched++; } catch (e) { if (i === 0) { const msg = `Cannot fetch coinbase tx ${txIds[i]}. Reason: ` + (e instanceof Error ? e.message : e); logger.err(msg); throw new Error(msg); } else { logger.err(`Cannot fetch tx ${txIds[i]}. Reason: ` + (e instanceof Error ? e.message : e)); } } } if (onlyCoinbase === true) { break; // Fetch the first transaction and exit } } transactions.forEach((tx) => { if (!tx.cpfpChecked) { Common.setRelativesAndGetCpfpInfo(tx, mempool); // Child Pay For Parent } }); if (!quiet) { logger.debug(`${transactionsFound} of ${txIds.length} found in mempool. 
${transactionsFetched} fetched through backend service.`); } return transactions; } /** * Return a block summary (list of stripped transactions) * @param block * @returns BlockSummary */ private summarizeBlock(block: IBitcoinApi.VerboseBlock): BlockSummary { const stripped = block.tx.map((tx) => { return { txid: tx.txid, vsize: tx.vsize, fee: tx.fee ? Math.round(tx.fee * 100000000) : 0, value: Math.round(tx.vout.reduce((acc, vout) => acc + (vout.value ? vout.value : 0), 0) * 100000000) }; }); return { id: block.hash, transactions: stripped }; } /** * Return a block with additional data (reward, coinbase, fees...) * @param block * @param transactions * @returns BlockExtended */ private async $getBlockExtended(block: IEsploraApi.Block, transactions: TransactionExtended[]): Promise<BlockExtended> { const blockExtended: BlockExtended = Object.assign({ extras: {} }, block); blockExtended.extras.reward = transactions[0].vout.reduce((acc, curr) => acc + curr.value, 0); blockExtended.extras.coinbaseTx = transactionUtils.stripCoinbaseTransaction(transactions[0]); blockExtended.extras.coinbaseRaw = blockExtended.extras.coinbaseTx.vin[0].scriptsig; if (block.height === 0) { blockExtended.extras.medianFee = 0; // 50th percentiles blockExtended.extras.feeRange = [0, 0, 0, 0, 0, 0, 0]; blockExtended.extras.totalFees = 0; blockExtended.extras.avgFee = 0; blockExtended.extras.avgFeeRate = 0; } else { const stats = await bitcoinClient.getBlockStats(block.id, [ 'feerate_percentiles', 'minfeerate', 'maxfeerate', 'totalfee', 'avgfee', 'avgfeerate' ]); blockExtended.extras.medianFee = stats.feerate_percentiles[2]; // 50th percentiles blockExtended.extras.feeRange = [stats.minfeerate, stats.feerate_percentiles, stats.maxfeerate].flat(); blockExtended.extras.totalFees = stats.totalfee; blockExtended.extras.avgFee = stats.avgfee; blockExtended.extras.avgFeeRate = stats.avgfeerate; } if (['mainnet', 'testnet', 'signet', 'regtest'].includes(config.MEMPOOL.NETWORK)) { let pool: PoolTag; if 
(blockExtended.extras?.coinbaseTx !== undefined) { pool = await this.$findBlockMiner(blockExtended.extras?.coinbaseTx); } else { if (config.DATABASE.ENABLED === true) { pool = await poolsRepository.$getUnknownPool(); } else { pool = poolsParser.unknownPool; } } if (!pool) { // We should never have this situation in practise logger.warn(`Cannot assign pool to block ${blockExtended.height} and 'unknown' pool does not exist. ` + `Check your "pools" table entries`); return blockExtended; } blockExtended.extras.pool = { id: pool.id, name: pool.name, slug: pool.slug, }; } return blockExtended; } /** * Try to find which miner found the block * @param txMinerInfo * @returns */ private async $findBlockMiner(txMinerInfo: TransactionMinerInfo | undefined): Promise<PoolTag> { if (txMinerInfo === undefined || txMinerInfo.vout.length < 1) { if (config.DATABASE.ENABLED === true) { return await poolsRepository.$getUnknownPool(); } else { return poolsParser.unknownPool; } } const asciiScriptSig = transactionUtils.hex2ascii(txMinerInfo.vin[0].scriptsig); const address = txMinerInfo.vout[0].scriptpubkey_address; let pools: PoolTag[] = []; if (config.DATABASE.ENABLED === true) { pools = await poolsRepository.$getPools(); } else { pools = poolsParser.miningPools; } for (let i = 0; i < pools.length; ++i) { if (address !== undefined) { const addresses: string[] = JSON.parse(pools[i].addresses); if (addresses.indexOf(address) !== -1) { return pools[i]; } } const regexes: string[] = JSON.parse(pools[i].regexes); for (let y = 0; y < regexes.length; ++y) { const regex = new RegExp(regexes[y], 'i'); const match = asciiScriptSig.match(regex); if (match !== null) { return pools[i]; } } } if (config.DATABASE.ENABLED === true) { return await poolsRepository.$getUnknownPool(); } else { return poolsParser.unknownPool; } } /** * [INDEXING] Index all blocks metadata for the mining dashboard */ public async $generateBlockDatabase() { const blockchainInfo = await bitcoinClient.getBlockchainInfo(); if 
(blockchainInfo.blocks !== blockchainInfo.headers) { // Wait for node to sync return; } try { let currentBlockHeight = blockchainInfo.blocks; let indexingBlockAmount = Math.min(config.MEMPOOL.INDEXING_BLOCKS_AMOUNT, blockchainInfo.blocks); if (indexingBlockAmount <= -1) { indexingBlockAmount = currentBlockHeight + 1; } const lastBlockToIndex = Math.max(0, currentBlockHeight - indexingBlockAmount + 1); logger.debug(`Indexing blocks from #${currentBlockHeight} to #${lastBlockToIndex}`); loadingIndicators.setProgress('block-indexing', 0); const chunkSize = 10000; let totalIndexed = await blocksRepository.$blockCountBetweenHeight(currentBlockHeight, lastBlockToIndex); let indexedThisRun = 0; let newlyIndexed = 0; const startedAt = new Date().getTime() / 1000; let timer = new Date().getTime() / 1000; while (currentBlockHeight >= lastBlockToIndex) { const endBlock = Math.max(0, lastBlockToIndex, currentBlockHeight - chunkSize + 1); const missingBlockHeights: number[] = await blocksRepository.$getMissingBlocksBetweenHeights( currentBlockHeight, endBlock); if (missingBlockHeights.length <= 0) { currentBlockHeight -= chunkSize; continue; } logger.info(`Indexing ${missingBlockHeights.length} blocks from #${currentBlockHeight} to #${endBlock}`); for (const blockHeight of missingBlockHeights) { if (blockHeight < lastBlockToIndex) { break; } ++indexedThisRun; ++totalIndexed; const elapsedSeconds = Math.max(1, Math.round((new Date().getTime() / 1000) - timer)); if (elapsedSeconds > 5 || blockHeight === lastBlockToIndex) { const runningFor = Math.max(1, Math.round((new Date().getTime() / 1000) - startedAt)); const blockPerSeconds = Math.max(1, Math.round(indexedThisRun / elapsedSeconds)); const progress = Math.round(totalIndexed / indexingBlockAmount * 10000) / 100; const timeLeft = Math.round((indexingBlockAmount - totalIndexed) / blockPerSeconds); logger.debug(`Indexing block #${blockHeight} | ~${blockPerSeconds.toFixed(2)} blocks/sec | total: 
${totalIndexed}/${indexingBlockAmount} (${progress}%) | elapsed: ${runningFor} seconds | left: ~${timeLeft} seconds`); timer = new Date().getTime() / 1000; indexedThisRun = 0; loadingIndicators.setProgress('block-indexing', progress, false); } const blockHash = await bitcoinApi.$getBlockHash(blockHeight); const block = BitcoinApi.convertBlock(await bitcoinClient.getBlock(blockHash)); const transactions = await this.$getTransactionsExtended(blockHash, block.height, true, true); const blockExtended = await this.$getBlockExtended(block, transactions); newlyIndexed++; await blocksRepository.$saveBlockInDatabase(blockExtended); } currentBlockHeight -= chunkSize; } logger.notice(`Block indexing completed: indexed ${newlyIndexed} blocks`); loadingIndicators.setProgress('block-indexing', 100); } catch (e) { logger.err('Block indexing failed. Trying again later. Reason: ' + (e instanceof Error ? e.message : e)); loadingIndicators.setProgress('block-indexing', 100); return; } const chainValid = await BlocksRepository.$validateChain(); if (!chainValid) { indexer.reindex(); } } public async $updateBlocks() { let fastForwarded = false; const blockHeightTip = await bitcoinApi.$getBlockHeightTip(); if (this.blocks.length === 0) { this.currentBlockHeight = Math.max(blockHeightTip - config.MEMPOOL.INITIAL_BLOCKS_AMOUNT, -1); } else { this.currentBlockHeight = this.blocks[this.blocks.length - 1].height; } if (blockHeightTip - this.currentBlockHeight > config.MEMPOOL.INITIAL_BLOCKS_AMOUNT * 2) { logger.info(`${blockHeightTip - this.currentBlockHeight} blocks since tip. 
Fast forwarding to the ${config.MEMPOOL.INITIAL_BLOCKS_AMOUNT} recent blocks`); this.currentBlockHeight = blockHeightTip - config.MEMPOOL.INITIAL_BLOCKS_AMOUNT; fastForwarded = true; logger.info(`Re-indexing skipped blocks and corresponding hashrates data`); indexer.reindex(); // Make sure to index the skipped blocks #1619 } if (!this.lastDifficultyAdjustmentTime) { const blockchainInfo = await bitcoinClient.getBlockchainInfo(); if (blockchainInfo.blocks === blockchainInfo.headers) { const heightDiff = blockHeightTip % 2016; const blockHash = await bitcoinApi.$getBlockHash(blockHeightTip - heightDiff); const block = BitcoinApi.convertBlock(await bitcoinClient.getBlock(blockHash)); this.lastDifficultyAdjustmentTime = block.timestamp; this.currentDifficulty = block.difficulty; if (blockHeightTip >= 2016) { const previousPeriodBlockHash = await bitcoinApi.$getBlockHash(blockHeightTip - heightDiff - 2016); const previousPeriodBlock = await bitcoinApi.$getBlock(previousPeriodBlockHash); this.previousDifficultyRetarget = (block.difficulty - previousPeriodBlock.difficulty) / previousPeriodBlock.difficulty * 100; logger.debug(`Initial difficulty adjustment data set.`); } } else { logger.debug(`Blockchain headers (${blockchainInfo.headers}) and blocks (${blockchainInfo.blocks}) not in sync. 
Waiting...`); } } while (this.currentBlockHeight < blockHeightTip) { if (this.currentBlockHeight < blockHeightTip - config.MEMPOOL.INITIAL_BLOCKS_AMOUNT) { this.currentBlockHeight = blockHeightTip; } else { this.currentBlockHeight++; logger.debug(`New block found (#${this.currentBlockHeight})!`); } const blockHash = await bitcoinApi.$getBlockHash(this.currentBlockHeight); const verboseBlock = await bitcoinClient.getBlock(blockHash, 2); const block = BitcoinApi.convertBlock(verboseBlock); const txIds: string[] = await bitcoinApi.$getTxIdsForBlock(blockHash); const transactions = await this.$getTransactionsExtended(blockHash, block.height, false); const blockExtended: BlockExtended = await this.$getBlockExtended(block, transactions); const blockSummary: BlockSummary = this.summarizeBlock(verboseBlock); if (Common.indexingEnabled()) { if (!fastForwarded) { const lastBlock = await blocksRepository.$getBlockByHeight(blockExtended.height - 1); if (lastBlock !== null && blockExtended.previousblockhash !== lastBlock['hash']) { logger.warn(`Chain divergence detected at block ${lastBlock['height']}, re-indexing most recent data`); // We assume there won't be a reorg with more than 10 block depth await BlocksRepository.$deleteBlocksFrom(lastBlock['height'] - 10); await HashratesRepository.$deleteLastEntries(); for (let i = 10; i >= 0; --i) { await this.$indexBlock(lastBlock['height'] - i); } } await blocksRepository.$saveBlockInDatabase(blockExtended); } } if (fiatConversion.ratesInitialized === true && config.DATABASE.ENABLED === true) { await RatesRepository.$saveRate(blockExtended.height, fiatConversion.getConversionRates()); } if (block.height % 2016 === 0) { this.previousDifficultyRetarget = (block.difficulty - this.currentDifficulty) / this.currentDifficulty * 100; this.lastDifficultyAdjustmentTime = block.timestamp; this.currentDifficulty = block.difficulty; } this.blocks.push(blockExtended); if (this.blocks.length > config.MEMPOOL.INITIAL_BLOCKS_AMOUNT * 4) { 
this.blocks = this.blocks.slice(-config.MEMPOOL.INITIAL_BLOCKS_AMOUNT * 4); } this.blockSummaries.push(blockSummary); if (this.blockSummaries.length > config.MEMPOOL.INITIAL_BLOCKS_AMOUNT * 4) { this.blockSummaries = this.blockSummaries.slice(-config.MEMPOOL.INITIAL_BLOCKS_AMOUNT * 4); } if (this.newBlockCallbacks.length) { this.newBlockCallbacks.forEach((cb) => cb(blockExtended, txIds, transactions)); } if (!memPool.hasPriority()) { diskCache.$saveCacheToDisk(); } } } /** * Index a block if it's missing from the database. Returns the block after indexing */ public async $indexBlock(height: number): Promise<BlockExtended> { const dbBlock = await blocksRepository.$getBlockByHeight(height); if (dbBlock != null) { return prepareBlock(dbBlock); } const blockHash = await bitcoinApi.$getBlockHash(height); const block = BitcoinApi.convertBlock(await bitcoinClient.getBlock(blockHash)); const transactions = await this.$getTransactionsExtended(blockHash, block.height, true); const blockExtended = await this.$getBlockExtended(block, transactions); await blocksRepository.$saveBlockInDatabase(blockExtended); return prepareBlock(blockExtended); } /** * Index a block by hash if it's missing from the database. 
Returns the block after indexing */ public async $getBlock(hash: string): Promise<BlockExtended | IEsploraApi.Block> { // Check the memory cache const blockByHash = this.getBlocks().find((b) => b.id === hash); if (blockByHash) { return blockByHash; } // Block has already been indexed if (Common.indexingEnabled()) { const dbBlock = await blocksRepository.$getBlockByHash(hash); if (dbBlock != null) { return prepareBlock(dbBlock); } } const block = await bitcoinApi.$getBlock(hash); // Not Bitcoin network, return the block as it if (['mainnet', 'testnet', 'signet'].includes(config.MEMPOOL.NETWORK) === false) { return block; } // Bitcoin network, add our custom data on top const transactions = await this.$getTransactionsExtended(hash, block.height, true); const blockExtended = await this.$getBlockExtended(block, transactions); if (Common.indexingEnabled()) { delete(blockExtended['coinbaseTx']); await blocksRepository.$saveBlockInDatabase(blockExtended); } return blockExtended; } public async $getStrippedBlockTransactions(hash: string): Promise<TransactionStripped[]> { // Check the memory cache const cachedSummary = this.getBlockSummaries().find((b) => b.id === hash); if (cachedSummary) { return cachedSummary.transactions; } const block = await bitcoinClient.getBlock(hash, 2); const summary = this.summarizeBlock(block); return summary.transactions; } public async $getBlocks(fromHeight?: number, limit: number = 15): Promise<BlockExtended[]> { try { let currentHeight = fromHeight !== undefined ? 
fromHeight : this.getCurrentBlockHeight(); const returnBlocks: BlockExtended[] = []; if (currentHeight < 0) { return returnBlocks; } if (currentHeight === 0 && Common.indexingEnabled()) { currentHeight = await blocksRepository.$mostRecentBlockHeight(); } // Check if block height exist in local cache to skip the hash lookup const blockByHeight = this.getBlocks().find((b) => b.height === currentHeight); let startFromHash: string | null = null; if (blockByHeight) { startFromHash = blockByHeight.id; } else if (!Common.indexingEnabled()) { startFromHash = await bitcoinApi.$getBlockHash(currentHeight); } let nextHash = startFromHash; for (let i = 0; i < limit && currentHeight >= 0; i++) { let block = this.getBlocks().find((b) => b.height === currentHeight); if (block) { returnBlocks.push(block); } else if (Common.indexingEnabled()) { block = await this.$indexBlock(currentHeight); returnBlocks.push(block); } else if (nextHash != null) { block = prepareBlock(await bitcoinApi.$getBlock(nextHash)); nextHash = block.previousblockhash; returnBlocks.push(block); } currentHeight--; } return returnBlocks; } catch (e) { throw e; } } public getLastDifficultyAdjustmentTime(): number { return this.lastDifficultyAdjustmentTime; } public getPreviousDifficultyRetarget(): number { return this.previousDifficultyRetarget; } public getCurrentBlockHeight(): number { return this.currentBlockHeight; } } export default new Blocks();
the_stack
import { ComponentChildren, h, JSX, RefObject } from 'preact'
import { useCallback, useRef, useState } from 'preact/hooks'

import menuStyles from '../../../css/menu.css'
import { useMouseDownOutside } from '../../../hooks/use-mouse-down-outside'
import { OnValueChange, Props } from '../../../types/types'
import { createClassName } from '../../../utilities/create-class-name'
import { getCurrentFromRef } from '../../../utilities/get-current-from-ref'
import { IconMenuCheckmarkChecked16 } from '../../icon/icon-16/icon-menu-checkmark-checked-16'
import { computeNextValue } from '../private/compute-next-value'
import { isKeyCodeCharacterGenerating } from '../private/is-keycode-character-generating'
import textboxStyles from '../textbox/textbox.css'
import textboxAutocompleteStyles from './textbox-autocomplete.css'

const EMPTY_STRING = ''
// Sentinel meaning "no option is selected".
const INVALID_ID = null
// Data attribute used to map rendered menu items back to their option `id`.
const ITEM_ID_DATA_ATTRIBUTE_NAME = 'data-textbox-autocomplete-item-id'
// NOTE(review): used by `updateMenuElementMaxHeight`, defined later in this file.
const MENU_VERTICAL_MARGIN = 16

export type TextboxAutocompleteProps<Name extends string> = {
  disabled?: boolean
  filter?: boolean
  icon?: ComponentChildren
  name?: Name
  noBorder?: boolean
  onInput?: OmitThisParameter<JSX.GenericEventHandler<HTMLInputElement>>
  onValueInput?: OnValueChange<string, Name>
  options: Array<TextboxAutocompleteOption>
  placeholder?: string
  propagateEscapeKeyDown?: boolean
  revertOnEscapeKeyDown?: boolean
  spellCheck?: boolean
  strict?: boolean
  top?: boolean
  value: string
}
export type TextboxAutocompleteOption =
  | TextboxAutocompleteOptionHeader
  | TextboxAutocompleteOptionValue
  | TextboxAutocompleteOptionSeparator
export type TextboxAutocompleteOptionHeader = {
  header: string
}
export type TextboxAutocompleteOptionValue = {
  value: string
  disabled?: boolean
}
export type TextboxAutocompleteOptionSeparator = {
  separator: true
}

// Internal option type: value options are augmented with a string `id`.
type Option =
  | TextboxAutocompleteOptionHeader
  | OptionValueWithId
  | TextboxAutocompleteOptionSeparator
type OptionValueWithId = TextboxAutocompleteOptionValue & { id: string }
// Either a valid option `id` or the `INVALID_ID` (`null`) sentinel.
type Id = typeof INVALID_ID | string

/**
 * A text input with a dropdown of autocomplete options. Supports keyboard
 * navigation (arrow keys, Enter/Escape/Tab), optional filtering of options as
 * the user types, and an optional `strict` mode that rejects input not
 * matching any option.
 */
export function TextboxAutocomplete<Name extends string>({
  disabled = false,
  filter = false,
  icon,
  name,
  noBorder = false,
  onInput = function () {},
  onValueInput = function () {},
  placeholder,
  propagateEscapeKeyDown = true,
  revertOnEscapeKeyDown = false,
  spellCheck = false,
  strict = false,
  top = false,
  value,
  ...rest
}: Props<HTMLInputElement, TextboxAutocompleteProps<Name>>): JSX.Element {
  if (typeof icon === 'string' && icon.length !== 1) {
    throw new Error(`String \`icon\` must be a single character: ${icon}`)
  }

  const rootElementRef: RefObject<HTMLDivElement> = useRef(null)
  const inputElementRef: RefObject<HTMLInputElement> = useRef(null)
  const menuElementRef: RefObject<HTMLDivElement> = useRef(null)

  const [isMenuVisible, setIsMenuVisible] = useState(false)
  const [selectedId, setSelectedId] = useState<Id>(INVALID_ID)
  const [originalValue, setOriginalValue] = useState(EMPTY_STRING) // Value of the textbox when it was initially focused
  const [editedValue, setEditedValue] = useState<string>(value) // Value being edited that does not match any of the options

  let options: Array<Option> = createOptions(rest.options)
  if (filter === true) {
    options = filterOptions(options, value, editedValue)
  }

  // Uncomment to debug
  // console.table([{ isMenuVisible, selectedId, originalValue, editedValue, value }])

  // Hide the menu, reset all transient state, and blur the input.
  const triggerBlur = useCallback(function (): void {
    setIsMenuVisible(false)
    setOriginalValue(EMPTY_STRING)
    setEditedValue(EMPTY_STRING)
    setSelectedId(INVALID_ID)
    getCurrentFromRef(inputElementRef).blur()
  }, [])

  // Adjust the menu scroll position so that the selected option is always visible
  const updateScrollPosition = useCallback(function (id: Id): void {
    const menuElement = getCurrentFromRef(menuElementRef)
    if (id === INVALID_ID) {
      menuElement.scrollTop = 0
      return
    }
    const selectedElement = menuElement.querySelector<HTMLDivElement>(
      `[${ITEM_ID_DATA_ATTRIBUTE_NAME}='${id}']`
    )
    if (selectedElement === null) {
      throw new Error('Invariant violation') // `id` is valid
    }
    const y =
      selectedElement.getBoundingClientRect().y -
      menuElement.getBoundingClientRect().y
    if (y < menuElement.scrollTop) {
      // Selected option is above the visible area; scroll up to it.
      menuElement.scrollTop = y
      return
    }
    const offsetBottom = y + selectedElement.offsetHeight
    if (offsetBottom > menuElement.scrollTop + menuElement.offsetHeight) {
      // Selected option is below the visible area; scroll down to it.
      menuElement.scrollTop = offsetBottom - menuElement.offsetHeight
    }
  }, [])

  // Sync `editedValue`/`selectedId` state with a newly typed value.
  const updateEditedValue = useCallback(
    function (editedValue: string): void {
      const newId = getIdByValue(options, editedValue)
      if (newId === INVALID_ID) {
        // `newValue` does not match any option in `options`
        setEditedValue(editedValue)
        setSelectedId(INVALID_ID)
        updateScrollPosition(INVALID_ID)
        return
      }
      // `newValue` matches one of the options in `options`
      setEditedValue(EMPTY_STRING)
      setSelectedId(newId)
      updateScrollPosition(newId)
    },
    [options, updateScrollPosition]
  )

  // On focus: show the menu, remember the pre-edit value, select the input text.
  const handleFocus = useCallback(
    function (event: JSX.TargetedFocusEvent<HTMLInputElement>): void {
      setIsMenuVisible(true)
      updateMenuElementMaxHeight(
        getCurrentFromRef(rootElementRef),
        getCurrentFromRef(menuElementRef),
        top
      )
      setOriginalValue(value)
      updateEditedValue(value)
      const inputElement = event.currentTarget
      inputElement.focus()
      inputElement.select()
    },
    [top, updateEditedValue, value]
  )

  const handleInput = useCallback(
    function (event: JSX.TargetedEvent<HTMLInputElement>): void {
      const newValue = event.currentTarget.value
      updateEditedValue(newValue)
      onValueInput(newValue, name)
      onInput(event)
    },
    [name, onInput, onValueInput, updateEditedValue]
  )

  // Keyboard handling: arrow-key navigation, Enter/Escape/Tab to close,
  // and (in `strict` mode) rejection of characters producing invalid values.
  const handleKeyDown = useCallback(
    function (event: JSX.TargetedKeyboardEvent<HTMLInputElement>): void {
      const inputElement = event.currentTarget
      const key = event.key
      if (key === 'ArrowUp' || key === 'ArrowDown') {
        event.preventDefault()
        if (options.length === 0) {
          return
        }
        const newId =
          key === 'ArrowUp'
            ? computePreviousId(options, selectedId)
            : computeNextId(options, selectedId)
        if (newId === INVALID_ID) {
          // Reached beginning/end of list of `options`, so just restore `editedValue`
          setSelectedId(INVALID_ID)
          inputElement.value = editedValue
          onValueInput(editedValue, name)
          onInput(event)
          updateScrollPosition(INVALID_ID)
          return
        }
        // Set the selected option to `newId`, and update `value`
        setSelectedId(newId)
        updateScrollPosition(newId)
        const newOptionValue = findOptionValueById(options, newId)
        if (newOptionValue === null) {
          throw new Error('Invariant violation') // `newId` is valid
        }
        const newValue = newOptionValue.value
        inputElement.value = newValue
        onValueInput(newValue, name)
        onInput(event)
        inputElement.select()
        return
      }
      if (key === 'Enter' || key === 'Escape' || key === 'Tab') {
        event.preventDefault()
        // NOTE(review): `propagateEscapeKeyDown` also stops propagation for
        // Enter and Tab here — confirm intended.
        if (propagateEscapeKeyDown === false) {
          event.stopPropagation()
        }
        if (key === 'Escape' && revertOnEscapeKeyDown === true) {
          // Restore the pre-focus value and emit a synthetic `input` event.
          inputElement.value = originalValue
          const inputEvent = document.createEvent('Event')
          inputEvent.initEvent('input', true, true)
          inputElement.dispatchEvent(inputEvent)
        }
        triggerBlur()
        return
      }
      if (strict === false) {
        return
      }
      if (event.ctrlKey === true || event.metaKey === true) {
        return
      }
      if (isKeyCodeCharacterGenerating(event.keyCode) === true) {
        // Piece together `newValue`, and stop the `keyDown` event if `newValue` is invalid
        const newValue = computeNextValue(inputElement, event.key)
        if (isValidValue(options, newValue) === false) {
          event.preventDefault()
        }
      }
    },
    [
      editedValue,
      name,
      onInput,
      onValueInput,
      options,
      originalValue,
      propagateEscapeKeyDown,
      revertOnEscapeKeyDown,
      selectedId,
      strict,
      triggerBlur,
      updateScrollPosition
    ]
  )

  // In `strict` mode, block pastes that would produce an invalid value.
  const handlePaste = useCallback(
    function (event: JSX.TargetedClipboardEvent<HTMLInputElement>): void {
      if (event.clipboardData === null) {
        throw new Error('`event.clipboardData` is `null`')
      }
      const newValue = computeNextValue(
        event.currentTarget,
        event.clipboardData.getData('Text')
      )
      if (isValidValue(options, newValue) === false) {
        event.preventDefault()
      }
    },
    [options]
  )

  // Clicking a menu option: commit its value to the input and close the menu.
  const handleOptionChange = useCallback(
    function (event: JSX.TargetedEvent<HTMLInputElement>): void {
      const newId = event.currentTarget.getAttribute(
        ITEM_ID_DATA_ATTRIBUTE_NAME
      ) as string
      // Set the selected option to `newId`, and update `value`
      setSelectedId(newId)
      const newOptionValue = findOptionValueById(options, newId)
      if (newOptionValue === null) {
        throw new Error('Invariant violation') // `newId` is valid
      }
      const inputElement = getCurrentFromRef(inputElementRef)
      inputElement.value = newOptionValue.value
      const inputEvent = document.createEvent('Event')
      inputEvent.initEvent('input', true, true)
      inputElement.dispatchEvent(inputEvent)
      triggerBlur()
    },
    [options, triggerBlur]
  )

  // Hovering a menu option moves the selection highlight to it.
  const handleOptionMouseMove = useCallback(
    function (event: JSX.TargetedMouseEvent<HTMLInputElement>): void {
      const newId = event.currentTarget.getAttribute(
        ITEM_ID_DATA_ATTRIBUTE_NAME
      ) as string
      if (newId !== selectedId) {
        setSelectedId(newId)
      }
    },
    [selectedId]
  )

  // Clicking anywhere outside the component closes the menu.
  const handleMouseDownOutside = useCallback(
    function (): void {
      if (isMenuVisible === false) {
        return
      }
      triggerBlur()
    },
    [isMenuVisible, triggerBlur]
  )
  useMouseDownOutside({
    onMouseDownOutside: handleMouseDownOutside,
    ref: rootElementRef
  })

  return (
    <div
      ref={rootElementRef}
      class={createClassName([
        textboxStyles.textbox,
        noBorder === true ? textboxStyles.noBorder : null,
        typeof icon === 'undefined' ? null : textboxStyles.hasIcon,
        disabled === true ? textboxStyles.disabled : null
      ])}
    >
      <div class={textboxStyles.inner}>
        <input
          {...rest}
          ref={inputElementRef}
          class={textboxStyles.input}
          disabled={disabled === true}
          name={name}
          onFocus={handleFocus}
          onInput={handleInput}
          onKeyDown={handleKeyDown}
          onPaste={handlePaste}
          placeholder={placeholder}
          tabIndex={disabled === true ? -1 : 0}
          type="text"
          value={value}
        />
        {typeof icon === 'undefined' ? null : (
          <div class={textboxStyles.icon}>{icon}</div>
        )}
        <div class={textboxStyles.border} />
        <div
          ref={menuElementRef}
          class={createClassName([
            menuStyles.menu,
            disabled === true || isMenuVisible === false
              ? menuStyles.hidden
              : null,
            top === true
              ? textboxAutocompleteStyles.top
              : textboxAutocompleteStyles.bottom
          ])}
        >
          {options.map(function (option: Option, index: number): JSX.Element {
            if ('separator' in option) {
              return <hr key={index} class={menuStyles.optionSeparator} />
            }
            if ('header' in option) {
              return (
                <h1 key={index} class={menuStyles.optionHeader}>
                  {option.header}
                </h1>
              )
            }
            return (
              <label
                key={index}
                class={createClassName([
                  menuStyles.optionValue,
                  option.disabled === true
                    ? menuStyles.optionValueDisabled
                    : null,
                  option.disabled !== true && option.id === selectedId
                    ? menuStyles.optionValueSelected
                    : null
                ])}
              >
                <input
                  {...rest}
                  checked={value === option.value}
                  class={menuStyles.input}
                  disabled={option.disabled === true}
                  name={name}
                  onChange={handleOptionChange}
                  onMouseMove={handleOptionMouseMove}
                  spellcheck={spellCheck}
                  tabIndex={-1}
                  type="radio"
                  value={`${option.value}`}
                  {...{ [ITEM_ID_DATA_ATTRIBUTE_NAME]: option.id }}
                />
                {option.value === originalValue ? ( // Show check icon if option matches `originalValue`
                  <div class={menuStyles.checkIcon}>
                    <IconMenuCheckmarkChecked16 />
                  </div>
                ) : null}
                {option.value}
              </label>
            )
          })}
        </div>
      </div>
    </div>
  )
}

// Add an `id` attribute to all the `TextboxAutocompleteOptionValue` items in `options`
function createOptions(
  options: Array<TextboxAutocompleteOption>
): Array<Option> {
  return options.map(function (
    option: TextboxAutocompleteOption,
    index: number
  ): Option {
    if ('value' in option) {
      const optionValueWithId: OptionValueWithId = { ...option, id: `${index}` }
      return optionValueWithId
    }
    return option
  })
}

// Return the subset of `options` matching the current `value`/`editedValue`;
// when `value` exactly matches an option and nothing is being edited, all
// options are shown.
function filterOptions(
  options: Array<Option>,
  value: string,
  editedValue: string
): Array<Option> {
  if (value === EMPTY_STRING) {
    return options
  }
  const id = getIdByValue(options, value)
  if (id === INVALID_ID) {
    // `value` does not match any option in `options`
    return options.filter(function (option: Option): boolean {
      if ('value' in option) {
        return doesStringContainSubstring(option.value, value) === true
      }
      return false
    })
  }
  // `value` matches one of the options in `options`
  if (editedValue === EMPTY_STRING) {
    return options
  }
  // Filter `options` by `editedValue`
  return options.filter(function (option: Option): boolean {
    if ('value' in option) {
      return doesStringContainSubstring(option.value, editedValue) === true
    }
    return false
  })
}

// Returns `true` if `string` contains `substring` (case-insensitive), else `false`
function doesStringContainSubstring(
  string: string,
  substring: string
): boolean {
  return string.toLowerCase().indexOf(substring.toLowerCase()) !== -1
}

// Returns the `id` of an `OptionValueWithId` in `options` with the given `value`
function getIdByValue(options: Array<Option>, value: string): Id {
  for (const option of options) {
    if ('value' in option) {
      if (option.value === value) {
        return option.id
      }
    }
  }
  return INVALID_ID
}

// Returns `true` if `value` is a substring of `options[i].value` in `options`, else `false`
function isValidValue(options: Array<Option>, value:
string): boolean {
  if (value === EMPTY_STRING) {
    return true
  }
  // NOTE: despite the comment above, this accepts `value` only when it is a
  // case-insensitive *prefix* of some option's value (`indexOf(...) === 0`),
  // not an arbitrary substring
  const needle = value.toLowerCase()
  return options.some(function (option: Option): boolean {
    return 'value' in option && option.value.toLowerCase().startsWith(needle)
  })
}

// Returns the `OptionValueWithId` in `options` with the given `id`, else `null`
function findOptionValueById(
  options: Array<Option>,
  id: string
): null | OptionValueWithId {
  for (const option of options) {
    if (!('id' in option)) {
      continue
    }
    if (option.id === id) {
      return option
    }
  }
  return null
}

// Returns the index of the `OptionValueWithId` in `options` with the given `id`, else `-1`
function getIndexById(options: Array<Option>, id: string): number {
  let index = -1
  for (const option of options) {
    index += 1
    if ('id' in option && option.id === id) {
      return index
    }
  }
  return -1
}

// Returns the `Id` of the `OptionValueWithId` _before_ the `OptionValueWithId` in `options` with the given `id`
function computePreviousId(options: Array<Option>, id: Id): Id {
  if (id === INVALID_ID) {
    // Nothing selected yet, so wrap around to the last enabled option value
    const match = findOptionValueAtOrBeforeIndex(options, options.length - 1)
    if (match === null) {
      return null
    }
    return match.id
  }
  const index = getIndexById(options, id)
  if (index === -1) {
    throw new Error(`No option with \`id\` ${id}`)
  }
  if (index === 0) {
    // Already at the first option, so there is no previous option
    return null
  }
  const match = findOptionValueAtOrBeforeIndex(options, index - 1)
  if (match === null) {
    return null
  }
  return match.id
}

// Returns the `Id` of the `OptionValueWithId` _after_ the `OptionValueWithId` in `options` with the given `id`
function computeNextId(options: Array<Option>, id: Id): Id {
  if (id === INVALID_ID) {
    // Nothing selected yet, so start from the first enabled option value
    const match = findOptionValueAtOrAfterIndex(options, 0)
    if (match === null) {
      return null
    }
    return match.id
  }
  const index = getIndexById(options, id)
  if (index === -1) {
    throw new Error(`No option with \`id\` ${id}`)
  }
  if (index === options.length - 1) {
    // Already at the last option, so there is no next option
    return null
  }
  const match = findOptionValueAtOrAfterIndex(options, index + 1)
  if (match === null) {
    return null
  }
  return match.id
}

// Returns the `OptionValueWithId` in `options` at or _before_ the `index`, else `null`
function findOptionValueAtOrBeforeIndex(
  options: Array<Option>,
  index: number
): null | OptionValueWithId {
  if (index < 0) {
    throw new Error('`index` < 0')
  }
  if (index > options.length - 1) {
    throw new Error('`index` > `options.length` - 1')
  }
  // Search backwards by taking the last option value of the leading slice
  const candidates = options.slice(0, index + 1)
  return findLastOptionValue(candidates)
}

// Returns the `OptionValueWithId` in `options` at or _after_ the `index`, else `null`
function findOptionValueAtOrAfterIndex(
  options: Array<Option>,
  index: number
): null | OptionValueWithId {
  if (index < 0) {
    throw new Error('`index` < 0')
  }
  if (index > options.length - 1) {
    throw new Error('`index` > `options.length` - 1')
  }
  const candidates = options.slice(index)
  return findFirstOptionValue(candidates)
}

// Returns the first `OptionValueWithId` encountered in `options`, else `null`
function findFirstOptionValue(
  options: Array<Option>
): null | OptionValueWithId {
  for (const option of options) {
    if (!('id' in option)) {
      continue
    }
    if (option.disabled === true) {
      // Disabled option values can never be selected
      continue
    }
    return option
  }
  return null
}

// Returns the last `OptionValueWithId` encountered in `options`, else `null`
function findLastOptionValue(options: Array<Option>): null | OptionValueWithId {
  const reversed = [...options].reverse()
  return findFirstOptionValue(reversed)
}

function updateMenuElementMaxHeight(
  rootElement: HTMLDivElement,
  menuElement: HTMLDivElement,
  top: boolean
) {
  const rootElementTop = rootElement.getBoundingClientRect().top
  let maxHeight: number
  if (top === true) {
    // Menu renders above the textbox: cap at the space above it, minus the margin
    maxHeight = rootElementTop - MENU_VERTICAL_MARGIN
  } else {
    // Menu renders below: cap at the space below the textbox, minus the margin
    maxHeight =
      window.innerHeight -
      rootElementTop -
      rootElement.offsetHeight -
      MENU_VERTICAL_MARGIN
  }
  menuElement.style.maxHeight = `${maxHeight}px`
}
the_stack
import {TS} from "../../type/ts";
import {CompilerHostOptions, CustomTransformersInput} from "./compiler-host-options";
import {ModuleResolutionHost} from "../module-resolution-host/module-resolution-host";
import {getNewLineCharacter} from "../../util/get-new-line-character/get-new-line-character";
import {resolveId} from "../../util/resolve-id/resolve-id";
import {getScriptKindFromPath} from "../../util/get-script-kind-from-path/get-script-kind-from-path";
import {VirtualFile, VirtualFileInput} from "../module-resolution-host/virtual-file";
import {mergeTransformers} from "../../util/merge-transformers/merge-transformers";
import {ensureModuleTransformer} from "../transformer/ensure-module/ensure-module-transformer";
import {SourceFileToDependenciesMap} from "../transformer/declaration-bundler/declaration-bundler-options";
import {ExtendedResolvedModule} from "../cache/resolve-cache/extended-resolved-module";
import {getModuleDependencies, ModuleDependency} from "../../util/get-module-dependencies/get-module-dependencies";
import {pickResolvedModule} from "../../util/pick-resolved-module";
import path from "crosspath";
import {ensureAbsolute, getExtension, isExternal, isTypeScriptLib} from "../../util/path/path-util";
import {ensureNodeFactory} from "compatfactory";

/**
 * A TS.CompilerHost that serves files from an in-memory virtual file system
 * (inherited from ModuleResolutionHost) and lazily maintains an incremental
 * builder program over them. Emit output is captured in memory via
 * writeFile()/popEmitOutput() rather than written to disk.
 */
export class CompilerHost extends ModuleResolutionHost implements TS.CompilerHost {
	// Builder program from the previous compilation; reused as the basis for the next one
	private previousProgram: TS.EmitAndSemanticDiagnosticsBuilderProgram | undefined;
	// Current builder program; created lazily by getProgram()
	private currentProgram: TS.EmitAndSemanticDiagnosticsBuilderProgram | undefined;
	// Cached effective type roots; computed lazily by getTypeRoots()
	private currentTypeRoots: Set<string> | undefined;
	// Underlying TS.Program of the current builder program; cached by getProgramInstance()
	private currentProgramInstance: TS.Program | undefined;
	// TypeChecker of the current program; cached by getTypeChecker()
	private currentTypeChecker: TS.TypeChecker | undefined;
	// Accumulates output files written via writeFile() until popEmitOutput() collects them
	private emitOutput: TS.EmitOutput | undefined;
	// True while createProgram() is executing; lets clearProgram() detect mid-creation invalidation
	private creatingProgram = false;
	// Set when the program is invalidated while it is being created, so getProgram() rebuilds it
	private invalidateProgram = false;
	// File names that resolved to external modules; excluded from recursive dependency tracing
	private readonly externalFiles = new Set<string>();

	constructor(
		protected readonly options: CompilerHostOptions,
		// Printer defaults to one honoring the configured newline kind
		protected readonly printer: TS.Printer = options.typescript.createPrinter({
			newLine: options.parsedCommandLineResult.parsedCommandLine.options.newLine
		}),
		// The remaining Maps are shared state that clone() passes filtered copies of
		protected readonly sourceFiles: Map<string, TS.SourceFile> = new Map(),
		protected readonly transformerDiagnostics: Map<string, TS.Diagnostic[]> = new Map(),
		protected readonly fileToVersionMap: Map<string, number> = new Map(),
		protected readonly sourceFileToDependenciesMap: SourceFileToDependenciesMap = new Map(),
		files?: Map<string, VirtualFile>
	) {
		super(options, files);
		this.addDefaultFileNames();
	}

	/** True if declaration (.d.ts) files may be transformed by custom transformers */
	allowTransformingDeclarations(): boolean {
		return this.options.allowTransformingDeclarations === true;
	}

	/** True if `fileName` passes the (optionally bypassed) filter and has a supported extension */
	isSupportedFileName(fileName: string, ignoreFilter = false): boolean {
		return (ignoreFilter || this.options.filter(fileName)) && this.getSupportedExtensions().has(getExtension(fileName));
	}

	/**
	 * Collects parse/option/syntactic/global/semantic diagnostics, plus any
	 * diagnostics added by custom transformers. When `fileName` is given, only
	 * that file's syntactic/semantic/transformer diagnostics are included.
	 */
	getDiagnostics(fileName?: string): readonly TS.Diagnostic[] {
		const program = this.getProgram();
		const sourceFile = fileName == null ? undefined : this.getSourceFile(fileName);
		const baseDiagnostics = [
			...this.getParsedCommandLine().errors,
			...program.getConfigFileParsingDiagnostics(),
			...program.getOptionsDiagnostics(),
			...program.getSyntacticDiagnostics(sourceFile),
			...program.getGlobalDiagnostics(),
			...program.getSemanticDiagnostics(sourceFile)
		];

		if (sourceFile != null) {
			return [...baseDiagnostics, ...(this.transformerDiagnostics.get(sourceFile.fileName) ?? [])];
		} else {
			// No specific file requested: append transformer diagnostics for every file
			const extraDiagnostics: TS.Diagnostic[] = [];
			for (const transformerDiagnostics of this.transformerDiagnostics.values()) {
				extraDiagnostics.push(...transformerDiagnostics);
			}

			return [...baseDiagnostics, ...extraDiagnostics];
		}
	}

	/** Emits only the .tsbuildinfo file (when the TypeScript version supports it) and returns the captured output */
	emitBuildInfo(): TS.EmitOutput {
		// Discard any stale, previously-captured output before emitting
		this.popEmitOutput();
		const programWithEmitBuildInfo = this.getProgramInstance() as TS.Program & {emitBuildInfo?(writeFileCallback: TS.WriteFileCallback): void};

		// A non-exposed internal method, emitBuildInfo, is used, if available (which it is from TypeScript v3.4 and up)
		// If not, we would have to emit the entire Program (or pending affected files) which can be avoided for maximum performance
		programWithEmitBuildInfo.emitBuildInfo?.(this.writeFile.bind(this));

		return this.popEmitOutput();
	}

	/**
	 * Emits the program (or a single file) into the in-memory emit output and
	 * returns it. With `onlyDts` set, only declaration files are emitted.
	 */
	emit(fileName?: string, onlyDts = false, transformers?: CustomTransformersInput): TS.EmitOutput {
		// Discard any stale, previously-captured output before emitting
		this.popEmitOutput();
		const sourceFile = fileName == null ? undefined : this.getSourceFile(fileName);
		const customTransformers = this.getCustomTransformers(transformers);
		let hasEmitted = false;

		const runEmit = (program: TS.Program | TS.EmitAndSemanticDiagnosticsBuilderProgram) => {
			// There is an extra, private, argument that can be given to emit internally in TypeScript
			// which forces emit of declarations. Set this to true for dts emit.
			(
				program as TS.Program & {
					emit: (
						targetSourceFile?: TS.SourceFile,
						writeFile?: TS.WriteFileCallback,
						cancellationToken?: TS.CancellationToken,
						emitOnlyDtsFiles?: boolean,
						customTransformers?: TS.CustomTransformers,
						forceDtsEmit?: boolean
					) => TS.EmitResult;
				}
			).emit(
				sourceFile,
				(file, data, writeByteOrderMark) => {
					hasEmitted = true;
					this.writeFile(file, data, writeByteOrderMark);
				},
				undefined,
				onlyDts,
				customTransformers,
				// forceDtsEmit: true only when onlyDts is truthy
				// (the `onlyDts == null` arm is unreachable given the `= false` default)
				onlyDts == null || !onlyDts ? undefined : true
			);
		};

		runEmit(this.getProgram());

		// TypeScript will not emit if a builder-program haven't changed. In that case, use the underlying program instance and emit with that one.
		if (!hasEmitted) {
			runEmit(this.getProgramInstance());
		}

		return this.popEmitOutput();
	}

	/** WriteFileCallback: captures an output file into the in-memory emit output */
	writeFile(name: string, text: string, writeByteOrderMark: boolean): void {
		const emitOutput = this.ensureEmitOutput();
		emitOutput.outputFiles.push({
			name,
			text,
			writeByteOrderMark
		});
	}

	/** The configured script target, falling back to ES3 (TypeScript's own default) */
	getScriptTarget(): TS.ScriptTarget {
		return this.getCompilationSettings().target ?? this.getTypescript().ScriptTarget.ES3;
	}

	/** Builds a fresh builder program over the current set of root file names */
	private createProgram(): TS.EmitAndSemanticDiagnosticsBuilderProgram {
		const typescript = this.getTypescript();
		const rootNames = [...this.getFileNames()];
		const options = this.getCompilationSettings();

		// The --incremental option is part of TypeScript 3.4 and up only
		if ("createIncrementalProgram" in (typescript as Partial<typeof TS>)) {
			return typescript.createIncrementalProgram({
				rootNames,
				options,
				host: this
			});
		} else {
			return typescript.createEmitAndSemanticDiagnosticsBuilderProgram(rootNames, options, this, this.previousProgram);
		}
	}

	/** Lazily (re)creates and returns the current builder program */
	getProgram(): TS.EmitAndSemanticDiagnosticsBuilderProgram {
		// If there is no current program, or if the list of root names is out of sync with the actual list of files, construct a new Program
		if (this.currentProgram == null) {
			// Construct a new program.
			this.creatingProgram = true;

			try {
				this.currentProgram = this.createProgram();
			} finally {
				this.creatingProgram = false;
			}

			// If the program was invalidated before it was ever finished being created,
			// Try again to ensure all SourceFiles will be part of it
			if (this.invalidateProgram) {
				this.invalidateProgram = false;
				this.currentProgram = this.createProgram();
			}
		}

		return this.currentProgram;
	}

	/** The Printer used when stringifying transformed SourceFiles */
	getPrinter(): TS.Printer {
		return this.printer;
	}

	/** The underlying TS.Program of the builder program (cached) */
	getProgramInstance(): TS.Program {
		if (this.currentProgramInstance == null) {
			this.currentProgramInstance = this.getProgram().getProgram();
		}

		return this.currentProgramInstance;
	}

	/** The TypeChecker of the current program (cached) */
	getTypeChecker(): TS.TypeChecker {
		if (this.currentTypeChecker == null) {
			this.currentTypeChecker = this.getProgramInstance().getTypeChecker();
		}

		return this.currentTypeChecker;
	}

	getFilter(): (id: string) => boolean {
		return this.options.filter;
	}

	getTransformers(): CustomTransformersInput {
		return this.options.transformers;
	}

	/**
	 * Recursively gathers the transitive dependencies of `fileName` from
	 * sourceFileToDependenciesMap, deduplicating by resolved file names.
	 */
	private getDependenciesForFileDeep(fileName: string, dependencies: Set<ModuleDependency> = new Set(), seenModules: Set<string> = new Set()): Set<ModuleDependency> {
		if (seenModules.has(fileName)) return dependencies;
		seenModules.add(fileName);
		const localDependencies = this.sourceFileToDependenciesMap.get(fileName);
		// NOTE(review): this snapshot is taken before the loop, so dependencies
		// added within this same call are not deduplicated against — presumably
		// harmless because `dependencies` is a Set, but confirm intent
		const dependenciesArr = [...dependencies];

		if (localDependencies != null) {
			for (const dependency of localDependencies) {
				if (
					!dependenciesArr.some(
						({resolvedFileName, resolvedAmbientFileName}) =>
							resolvedFileName === dependency.resolvedFileName && resolvedAmbientFileName === dependency.resolvedAmbientFileName
					)
				) {
					dependencies.add(dependency);
					if (dependency.resolvedFileName != null) this.getDependenciesForFileDeep(dependency.resolvedFileName, dependencies, seenModules);
					if (dependency.resolvedAmbientFileName != null) {
						this.getDependenciesForFileDeep(dependency.resolvedAmbientFileName, dependencies, seenModules);
					}
				}
			}
		}

		return dependencies;
	}

	/** Direct (or, with `deep`, transitive) dependencies traced for `fileName` */
	getDependenciesForFile(fileName: string, deep = false): Set<ModuleDependency> | undefined {
		if (deep) {
			return this.getDependenciesForFileDeep(fileName);
		}

		return this.sourceFileToDependenciesMap.get(fileName);
	}

	getAllDependencies(): SourceFileToDependenciesMap {
		return this.sourceFileToDependenciesMap;
	}

	/**
	 * Adds (or replaces) a virtual file. Files originating from Rollup are run
	 * through ensureModuleTransformer so they are always treated as modules.
	 * Returns the existing file unchanged when the text is identical.
	 */
	add(fileInput: VirtualFileInput | VirtualFile, traceDependencies = true): VirtualFile {
		const existing = this.get(fileInput.fileName);

		// No-op if the file contents didn't change
		if (existing != null && existing.text === fileInput.text) {
			return existing;
		}

		this.delete(fileInput.fileName);

		if (fileInput.fromRollup) {
			const sourceFile = this.constructSourceFile(fileInput.fileName, fileInput.text);
			const typescript = this.getTypescript();
			const factory = ensureNodeFactory(typescript);
			const transformedSourceFile = ensureModuleTransformer({typescript, factory, sourceFile});
			if (transformedSourceFile !== sourceFile) {
				(fileInput as VirtualFile).transformedText = this.printer.printFile(transformedSourceFile);
			}
		}

		const addedFile = super.add(fileInput);

		if (traceDependencies) {
			this.refreshDependenciesForFileName(fileInput.fileName);
		}

		return addedFile;
	}

	/**
	 * Re-traces the module dependencies of `fileName` and, recursively, of each
	 * non-external resolved dependency. `seenModules` breaks cycles.
	 */
	private refreshDependenciesForFileName(fileName: string, seenModules: Set<string> = new Set()): void {
		if (seenModules.has(fileName) || this.externalFiles.has(fileName)) return;
		seenModules.add(fileName);

		const dependencies = getModuleDependencies({
			compilerHost: this,
			module: fileName
		});

		if (dependencies == null) return;

		this.sourceFileToDependenciesMap.set(fileName, dependencies);

		for (const resolveResult of dependencies) {
			// Don't perform a recursive descent into the files that are external
			if (isExternal(resolveResult.moduleSpecifier, fileName, this.options.externalOption)) {
				// Mark the module as external
				this.externalFiles.add(pickResolvedModule(resolveResult, true));
				continue;
			}

			for (const module of [resolveResult.resolvedFileName, resolveResult.resolvedAmbientFileName]) {
				if (module == null) continue;
				this.refreshDependenciesForFileName(module, seenModules);
			}
		}
	}

	/** Parses `text` into a SourceFile (with parent pointers) using the ScriptKind implied by the path */
	private constructSourceFile(fileName: string, text: string, languageVersion: TS.ScriptTarget = this.getScriptTarget()): TS.SourceFile {
		return this.getTypescript().createSourceFile(fileName, text, languageVersion, true, getScriptKindFromPath(fileName, this.getTypescript()));
	}

	/** Invalidates all program-derived caches; keeps the old program around as `previousProgram` */
	private clearProgram(): void {
		if (this.creatingProgram) {
			// Signal getProgram() to rebuild once the in-flight creation finishes
			this.invalidateProgram = true;
		}

		this.previousProgram = this.currentProgram;
		this.currentProgram = undefined;
		this.currentProgramInstance = undefined;
		this.currentTypeChecker = undefined;
	}

	private ensureEmitOutput(): TS.EmitOutput {
		if (this.emitOutput == null) {
			this.emitOutput = {
				outputFiles: [],
				emitSkipped: false
			};
		}

		return this.emitOutput;
	}

	/** Returns the accumulated emit output and resets the accumulator */
	private popEmitOutput(): TS.EmitOutput {
		const emitOutput = this.ensureEmitOutput();
		this.emitOutput = undefined;
		return emitOutput;
	}

	/** Removes a file from every cache and invalidates the program. True if anything was removed. */
	delete(fileName: string): boolean {
		const superDelete = super.delete(fileName);
		const sourceFilesDelete = this.sourceFiles.delete(fileName);
		const transformerDiagnosticsDelete = this.transformerDiagnostics.delete(fileName);
		const sourceFileToDependenciesMapDelete = this.sourceFileToDependenciesMap.delete(fileName);
		const success = superDelete || sourceFilesDelete || transformerDiagnosticsDelete || sourceFileToDependenciesMapDelete;
		this.clearProgram();
		return success;
	}

	/**
	 * Creates a new CompilerHost sharing this one's printer, with merged
	 * CompilerOptions and every per-file cache filtered by `fileNameFilter`.
	 */
	clone(
		compilerOptions: TS.CompilerOptions,
		fileNameFilter: (file: string) => boolean = () => true,
		overrides: Partial<Omit<CompilerHostOptions, "parsedCommandLineResult">> = {}
	): CompilerHost {
		return new CompilerHost(
			{
				...this.options,
				...overrides,
				parsedCommandLineResult: {
					...this.options.parsedCommandLineResult,
					parsedCommandLine: {
						...this.getParsedCommandLine(),
						fileNames: this.getParsedCommandLine().fileNames.filter(fileNameFilter),
						options: {
							...this.getCompilationSettings(),
							...compilerOptions
						}
					}
				}
			},
			this.printer,
			new Map([...this.sourceFiles.entries()].filter(([p]) => fileNameFilter(p))),
			new Map([...this.transformerDiagnostics.entries()].filter(([p]) => fileNameFilter(p))),
			new Map([...this.fileToVersionMap.entries()].filter(([p]) => fileNameFilter(p))),
			new Map([...this.sourceFileToDependenciesMap.entries()].filter(([p]) => fileNameFilter(p))),
			new Map([...this.files.entries()].filter(([p]) => fileNameFilter(p)))
		);
	}

	/**
	 * CompilerHost hook: returns (and caches) the SourceFile for `fileName`,
	 * reading it into the virtual file system on first access and bumping its
	 * version each time a fresh SourceFile is constructed.
	 */
	getSourceFile(fileName: string, languageVersion: TS.ScriptTarget = this.getScriptTarget()): TS.SourceFile | undefined {
		// Lib files are resolved relative to TypeScript's own lib directory
		const absoluteFileName = isTypeScriptLib(fileName) ? path.join(this.getDefaultLibLocation(), fileName) : ensureAbsolute(this.getCwd(), fileName);

		if (this.sourceFiles.has(absoluteFileName)) {
			return this.sourceFiles.get(absoluteFileName);
		}

		if (!this.isSupportedFileName(absoluteFileName, true)) return undefined;

		let file = this.get(absoluteFileName);

		if (file == null) {
			const text = this.readFile(absoluteFileName);
			if (text == null) return undefined;
			file = this.add({fileName: absoluteFileName, text, fromRollup: false}, false);
		}

		const sourceFile = this.constructSourceFile(absoluteFileName, file.transformedText, languageVersion);
		this.sourceFiles.set(absoluteFileName, sourceFile);

		const oldVersion = this.fileToVersionMap.get(absoluteFileName) ?? 0;
		const newVersion = oldVersion + 1;
		this.fileToVersionMap.set(absoluteFileName, newVersion);

		// SourceFiles in builder programs needs a version
		(sourceFile as unknown as {version: number}).version = newVersion;

		return sourceFile;
	}

	/** Effective type roots for the current CompilerOptions (cached) */
	getTypeRoots() {
		if (this.currentTypeRoots == null) {
			this.currentTypeRoots = new Set(this.getTypescript().getEffectiveTypeRoots(this.getCompilationSettings(), this));
		}

		return this.currentTypeRoots;
	}

	/** Directory containing TypeScript's default lib files */
	getDefaultLibLocation(): string {
		return path.dirname(this.getTypescript().getDefaultLibFilePath(this.getCompilationSettings()));
	}

	/**
	 * Gets the Custom Transformers to use, depending on the current emit mode
	 */
	getCustomTransformers(transformers: CustomTransformersInput = this.getTransformers()): TS.CustomTransformers | undefined {
		const mergedTransformers = mergeTransformers(transformers);
		const upgradedTransformers = mergedTransformers({
			program: this.getProgramInstance(),
			typescript: this.getTypescript(),
			printer: this.printer,

			/**
			 * This hook can add diagnostics from within CustomTransformers. These will be emitted alongside Typescript diagnostics seamlessly
			 */
			addDiagnostics: (...diagnostics) => {
				diagnostics.forEach(diagnostic => {
					// Skip diagnostics that doesn't point to a specific file
					if (diagnostic.file == null) return;
					let transformerDiagnostics = this.transformerDiagnostics.get(diagnostic.file.fileName);

					// If no file matches the one of the diagnostic, skip it
					if (transformerDiagnostics == null) {
						transformerDiagnostics = [];
						this.transformerDiagnostics.set(diagnostic.file.fileName, transformerDiagnostics);
					}

					// Add the diagnostic
					transformerDiagnostics.push(diagnostic);
				});
			}
		});

		// Ensure that declarations are never transformed if not allowed
		if (!this.allowTransformingDeclarations()) {
			return {
				...upgradedTransformers,
				afterDeclarations: undefined
			};
		}

		return upgradedTransformers;
	}

	/**
	 * Gets the default lib file name based on the given CompilerOptions
	 */
	getDefaultLibFileName(compilerOptions: TS.CompilerOptions): string {
		return this.getTypescript().getDefaultLibFileName(compilerOptions);
	}

	/**
	 * Gets the canonical filename for the given file
	 */
	getCanonicalFileName(fileName: string): string {
		return this.useCaseSensitiveFileNames() ? fileName : fileName.toLowerCase();
	}

	/**
	 * Returns true if file names should be treated as case-sensitive
	 */
	useCaseSensitiveFileNames(): boolean {
		return this.getFileSystem().useCaseSensitiveFileNames;
	}

	/**
	 * Gets the newline to use
	 */
	getNewLine(): string {
		const compilationSettings = this.getCompilationSettings();
		return compilationSettings.newLine != null ? getNewLineCharacter(compilationSettings.newLine, this.getTypescript()) : this.getFileSystem().newLine;
	}

	/**
	 * Reads the given directory
	 */
	readDirectory(p: string, extensions: readonly string[], exclude: readonly string[] | undefined, include: readonly string[], depth?: number): string[] {
		return this.getFileSystem().readDirectory(path.native.normalize(p), extensions, exclude, include, depth).map(path.normalize);
	}

	/** Resolves `moduleName` from `containingFile` through the (cached) resolver */
	resolve(moduleName: string, containingFile: string): ExtendedResolvedModule | null {
		return resolveId({
			moduleResolutionHost: this,
			parent: containingFile,
			id: moduleName,
			resolveCache: this.options.resolveCache
		});
	}

	/** CompilerHost hook: resolves each module name, preferring ambient (.d.ts) resolutions */
	resolveModuleNames(moduleNames: string[], containingFile: string): (TS.ResolvedModuleFull | undefined)[] {
		const resolvedModules: (TS.ResolvedModuleFull | undefined)[] = [];
		for (const moduleName of moduleNames) {
			const result = this.resolve(moduleName, containingFile);
			if (result != null && result.resolvedAmbientFileName != null) {
				resolvedModules.push({...result, resolvedFileName: result.resolvedAmbientFileName});
			} else if (result != null && result.resolvedFileName != null) {
				resolvedModules.push({...result, resolvedFileName: result.resolvedFileName});
			} else {
				resolvedModules.push(undefined);
			}
		}

		return resolvedModules;
	}

	/** CompilerHost hook: resolves type reference directives with the same resolver as modules */
	resolveTypeReferenceDirectives(typeReferenceDirectiveNames: string[], containingFile: string): (TS.ResolvedTypeReferenceDirective | undefined)[] {
		const resolvedTypeReferenceDirectives: (TS.ResolvedTypeReferenceDirective | undefined)[] = [];

		for (const typeReferenceDirectiveName of typeReferenceDirectiveNames) {
			// try to use standard resolution
			const result = resolveId({
				moduleResolutionHost: this,
				parent: containingFile,
				id: typeReferenceDirectiveName,
				resolveCache: this.options.resolveCache
			});
			if (result != null && result.resolvedAmbientFileName != null) {
				resolvedTypeReferenceDirectives.push({...result, primary: true, resolvedFileName: result.resolvedAmbientFileName});
			} else if (result != null && result.resolvedFileName != null) {
				resolvedTypeReferenceDirectives.push({...result, primary: true, resolvedFileName: result.resolvedFileName});
			} else {
				resolvedTypeReferenceDirectives.push(undefined);
			}
		}

		return resolvedTypeReferenceDirectives;
	}

	/**
	 * Adds all default declaration files to the LanguageService
	 */
	private addDefaultFileNames(): void {
		this.getParsedCommandLine().fileNames.forEach(file => {
			const fileName = ensureAbsolute(this.getCwd(), file);

			if (!this.getFilter()(path.normalize(fileName))) return;

			const text = this.readFile(fileName);
			if (text != null) {
				this.add({
					fileName,
					text,
					fromRollup: false
				});
			}
		});
	}
}
the_stack
import * as aws from "@pulumi/aws";
import * as pulumi from "@pulumi/pulumi";

// import * as config from "../config";
// import * as region from "../region";

import { MetricStatistic } from "./metric";
import { Widget } from "./widget";
import { AlarmAnnotation, WidgetAnnotation } from "./widgets_annotations";
import * as wjson from "./widgets_json";
import * as utils from "../utils";

export interface SimpleWidgetArgs {
    /**
     * The width of the widget in grid units (in a 24-column grid). The default is 6.
     *
     * Valid Values: 1–24
     */
    width?: number;

    /**
     * The height of the widget in grid units. The default is 6.
     *
     * Valid Values: 1–1000
     */
    height?: number;
}

/**
 * Base type of all non-flow Widgets to place in a DashboardGrid.
 */
export abstract class SimpleWidget implements Widget {
    constructor(private readonly args: SimpleWidgetArgs) {
        // Validate the documented CloudWatch dashboard grid bounds eagerly,
        // at construction time, rather than at JSON-generation time.
        if (args.width !== undefined) {
            if (args.width < 1 || args.width > 24) {
                throw new Error("[args.width] must be between 1 and 24 (inclusive).");
            }
        }

        if (args.height !== undefined) {
            if (args.height < 1 || args.height > 1000) {
                throw new Error("[args.height] must be between 1 and 1000 (inclusive).");
            }
        }
    }

    public width() {
        return this.args.width !== undefined ? this.args.width : 6;
    }

    public height() {
        return this.args.height !== undefined ? this.args.height : 6;
    }

    // The "type" discriminator of the widget JSON; supplied by each subclass.
    /** @internal */
    protected abstract computeType(): wjson.WidgetJson["type"];
    // The widget-type-specific "properties" payload; supplied by each subclass.
    /** @internal */
    protected abstract computeProperties(region: pulumi.Output<aws.Region>): wjson.WidgetJson["properties"];

    /** For internal use only. */
    public addWidgetJson(widgetJsons: wjson.WidgetJson[], xOffset: number, yOffset: number, region: pulumi.Output<aws.Region>) {
        // Build the structure common to all simple widgets. Defer to our subclasses for
        // details only they can fill in.
        widgetJsons.push({
            x: xOffset,
            y: yOffset,
            width: this.width(),
            height: this.height(),
            type: this.computeType(),
            properties: this.computeProperties(region),
        });
    }
}

export interface AlarmWidgetArgs extends SimpleWidgetArgs {
    /** An array of alarm ARNs to include in the widget. The array can have 1-100 ARNs. */
    alarms: pulumi.Input<string>[];

    /**
     * Specifies how to sort the alarms in the widget.
     *
     * Choose default to sort them in alphabetical order by alarm name.
     *
     * Choose stateUpdatedTimestamp to sort them first by alarm state, with alarms in ALARM state first,
     * INSUFFICIENT_DATA alarms next, and OK alarms last. Within each group, the alarms are sorted by when
     * they last changed state, with more recent state changes listed first.
     *
     * Choose timestamp to sort them by the time when the alarms most recently changed state, no matter
     * the current alarm state. The alarm that changed state most recently is listed first.
     *
     * If you omit this field, the alarms are sorted in alphabetical order.
     */
    sortBy?: pulumi.Input<"default" | "stateUpdatedTimestamp" | "timestamp" | undefined>;

    /**
     * Use this field to filter the list of alarms displayed in the widget to only those alarms
     * currently in the specified states. You can specify one or more alarm states in the value
     * for this field. The alarm states that you can specify are ALARM, INSUFFICIENT_DATA, and OK.
     *
     * If you omit this field or specify an empty array, all the alarms specified in alarms are displayed.
     */
    states?: pulumi.Input<("ALARM" | "INSUFFICIENT_DATA" | "OK")[] | undefined>;

    /** The title to be displayed for the alarm widget. */
    title?: pulumi.Input<string>;
}

/**
 * Simple widget that displays an array of cloudwatch alarm status in the dashboard grid.
 */
export class AlarmWidget extends SimpleWidget {
    private readonly alarmArgs: AlarmWidgetArgs;

    constructor(args: AlarmWidgetArgs) {
        super(args);
        this.alarmArgs = args;
    }

    // Alarm widgets default to a shorter height (2) than the base class default (6).
    public height() {
        return this.alarmArgs.height !== undefined ? this.alarmArgs.height : 2;
    }

    protected computeType(): wjson.AlarmWidgetJson["type"] {
        return "alarm";
    }

    protected computeProperties(region: pulumi.Output<aws.Region>): wjson.AlarmWidgetJson["properties"] {
        return {
            alarms: this.alarmArgs.alarms,
            sortBy: this.alarmArgs.sortBy,
            states: this.alarmArgs.states,
            title: this.alarmArgs.title,
        };
    }
}

/**
 * Simple [Widget] that can be used for putting space between other widgets in the [Dashboard].
 */
export class SpaceWidget implements Widget {
    private readonly _width: number;
    private readonly _height: number;

    // Two construction forms: explicit (width, height) or a SimpleWidgetArgs object.
    constructor(width: number, height: number);
    constructor(args: SimpleWidgetArgs);
    constructor(widthOrArgs: number | SimpleWidgetArgs, height?: number) {
        if (typeof widthOrArgs === "number") {
            this._width = widthOrArgs;
            this._height = height!;
        } else {
            this._width = widthOrArgs.width !== undefined ? widthOrArgs.width : 6;
            this._height = widthOrArgs.height !== undefined ? widthOrArgs.height : 6;
        }
    }

    public width() { return this._width; }
    public height() { return this._height; }

    public addWidgetJson(widgetJsons: wjson.WidgetJson[], xOffset: number, yOffset: number): void {
        // Nothing to do.  This Widget exists just to ensure proper placement of other real widgets.
    }
}

export interface TextWidgetArgs extends SimpleWidgetArgs {
    /**
     * The text to be displayed by the widget.
     */
    markdown: pulumi.Input<string>;
}

/**
 * Simple widget that displays a piece of text in the dashboard grid.
 */
export class TextWidget extends SimpleWidget {
    private readonly textArgs: TextWidgetArgs;

    // Two construction forms: a plain markdown string or a full TextWidgetArgs object.
    constructor(markdown: string);
    constructor(args: TextWidgetArgs);
    constructor(markdownOrArgs: string | TextWidgetArgs) {
        const args = typeof markdownOrArgs === "string" ? { markdown: markdownOrArgs } : markdownOrArgs;

        super(args);
        this.textArgs = args;
    }

    protected computeType(): wjson.TextWidgetJson["type"] {
        return "text";
    }

    protected computeProperties(region: pulumi.Output<aws.Region>): wjson.TextWidgetJson["properties"] {
        return { markdown: this.textArgs.markdown };
    }
}

// Normalizes an optional single-value-or-array argument into an array:
// undefined -> [], T -> [T], T[] -> T[].
function flattenArray<T>(annotations: T | T[] | undefined) {
    return Array.isArray(annotations) ? annotations :
        annotations ? [annotations] : [];
}

export interface MetricWidgetArgs extends SimpleWidgetArgs {
    /**
     * Used to show a graph of a single alarm.  If, instead, you want to place horizontal lines in
     * graphs to show the trigger point of an alarm, then add the alarm to [annotations] instead.
     *
     * At least one of [alarm], [annotations] or [metrics] must be supplied.
     */
    alarm?: pulumi.Input<string> | WidgetAlarm;

    /**
     * A single metric widget can have up to one alarm, and multiple horizontal and vertical
     * annotations.
     *
     * An alarm annotation is required only when metrics is not specified. A horizontal or vertical
     * annotation is not required.
     *
     * Instances of this interface include [aws.cloudwatch.Alarm], [AlarmAnnotation],
     * [HorizontalAnnotation] and [VerticalAnnotation].
     *
     * At least one of [alarm], [annotations] or [metrics] must be supplied.
     */
    annotations?: WidgetAnnotation | WidgetAnnotation[];

    /**
     * Specify a metrics array to include one or more metrics (without alarms), math expressions, or
     * search expressions. One metrics array can include 0–100 metrics and expressions.
     *
     * See [ExpressionWidgetMetric] and [Metric] to create instances that can be added to this
     * array.
     *
     * At least one of [alarm], [annotations] or [metrics] must be supplied.
     */
    metrics?: WidgetMetric | WidgetMetric[];

    /** The title to be displayed for the graph or number. */
    title?: pulumi.Input<string>;

    /**
     * The default period, in seconds, for all metrics in this widget. The period is the length of
     * time represented by one data point on the graph. This default can be overridden within each
     * metric definition. The default is 300.
     */
    period?: pulumi.Input<number>;

    /**
     * The region of the metric.  Defaults to the region of the stack if not specified.
     */
    region?: pulumi.Input<aws.Region>;

    /**
     * The default statistic to be displayed for each metric in the array. This default can be
     * overridden within the definition of each individual metric in the metrics array.
     */
    statistic?: pulumi.Input<MetricStatistic>;

    /**
     * The percentile statistic for the metric associated with the alarm. Specify a value between
     * [0.0] and [100].
     */
    extendedStatistic?: pulumi.Input<number>;
}

export interface WidgetAlarm {
    arn: pulumi.Input<string>;
}

/**
 * Base type for widgets that display data from a set of [Metric]s.  See [LineGraphMetricWidget],
 * [StackedAreaGraphMetricWidget] and [SingleNumberMetricWidget] as concrete [Widget] instances for
 * displaying [Metric]s.
 */
export abstract class MetricWidget extends SimpleWidget {
    private readonly annotations: WidgetAnnotation[];
    private readonly metrics: WidgetMetric[];

    constructor(private readonly metricArgs: MetricWidgetArgs) {
        super(metricArgs);

        this.annotations = flattenArray(metricArgs.annotations);
        this.metrics = flattenArray(metricArgs.metrics);

        // If they specified an alarm, then make an appropriate annotation that will set
        // properties.alarms.
        const alarm = metricArgs.alarm;
        if (alarm) {
            // `alarm` is either a WidgetAlarm ({ arn }) or a raw Input<string>;
            // whichever of the two resolves to a value wins (`s1 || s2`).
            // (sic: local is named "alarmArm", not "alarmArn" — harmless typo)
            const alarmArm = pulumi.all([(<WidgetAlarm>alarm).arn, <pulumi.Input<string>>alarm])
                                   .apply(([s1, s2]) => s1 || s2);
            this.annotations.push(new AlarmAnnotation(alarmArm));
        }

        if (this.annotations.length === 0 && this.metrics.length === 0) {
            throw new Error("[args.metrics] must be provided if [args.annotations] is not provided.");
        }
    }

    // Subclasses pick the view kind ("timeSeries"/"singleValue"/etc.), stacking, and y-axis.
    protected abstract computeView(): wjson.MetricWidgetPropertiesJson["view"];
    protected abstract computedStacked(): wjson.MetricWidgetPropertiesJson["stacked"];
    protected abstract computeYAxis(): wjson.MetricWidgetPropertiesJson["yAxis"];

    protected computeType = (): wjson.MetricWidgetJson["type"] => "metric";

    protected computeProperties(region: pulumi.Output<aws.Region>): wjson.MetricWidgetJson["properties"] {
        // NOTE(review): this mutually-exclusive statistic/extendedStatistic check
        // duplicates the logic in [statisticString] below — consider reusing it.
        const stat = pulumi.all([this.metricArgs.extendedStatistic, this.metricArgs.statistic])
                           .apply(([extendedStatistic, statistic]) => {
                               if (statistic !== undefined && extendedStatistic !== undefined) {
                                   throw new Error("[args.statistic] and [args.extendedStatistic] cannot both be provided.");
                               }

                               return extendedStatistic !== undefined ? `p${extendedStatistic}` : statistic!;
                           });

        let annotations: wjson.MetricWidgetAnnotationsJson | undefined;
        if (this.annotations.length > 0) {
            annotations = {};
            for (const annotation of this.annotations) {
                annotation.addWidgetJson(annotations);
            }
        }

        let metrics: wjson.MetricJson[] | undefined;
        if (this.metrics.length > 0) {
            metrics = [];
            for (const metric of this.metrics) {
                metric.addWidgetJson(metrics);
            }
        }

        const result = {
            stat,
            metrics,
            annotations,
            title: this.metricArgs.title,
            // CloudWatch requires the period to be a multiple of 60 seconds.
            period: utils.ifUndefined(this.metricArgs.period, 300).apply(p => {
                if (p % 60 !== 0) {
                    throw new Error(`Dashboard metric period must be a multiple of 60: ${p}`);
                }

                return p;
            }),
            region: utils.ifUndefined(this.metricArgs.region, region),
            view: this.computeView(),
            stacked: this.computedStacked(),
            yAxis: this.computeYAxis(),
        };

        return result;
    }
}

/** @internal */
export function statisticString(obj: { extendedStatistic: pulumi.Input<number | undefined>, statistic: pulumi.Input<MetricStatistic> }) {
    // Produces the CloudWatch "stat" string: "p<N>" for extended (percentile)
    // statistics, otherwise the plain statistic name. The two are mutually exclusive.
    return pulumi.output(obj).apply(obj => {
        if (obj.statistic !== undefined && obj.extendedStatistic !== undefined) {
            throw new Error("[args.statistic] and [args.extendedStatistic] cannot both be provided.");
        }

        return obj.extendedStatistic !== undefined ? `p${obj.extendedStatistic}` : obj.statistic;
    });
}

/**
 * Base type for all objects that can be placed in the [metrics] array of [MetricWidgetArgs].
 *
 * See [ExpressionWidgetMetric] and [Metric] to create instances that can be added to
 * [MetricWidgetArgs.metrics].
 */
export interface WidgetMetric {
    /** For internal use only.  Only intended to be called by [MetricWidget]. */
    addWidgetJson(metrics: wjson.MetricJson[]): void;
}

/**
 * Used to pass math or search expressions to a [MetricWidget].
 *
 * See https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/using-metric-math.html and
 * https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/using-search-expressions.html for
 * more details.
*/ export class ExpressionWidgetMetric implements WidgetMetric { /** * @param expression The math expression or search expression. * @param label The label to display in the graph to represent this time series. * @param id The id of this time series. This id can be used as part of a math expression. */ constructor(private readonly expression: pulumi.Input<string>, private readonly label?: pulumi.Input<string>, private readonly id?: pulumi.Input<string>) { } /** For internal use only. */ addWidgetJson(metrics: wjson.MetricJson[]): void { const json: wjson.ExpressionMetricJson = [{ expression: this.expression, label: this.label, id: this.id, }]; metrics.push(json); } }
the_stack
import {spellInfo} from './spellToWord';
import {_warn, isCnChar, has} from './util';
import defultDict from './dict/spell-default.json';
import {AllArgs, ICnChar, TypeProp, ToneType, SpellArg, StrokeArg, TypeValueObject} from 'cnchar-types/main/index';
import {Json, ITransformReturn} from 'cnchar-types/main/common';
import {TSpellArg, IDealUpLowFirst, IRemoveTone, IFunc, ICheckArgs, ITransformTone} from 'cnchar-types/main/tool';

// Default spellings for polyphonic characters (used when poly mode is off).
const defDict = defultDict as Json<string>;

// All toned vowels, grouped in runs of 4 (tones 1-4) per base vowel.
export const tones: string = 'āáǎàōóǒòēéěèīíǐìūúǔùǖǘǚǜ*ńňǹ'; // '*' marks the first tone of bare 'n'
// The corresponding tone-less base letters, one per group of 4 above.
const noTones: string = 'aoeiuün';

// Canonical names of the arguments accepted by spell()/stroke().
export const arg: TSpellArg = {
    array: 'array',
    low: 'low',
    up: 'up',
    first: 'first',
    poly: 'poly',
    tone: 'tone',
    simple: 'simple',
    trad: 'trad',
};

let _cnchar: ICnChar;

// Injects the cnchar instance this module reads configuration/type info from.
export function initCnchar (cnchar: ICnChar): void {
    _cnchar = cnchar;
}

// Sentinel marking a result entry that holds a non-Chinese character.
const NOT_CNCHAR: string = 'NOT_CNCHAR';

// Converts the first element of originArgs (a string of characters) to pinyin.
// Remaining elements of originArgs are option flags (see `arg` above).
// Dict format: for each pinyin key, dict[sp] is "<tonePos><ch><tone><ch><tone>...",
// i.e. the first char is the tone-mark position, followed by (character, tone) pairs.
export function spell (dict: Json<string>, originArgs: Array<string>): string | Array<string> {
    const strs = originArgs[0].split('');
    const args = (originArgs.splice(1)) as Array<SpellArg>;
    checkArgs('spell', args);
    const poly = has(args, arg.poly);
    const tone = has(args, arg.tone);
    const res: Array<Array<string>> = [];
    for (const sp in dict) { // iterate over every pinyin key
        const ds: string = dict[sp]; // all characters that share this pinyin (as one string)
        const pos = parseInt(ds[0]); // tone-mark position within this pinyin
        for (let i = 0; i < strs.length; i++) { // iterate over the input characters
            const ch: string = strs[i];
            if (isCnChar(ch)) { // a Chinese character
                let index = ds.indexOf(ch);
                if (index !== -1) {
                    const ssp = getSpell(sp, ds, index, poly, tone, pos); // single spell
                    if (ssp.poly) { // poly mode AND the current character is a polyphone
                        if (!res[i]) { res[i] = []; }
                        res[i].push(ssp.res);
                        // Collect every further occurrence of ch under this same pinyin
                        // (each occurrence is 2 chars apart: character + tone digit).
                        let dsNew = ds;
                        const n = (dsNew.match(new RegExp(ch, 'g')) as Array<string>).length;
                        for (let k = 1; k < n; k++) {
                            dsNew = dsNew.substr(index + 2);
                            index = dsNew.indexOf(ch);
                            res[i].push(getSpell(sp, dsNew, index, poly, tone, pos).res);
                        }
                    } else {
                        if (ssp.isPolyWord) { // a polyphone, but poly mode is off
                            if (defDict[ch]) { // a default spelling is configured for this polyphone
                                ssp.res = removeTone(defDict[ch], tone).spell; // the default carries a tone mark
                            }
                        }
                        res[i] = [ssp.res];
                        strs[i] = '';
                    }
                }
            }
            // NOTE(review): brace balance in this region of the pasted source looks off by
            // one; verify against upstream before relying on this nesting.
            } else if (ch !== '') { // not a Chinese character
                res[i] = [NOT_CNCHAR, ch];
            }
        }
    }
    dealUpLowFirst(res, args);
    // read the data collected in res
    const result: Array<string> = [];
    for (let i = 0; i < strs.length; i++) {
        const item = res[i];
        if (typeof item === 'undefined') {
            result[i] = strs[i]; // unknown characters fall back to themselves
        } else if (item.length > 1) {
            if (item[0] === NOT_CNCHAR) {
                result[i] = item[1]; // non-Chinese characters are returned unchanged
            } else {
                result[i] = `(${res[i].join('|')})`;
            }
        } else {
            result[i] = item[0];
        }
    }
    if (!has(args, arg.array)) {
        return result.join('');
    }
    return result;
}

// Applies the casing options (up/low/first) to every collected spelling.
export const dealUpLowFirst: IDealUpLowFirst = (
    res: Array<Array<string>> | Array<string>,
    args: Array<SpellArg>
): void => {
    if (_cnchar._.poly) {
        dealResCase(res, low); // when poly-phrase mode is enabled, force lowercase by default
        // because it will be overridden
    }
    if (has(args, arg.first)) {
        dealResCase(res, first);
    }
    if (has(args, arg.up)) {
        dealResCase(res, up);
    } else if (!has(args, arg.low)) {
        dealResCase(res, upFirst); // default: capitalize the first letter
    }
};

// Applies func to every spelling entry in res, skipping NOT_CNCHAR entries.
function dealResCase (
    res: Array<Array<string>> | Array<string>,
    func:(str: string) => string
): void {
    res.forEach((item: Array<string> | string, index: number) => {
        if (typeof item !== 'string') {
            if (item[0] !== NOT_CNCHAR) {
                item.forEach((s, j) => {item[j] = func(s);});
            }
        } else {
            res[index] = func(item);
        }
    });
}

// Casing helpers used by dealResCase.
function first (s: string): string {
    return s[0];
}
function up (s: string): string {
    return s.toUpperCase();
}
function upFirst (s: string): string {
    return up(s[0]) + s.substr(1);
}
function low (s: string): string {
    return s.toLowerCase();
}

// Resolves one (character, tone) pair at str[index..index+1] into a spelling.
// Tones >= 5 in the dict encode "polyphone" (real tone is value - 5).
function getSpell (
    spell: string,
    str: string,
    index: number,
    isPoly: boolean,
    isTone: boolean,
    pos: number
): {
    res: string,
    poly: boolean,
    isPolyWord: boolean
} {
    let tone = parseInt(str[index + 1]);
    const res = {res: spell, poly: false, isPolyWord: (tone >= 5)};
    if (!isPoly && !isTone) {
        return res;
    }
    if (res.isPolyWord) { // a polyphone
        tone -= 5; // recover the real tone
        if (isPoly) { // poly mode AND a polyphone
            res.poly = true;
        }
    }
    if (isTone) {
        res.res = setTone(spell, pos, tone as ToneType);
    }
    return res;
}

// tone=false : derive the tone-less spelling plus the tone number from a toned spelling
// tone=true : return the spelling unchanged
export const removeTone: IRemoveTone = (spell: string, tone: boolean): {
    spell: string,
    tone?: ToneType,
    index?: number
} => {
    if (tone) {
        return {spell};
    }
    for (let i = 0; i < spell.length; i++) {
        const index: number = tones.indexOf(spell[i]);
        if (index !== -1) { // hit: found a toned vowel
            return {
                // tones is grouped in runs of 4 per base vowel, hence /4 and %4.
                spell: spell.substr(0, i) + noTones[Math.floor(index / 4)] + spell.substr(i + 1),
                tone: ((index % 4) + 1) as ToneType,
                index: i + 1
            };
        }
    }
    return {spell, tone: 0, index: -1};
};

// Places the tone mark for `tone` onto the vowel at position `index`.
function setTone (spell: string, index: number, tone: ToneType): string {
    if (tone === 0) { // neutral tone: nothing to mark
        return spell;
    }
    const p = spell[index];
    const toneP = tones[noTones.indexOf(p) * 4 + (tone - 1)];
    if (p !== toneP) {
        return spell.replace(p, toneP);
    }
    return spell;
}

// stroke count
// Dict format: dict[count] is the string of all characters with that stroke count.
export function stroke (
    dict: Json<string>,
    originArgs: Array<string>
): number | Array<number> {
    const strs = originArgs[0].split('');
    const strokes: Array<number> = [];
    const args = originArgs.splice(1) as Array<StrokeArg>;
    checkArgs('stroke', args);
    for (const i in dict) {
        for (let j = 0; j < strs.length; j++) {
            if (strs[j]) {
                if (dict[i].indexOf(strs[j] as string) !== -1) {
                    strs[j] = '';
                    strokes[j] = parseInt(i);
                }
            }
        }
    }
    // Unknown characters count as 0 strokes.
    strs.forEach((c: string, i: number): void => {
        if (c) {strokes[i] = 0;}
    });
    if (!has(args, arg.array as StrokeArg)) {
        return sumStroke(strokes);
    }
    return strokes;
}

// Sums a list of per-character stroke counts.
export const sumStroke: IFunc<number, Array<number>> = (strs: Array<number>): number => {
    let sum: number = 0;
    strs.forEach(function (c) {
        sum += c;
    });
    return sum;
};

// all spell arguments: ["array", "low", "up", "first", "poly", "tone", "simple"]
// simple: disable traditional characters
// all stroke arguments: ["letter", "shape", "count", "name", "detail", "array", "order", "simple"]
//
let _hasCheck: boolean = false;
// Validates the option flags passed for a given API `type`, removing unknown args
// and warning about ignored/redundant combinations.  jumpNext suppresses the check
// for the immediately following call (used for internally chained calls).
export const checkArgs: ICheckArgs = (
    type: TypeProp,
    args: Array<AllArgs>,
    jumpNext?: boolean
): void => {
    if (!_cnchar.check) {
        return;
    }
    if (_hasCheck) {
        _hasCheck = false;
        return;
    }
    if (jumpNext) {
        _hasCheck = true;
    }
    // Strip and report arguments this API type does not accept.
    const useless: Array<AllArgs> = [];
    for (let i = args.length - 1; i >= 0; i--) {
        const arg = args[i];
        if (!(_cnchar.type[type] as TypeValueObject)[arg]) {
            useless.push(arg);
            args.splice(i, 1);
        }
    }
    const ignore: Array<AllArgs> = [];
    const redunt: Array<AllArgs> = [];
    // Pushes name (or each of several names) into arr when present in args.
    const check = (name: AllArgs | Array<AllArgs>, arr: Array<AllArgs> = ignore): void => {
        if (name instanceof Array) {
            name.forEach((item) => {
                check(item, arr);
            });
            return;
        }
        if (has(args, name)) {
            arr.push(name);
        }
    };
    if (_cnchar.plugins.indexOf('trad') === -1) {
        if (has(args, 'simple')) ignore.push('simple');
        if (has(args, 'trad')) ignore.push('trad');
    }
    if (type === 'spell') {
        if (has(args, 'up') && has(args, 'low')) {
            ignore.push('low');
        }
        // t.spell.origin means poly-phrase mode is enabled
        // !has(args,'origin') means poly-phrase mode is not disabled
        // in that case poly would be ignored
        // if(t.spell.origin && !has(args,'origin') && has(args,'poly')){
        // ignore.push('poly');
        // }
    } else if (type === 'stroke') { // stroke
        if (has(args, 'order')) { // stroke-order mode
            check('array', redunt);
            // precedence detail > shape > name > count > letter; the default is letter
            if (has(args, 'letter')) {
                check(['detail', 'shape', 'name', 'count']);
                check('letter', redunt);
            } else if (has(args, 'detail')) {
                check(['shape', 'name', 'count']);
            } else if (has(args, 'shape')) {
                check(['name', 'count']);
            } else if (has(args, 'name')) {
                check(['count']);
            }
        } else { // stroke-count mode
            check(['detail', 'shape', 'name', 'letter']);
            check('count', redunt);
        }
    } else if (type === 'orderToWord') {
        if (has(args, 'match')) {
            check(['matchorder', 'contain', 'start']);
        } else if (has(args, 'matchorder')) {
            check(['contain', 'start']);
        } else if (has(args, 'contain')) {
            check(['start']);
        }
    } else if (type === 'strokeToWord') {
    } else if (type === 'spellToWord') {
    } else if (type === 'idiom') {
        if (has(args, 'spell')) {
            check(['stroke', 'char']);
        } else if (has(args, 'stroke')) {
            check(['tone', 'char']);
        } else {
            check(['tone']);
        }
    } else if (type === 'xhy') {
    } else if (type === 'radical') {
    }
    warnArgs(useless, '无效', type, args);
    warnArgs(ignore, '被忽略', type, args);
    warnArgs(redunt, '冗余', type, args);
};

// Emits a (Chinese-language) console warning describing the offending arguments.
function warnArgs (
    arr: Array<AllArgs>,
    txt: string,
    type: TypeProp,
    args: Array<AllArgs>
): void {
    if (arr.length > 0) {
        let mes: string = `以下参数${txt}:${JSON.stringify(arr)};`;
        if (txt === '被忽略' && type === 'stroke' && !has(args, 'order')) {
            mes += ' 要启用笔顺模式必须使用 order 参数';
        } else {
            mes += ` 可选值:[${Object.keys((_cnchar.type[type] as TypeValueObject))}]`;
        }
        _warn(mes);
    }
}

// lv2 => lǘ
// Converts a trailing-digit tone notation into the marked form (no-op otherwise).
export function shapeSpell (spell: string): string {
    const tones: string = '01234';
    if (tones.indexOf(spell[spell.length - 1]) === -1) {
        return spell;
    }
    return transformTone(spell, true, 'low').spell;
}

// lv2 => {spell:'lü', tone: 2, index: 2, isTrans: true}
// lǘ => {spell:'lü', tone: 2, index: 2, isTrans: false}
// needTone = true: lv2 => {spell:'lǘ', tone: 2, index: 2, isTrans: true}
export const transformTone: ITransformTone = (
    spell: string,
    needTone: boolean = false,
    type: 'low' | 'up' = 'low'
): ITransformReturn => {
    if (spell.indexOf('v') !== -1) {
        spell = spell.replace('v', 'ü');
    }
    const lastStr: string = spell[spell.length - 1];
    let tone: ToneType;
    let index: number = -1;
    let isTrans: boolean = false;
    if (parseInt(lastStr).toString() === lastStr) { // lv2: tone given as a trailing digit
        spell = spell.substr(0, spell.length - 1);
        const info = spellInfo(spell);
        index = info.index;
        tone = parseInt(lastStr) as ToneType;
        isTrans = true;
        if (needTone) {
            spell = setTone(spell, index - 1, tone);
        }
    } else { // lǘ: the tone mark is already applied
        const info = spellInfo(spell);
        index = info.index;
        tone = info.tone;
        if (!needTone && tone !== 0) { // need to strip the tone and one is present
            spell = info.spell;
        }
    }
    if (type === 'low') {
        spell = spell.toLowerCase();
    } else if (type === 'up') {
        spell = spell.toUpperCase();
    }
    return {spell, tone, index, isTrans};
};
the_stack
import {
  Column,
  Header,
  HeaderGroup,
  TableGenerics,
  TableInstance,
} from '../types'
import { memo } from '../utils'
import { TableFeature } from './instance'

// One visual row of headers in the <thead>, identified by its depth.
export type CoreHeaderGroup<TGenerics extends TableGenerics> = {
  id: string
  depth: number
  headers: Header<TGenerics>[]
}

// The core (feature-independent) shape of a single header cell.
export type CoreHeader<TGenerics extends TableGenerics> = {
  id: string
  index: number
  depth: number
  column: Column<TGenerics>
  headerGroup: HeaderGroup<TGenerics>
  subHeaders: Header<TGenerics>[]
  colSpan: number
  rowSpan: number
  getLeafHeaders: () => Header<TGenerics>[]
  // True when this header only fills space for a column repeated from a lower row.
  isPlaceholder: boolean
  placeholderId?: string
  renderHeader: (options?: {
    renderPlaceholder?: boolean
  }) => string | null | TGenerics['Rendered']
  renderFooter: (options?: {
    renderPlaceholder?: boolean
  }) => string | null | TGenerics['Rendered']
}

// Header-related API added to the table instance. Each getter has a plain,
// left-pinned, center (unpinned), and right-pinned variant.
export type HeadersInstance<TGenerics extends TableGenerics> = {
  getHeaderGroups: () => HeaderGroup<TGenerics>[]
  getLeftHeaderGroups: () => HeaderGroup<TGenerics>[]
  getCenterHeaderGroups: () => HeaderGroup<TGenerics>[]
  getRightHeaderGroups: () => HeaderGroup<TGenerics>[]

  getFooterGroups: () => HeaderGroup<TGenerics>[]
  getLeftFooterGroups: () => HeaderGroup<TGenerics>[]
  getCenterFooterGroups: () => HeaderGroup<TGenerics>[]
  getRightFooterGroups: () => HeaderGroup<TGenerics>[]

  getFlatHeaders: () => Header<TGenerics>[]
  getLeftFlatHeaders: () => Header<TGenerics>[]
  getCenterFlatHeaders: () => Header<TGenerics>[]
  getRightFlatHeaders: () => Header<TGenerics>[]

  getLeafHeaders: () => Header<TGenerics>[]
  getLeftLeafHeaders: () => Header<TGenerics>[]
  getCenterLeafHeaders: () => Header<TGenerics>[]
  getRightLeafHeaders: () => Header<TGenerics>[]
}

//

// Builds a single header object for `column`, wires up its render functions,
// then lets every registered feature extend it.
function createHeader<TGenerics extends TableGenerics>(
  instance: TableInstance<TGenerics>,
  column: Column<TGenerics>,
  options: {
    id?: string
    isPlaceholder?: boolean
    placeholderId?: string
    index: number
    depth: number
  }
) {
  const id = options.id ?? column.id

  let header: CoreHeader<TGenerics> = {
    id,
    column,
    index: options.index,
    isPlaceholder: !!options.isPlaceholder,
    placeholderId: options.placeholderId,
    depth: options.depth,
    subHeaders: [],
    colSpan: 0,
    rowSpan: 0,
    // Assigned by createHeaderGroup once the group exists.
    headerGroup: null!,
    getLeafHeaders: (): Header<TGenerics>[] => {
      const leafHeaders: CoreHeader<TGenerics>[] = []

      // Depth-first walk; note leaves are pushed after their subtrees.
      const recurseHeader = (h: CoreHeader<TGenerics>) => {
        if (h.subHeaders && h.subHeaders.length) {
          h.subHeaders.map(recurseHeader)
        }
        leafHeaders.push(h)
      }

      recurseHeader(header)

      return leafHeaders as Header<TGenerics>[]
    },
    renderHeader: () =>
      column.columnDef.header
        ? instance._render(column.columnDef.header, {
            instance,
            header: header as Header<TGenerics>,
            column,
          })
        : null,
    renderFooter: () =>
      column.columnDef.footer
        ? instance._render(column.columnDef.footer, {
            instance,
            header: header as Header<TGenerics>,
            column,
          })
        : null,
  }

  instance._features.forEach(feature => {
    Object.assign(header, feature.createHeader?.(header, instance))
  })

  return header as Header<TGenerics>
}

export const Headers: TableFeature = {
  createInstance: <TGenerics extends TableGenerics>(
    instance: TableInstance<TGenerics>
  ): HeadersInstance<TGenerics> => {
    return {
      // Header Groups

      getHeaderGroups: memo(
        () => [
          instance.getAllColumns(),
          instance.getVisibleLeafColumns(),
          instance.getState().columnPinning.left,
          instance.getState().columnPinning.right,
        ],
        (allColumns, leafColumns, left, right) => {
          // Re-order leaf columns as left-pinned, unpinned, right-pinned before grouping.
          const leftColumns =
            left
              ?.map(columnId => leafColumns.find(d => d.id === columnId)!)
              .filter(Boolean) ?? []

          const rightColumns =
            right
              ?.map(columnId => leafColumns.find(d => d.id === columnId)!)
              .filter(Boolean) ?? []

          const centerColumns = leafColumns.filter(
            column => !left?.includes(column.id) && !right?.includes(column.id)
          )

          const headerGroups = buildHeaderGroups(
            allColumns,
            [...leftColumns, ...centerColumns, ...rightColumns],
            instance
          )

          return headerGroups
        },
        {
          key: process.env.NODE_ENV === 'development' && 'getHeaderGroups',
          debug: () => instance.options.debugAll ?? instance.options.debugHeaders,
        }
      ),

      getCenterHeaderGroups: memo(
        () => [
          instance.getAllColumns(),
          instance.getVisibleLeafColumns(),
          instance.getState().columnPinning.left,
          instance.getState().columnPinning.right,
        ],
        (allColumns, leafColumns, left, right) => {
          leafColumns = leafColumns.filter(
            column => !left?.includes(column.id) && !right?.includes(column.id)
          )
          return buildHeaderGroups(allColumns, leafColumns, instance, 'center')
        },
        {
          key: process.env.NODE_ENV === 'development' && 'getCenterHeaderGroups',
          debug: () => instance.options.debugAll ?? instance.options.debugHeaders,
        }
      ),

      getLeftHeaderGroups: memo(
        () => [
          instance.getAllColumns(),
          instance.getVisibleLeafColumns(),
          instance.getState().columnPinning.left,
        ],
        (allColumns, leafColumns, left) => {
          const orderedLeafColumns =
            left
              ?.map(columnId => leafColumns.find(d => d.id === columnId)!)
              .filter(Boolean) ?? []

          return buildHeaderGroups(
            allColumns,
            orderedLeafColumns,
            instance,
            'left'
          )
        },
        {
          key: process.env.NODE_ENV === 'development' && 'getLeftHeaderGroups',
          debug: () => instance.options.debugAll ?? instance.options.debugHeaders,
        }
      ),

      getRightHeaderGroups: memo(
        () => [
          instance.getAllColumns(),
          instance.getVisibleLeafColumns(),
          instance.getState().columnPinning.right,
        ],
        (allColumns, leafColumns, right) => {
          const orderedLeafColumns =
            right
              ?.map(columnId => leafColumns.find(d => d.id === columnId)!)
              .filter(Boolean) ?? []

          return buildHeaderGroups(
            allColumns,
            orderedLeafColumns,
            instance,
            'right'
          )
        },
        {
          key: process.env.NODE_ENV === 'development' && 'getRightHeaderGroups',
          debug: () => instance.options.debugAll ?? instance.options.debugHeaders,
        }
      ),

      // Footer Groups
      // Footers are simply the header groups in reverse row order.

      getFooterGroups: memo(
        () => [instance.getHeaderGroups()],
        headerGroups => {
          return [...headerGroups].reverse()
        },
        {
          key: process.env.NODE_ENV === 'development' && 'getFooterGroups',
          debug: () => instance.options.debugAll ?? instance.options.debugHeaders,
        }
      ),

      getLeftFooterGroups: memo(
        () => [instance.getLeftHeaderGroups()],
        headerGroups => {
          return [...headerGroups].reverse()
        },
        {
          key: process.env.NODE_ENV === 'development' && 'getLeftFooterGroups',
          debug: () => instance.options.debugAll ?? instance.options.debugHeaders,
        }
      ),

      getCenterFooterGroups: memo(
        () => [instance.getCenterHeaderGroups()],
        headerGroups => {
          return [...headerGroups].reverse()
        },
        {
          key: process.env.NODE_ENV === 'development' && 'getCenterFooterGroups',
          debug: () => instance.options.debugAll ?? instance.options.debugHeaders,
        }
      ),

      getRightFooterGroups: memo(
        () => [instance.getRightHeaderGroups()],
        headerGroups => {
          return [...headerGroups].reverse()
        },
        {
          key: process.env.NODE_ENV === 'development' && 'getRightFooterGroups',
          debug: () => instance.options.debugAll ?? instance.options.debugHeaders,
        }
      ),

      // Flat Headers
      // All headers of all rows, flattened into one array.

      getFlatHeaders: memo(
        () => [instance.getHeaderGroups()],
        headerGroups => {
          return headerGroups
            .map(headerGroup => {
              return headerGroup.headers
            })
            .flat()
        },
        {
          key: process.env.NODE_ENV === 'development' && 'getFlatHeaders',
          debug: () => instance.options.debugAll ?? instance.options.debugHeaders,
        }
      ),

      getLeftFlatHeaders: memo(
        () => [instance.getLeftHeaderGroups()],
        left => {
          return left
            .map(headerGroup => {
              return headerGroup.headers
            })
            .flat()
        },
        {
          key: process.env.NODE_ENV === 'development' && 'getLeftFlatHeaders',
          debug: () => instance.options.debugAll ?? instance.options.debugHeaders,
        }
      ),

      getCenterFlatHeaders: memo(
        () => [instance.getCenterHeaderGroups()],
        left => {
          return left
            .map(headerGroup => {
              return headerGroup.headers
            })
            .flat()
        },
        {
          key: process.env.NODE_ENV === 'development' && 'getCenterFlatHeaders',
          debug: () => instance.options.debugAll ?? instance.options.debugHeaders,
        }
      ),

      getRightFlatHeaders: memo(
        () => [instance.getRightHeaderGroups()],
        left => {
          return left
            .map(headerGroup => {
              return headerGroup.headers
            })
            .flat()
        },
        {
          key: process.env.NODE_ENV === 'development' && 'getRightFlatHeaders',
          debug: () => instance.options.debugAll ?? instance.options.debugHeaders,
        }
      ),

      // Leaf Headers
      // Headers with no sub-headers, i.e. those bound to actual leaf columns.

      getCenterLeafHeaders: memo(
        () => [instance.getCenterFlatHeaders()],
        flatHeaders => {
          return flatHeaders.filter(header => !header.subHeaders?.length)
        },
        {
          key: process.env.NODE_ENV === 'development' && 'getCenterLeafHeaders',
          debug: () => instance.options.debugAll ?? instance.options.debugHeaders,
        }
      ),

      getLeftLeafHeaders: memo(
        () => [instance.getLeftFlatHeaders()],
        flatHeaders => {
          return flatHeaders.filter(header => !header.subHeaders?.length)
        },
        {
          key: process.env.NODE_ENV === 'development' && 'getLeftLeafHeaders',
          debug: () => instance.options.debugAll ?? instance.options.debugHeaders,
        }
      ),

      getRightLeafHeaders: memo(
        () => [instance.getRightFlatHeaders()],
        flatHeaders => {
          return flatHeaders.filter(header => !header.subHeaders?.length)
        },
        {
          key: process.env.NODE_ENV === 'development' && 'getRightLeafHeaders',
          debug: () => instance.options.debugAll ?? instance.options.debugHeaders,
        }
      ),

      getLeafHeaders: memo(
        () => [
          instance.getLeftHeaderGroups(),
          instance.getCenterHeaderGroups(),
          instance.getRightHeaderGroups(),
        ],
        (left, center, right) => {
          // Start from the top row of each pinned section and expand to leaves.
          return [
            ...(left[0]?.headers ?? []),
            ...(center[0]?.headers ?? []),
            ...(right[0]?.headers ?? []),
          ]
            .map(header => {
              return header.getLeafHeaders()
            })
            .flat()
        },
        {
          key: process.env.NODE_ENV === 'development' && 'getLeafHeaders',
          debug: () => instance.options.debugAll ?? instance.options.debugHeaders,
        }
      ),
    }
  },
}

// Builds the header-group rows (bottom-up) for the given ordered leaf columns,
// creating placeholder headers where a column has no parent at a given depth,
// then computes col/row spans for every header.
export function buildHeaderGroups<TGenerics extends TableGenerics>(
  allColumns: Column<TGenerics>[],
  columnsToGroup: Column<TGenerics>[],
  instance: TableInstance<TGenerics>,
  headerFamily?: 'center' | 'left' | 'right'
) {
  // Find the max depth of the columns:
  // build the leaf column row
  // build each buffer row going up
  //    placeholder for non-existent level
  //    real column for existing level

  let maxDepth = 0

  const findMaxDepth = (columns: Column<TGenerics>[], depth = 1) => {
    maxDepth = Math.max(maxDepth, depth)

    columns
      .filter(column => column.getIsVisible())
      .forEach(column => {
        if (column.columns?.length) {
          findMaxDepth(column.columns, depth + 1)
        }
      }, 0)
  }

  findMaxDepth(allColumns)

  let headerGroups: HeaderGroup<TGenerics>[] = []

  const createHeaderGroup = (
    headersToGroup: Header<TGenerics>[],
    depth: number
  ) => {
    // The header group we are creating
    const headerGroup: HeaderGroup<TGenerics> = {
      depth,
      id: [headerFamily, `${depth}`].filter(Boolean).join('_'),
      headers: [],
    }

    // The parent columns we're going to scan next
    const pendingParentHeaders: Header<TGenerics>[] = []

    // Scan each column for parents
    headersToGroup.forEach(headerToGroup => {
      // What is the latest (last) parent column?
      const latestPendingParentHeader = [...pendingParentHeaders].reverse()[0]

      const isLeafHeader = headerToGroup.column.depth === headerGroup.depth

      let column: Column<TGenerics>
      let isPlaceholder = false

      if (isLeafHeader && headerToGroup.column.parent) {
        // The parent header is new
        column = headerToGroup.column.parent
      } else {
        // The parent header is repeated
        column = headerToGroup.column
        isPlaceholder = true
      }

      if (latestPendingParentHeader?.column === column) {
        // This column is repeated. Add it as a sub header to the next batch
        latestPendingParentHeader.subHeaders.push(headerToGroup)
      } else {
        // This is a new header. Let's create it
        const header = createHeader(instance, column, {
          id: [headerFamily, depth, column.id, headerToGroup?.id]
            .filter(Boolean)
            .join('_'),
          isPlaceholder,
          placeholderId: isPlaceholder
            ? `${pendingParentHeaders.filter(d => d.column === column).length}`
            : undefined,
          depth,
          index: pendingParentHeaders.length,
        })

        // Add the headerToGroup as a subHeader of the new header
        header.subHeaders.push(headerToGroup)
        // Add the new header to the pendingParentHeaders to get grouped
        // in the next batch
        pendingParentHeaders.push(header)
      }

      headerGroup.headers.push(headerToGroup)
      headerToGroup.headerGroup = headerGroup
    })

    headerGroups.push(headerGroup)

    if (depth > 0) {
      createHeaderGroup(pendingParentHeaders, depth - 1)
    }
  }

  const bottomHeaders = columnsToGroup.map((column, index) =>
    createHeader(instance, column, {
      depth: maxDepth,
      index,
    })
  )

  createHeaderGroup(bottomHeaders, maxDepth - 1)

  // Groups were built bottom-up; present them top-down.
  headerGroups.reverse()

  // headerGroups = headerGroups.filter(headerGroup => {
  //   return !headerGroup.headers.every(header => header.isPlaceholder)
  // })

  const recurseHeadersForSpans = (
    headers: Header<TGenerics>[]
  ): { colSpan: number; rowSpan: number }[] => {
    const filteredHeaders = headers.filter(header =>
      header.column.getIsVisible()
    )

    return filteredHeaders.map(header => {
      // A parent's colSpan is the sum of its children's; a leaf spans 1 column.
      let colSpan = 0
      let rowSpan = 0
      let childRowSpans = [0]

      if (header.subHeaders && header.subHeaders.length) {
        childRowSpans = []

        recurseHeadersForSpans(header.subHeaders).forEach(
          ({ colSpan: childColSpan, rowSpan: childRowSpan }) => {
            colSpan += childColSpan
            childRowSpans.push(childRowSpan)
          }
        )
      } else {
        colSpan = 1
      }

      const minChildRowSpan = Math.min(...childRowSpans)
      rowSpan = rowSpan + minChildRowSpan

      header.colSpan = colSpan
      header.rowSpan = rowSpan

      return { colSpan, rowSpan }
    })
  }

  recurseHeadersForSpans(headerGroups[0]?.headers ?? [])

  return headerGroups
}
the_stack
import EventEmitter from "events";
import Long from "long";
import * as dbadapters from "df/api/dbadapters";
import { Flags } from "df/common/flags";
import { retry } from "df/common/promises";
import { deepClone, equals } from "df/common/protos";
import { JSONObjectStringifier, StringifiedMap, StringifiedSet } from "df/common/strings/stringifier";
import { dataform } from "df/protos/ts";

// Event name used on the internal EventEmitter to broadcast run cancellation to
// all in-flight database queries (see executeTask's onCancel hook).
const CANCEL_EVENT = "jobCancel";

const flags = {
  // Minimum interval between change-listener notifications; see notifyListeners().
  runnerNotificationPeriodMillis: Flags.number("runner-notification-period-millis", 5000)
};

// An action counts as "successful" for dependency purposes if it ran successfully,
// was skipped because its outputs were cached, or was disabled (had no tasks).
const isSuccessfulAction = (actionResult: dataform.IActionResult) =>
  actionResult.status === dataform.ActionResult.ExecutionStatus.SUCCESSFUL ||
  actionResult.status === dataform.ActionResult.ExecutionStatus.CACHE_SKIPPED ||
  actionResult.status === dataform.ActionResult.ExecutionStatus.DISABLED;

/**
 * Pairs an action definition from a previous run with the result it produced.
 * Used by the run cache (shouldCacheSkip) to decide whether re-execution is needed.
 */
export interface IExecutedAction {
  executionAction: dataform.IExecutionAction;
  actionResult: dataform.IActionResult;
}

/**
 * Creates a Runner for the given execution graph and immediately starts it.
 *
 * @param dbadapter adapter for the target warehouse.
 * @param graph the compiled execution graph to run.
 * @param partiallyExecutedRunResult results from an earlier, interrupted run to resume from.
 * @param previouslyExecutedActions actions (and their results) from prior runs, used for run caching.
 * @returns the started Runner; await runner.result() for the final run result.
 */
export function run(
  dbadapter: dbadapters.IDbAdapter,
  graph: dataform.IExecutionGraph,
  partiallyExecutedRunResult: dataform.IRunResult = {},
  previouslyExecutedActions: IExecutedAction[] = []
): Runner {
  return new Runner(
    dbadapter,
    graph,
    partiallyExecutedRunResult,
    previouslyExecutedActions
  ).execute();
}

/**
 * Executes a dataform execution graph against a warehouse: prepares schemas,
 * runs actions as their dependencies complete, supports stop/cancel/timeout,
 * resuming a partially executed run, and skipping actions via the run cache.
 */
export class Runner {
  // Last-known warehouse metadata for each target table, updated as actions complete.
  private readonly warehouseStateByTarget: StringifiedMap<
    dataform.ITarget,
    dataform.ITableMetadata
  >;
  // Declaration targets that are not plain TABLEs; these poison run-cache eligibility
  // for anything downstream of them (see shouldCacheSkip).
  private readonly nonTableDeclarationTargets: StringifiedSet<dataform.ITarget>;
  // Prior-run results keyed by target, consulted by the run cache.
  private readonly previouslyExecutedActions: StringifiedMap<dataform.ITarget, IExecutedAction>;
  private readonly allActionNames: Set<string>;
  private readonly runResult: dataform.IRunResult;
  private readonly changeListeners: Array<(graph: dataform.IRunResult) => void> = [];
  private readonly eEmitter: EventEmitter;
  private executedActionNames: Set<string>;
  private successfullyExecutedActionNames: Set<string>;
  // Actions that have not yet been scheduled for execution.
  private pendingActions: dataform.IExecutionAction[];
  private lastNotificationTimestampMillis = 0;
  private stopped = false;
  private cancelled = false;
  private timeout: NodeJS.Timer;
  private timedOut = false;
  private executionTask: Promise<dataform.IRunResult>;

  constructor(
    private readonly dbadapter: dbadapters.IDbAdapter,
    private readonly graph: dataform.IExecutionGraph,
    partiallyExecutedRunResult: dataform.IRunResult = {},
    previouslyExecutedActions: IExecutedAction[] = []
  ) {
    this.allActionNames = new Set<string>(graph.actions.map(action => action.name));
    this.runResult = {
      actions: [],
      ...partiallyExecutedRunResult
    };
    this.warehouseStateByTarget = new StringifiedMap(
      JSONObjectStringifier.create(),
      graph.warehouseState.tables?.map(tableMetadata => [tableMetadata.target, tableMetadata])
    );
    this.nonTableDeclarationTargets = new StringifiedSet<dataform.ITarget>(
      JSONObjectStringifier.create(),
      graph.declarationTargets.filter(
        declarationTarget =>
          this.warehouseStateByTarget.get(declarationTarget)?.type !==
          dataform.TableMetadata.Type.TABLE
      )
    );
    this.previouslyExecutedActions = new StringifiedMap(
      JSONObjectStringifier.create(),
      previouslyExecutedActions.map(executedAction => [
        executedAction.executionAction.target,
        executedAction
      ])
    );
    // Actions still marked RUNNING in a resumed run result are treated as not executed,
    // so they will be re-run (picking up after their last completed task).
    this.executedActionNames = new Set(
      this.runResult.actions
        .filter(action => action.status !== dataform.ActionResult.ExecutionStatus.RUNNING)
        .map(action => action.name)
    );
    this.successfullyExecutedActionNames = new Set(
      this.runResult.actions.filter(isSuccessfulAction).map(action => action.name)
    );
    this.pendingActions = graph.actions.filter(
      action => !this.executedActionNames.has(action.name)
    );
    this.eEmitter = new EventEmitter();
    // There could feasibly be thousands of listeners to this, 0 makes the limit infinite.
    this.eEmitter.setMaxListeners(0);
  }

  /** Registers a listener invoked (throttled) with a clone of the current run result. */
  public onChange(listener: (graph: dataform.IRunResult) => void): Runner {
    this.changeListeners.push(listener);
    return this;
  }

  /**
   * Starts graph execution (at most once) and arms the run timeout, if configured.
   * The timeout budget is reduced by time already elapsed in a resumed run.
   */
  public execute(): this {
    if (!!this.executionTask) {
      throw new Error("Executor already started.");
    }
    this.executionTask = this.executeGraph();
    if (!!this.graph.runConfig && !!this.graph.runConfig.timeoutMillis) {
      const now = Date.now();
      const runStartMillis = this.runResult.timing?.startTimeMillis?.toNumber?.() || now;
      const elapsedTimeMillis = now - runStartMillis;
      const timeoutMillis = this.graph.runConfig.timeoutMillis - elapsedTimeMillis;
      this.timeout = setTimeout(() => {
        this.timedOut = true;
        this.cancel();
      }, timeoutMillis);
    }
    return this;
  }

  /** Soft-stops the run: no new actions start, but in-flight queries are not cancelled. */
  public stop() {
    this.stopped = true;
  }

  /** Cancels the run: pending actions are skipped and in-flight queries are signalled to cancel. */
  public cancel() {
    this.cancelled = true;
    this.eEmitter.emit(CANCEL_EVENT);
  }

  /** Resolves with the final run result; always clears the timeout timer. */
  public async result(): Promise<dataform.IRunResult> {
    try {
      return await this.executionTask;
    } finally {
      if (!!this.timeout) {
        clearTimeout(this.timeout);
      }
    }
  }

  // Notifies change listeners with a deep clone of the run result, rate-limited to
  // at most once per runnerNotificationPeriodMillis.
  private notifyListeners() {
    if (
      Date.now() - flags.runnerNotificationPeriodMillis.get() <
      this.lastNotificationTimestampMillis
    ) {
      return;
    }
    const runResultClone = deepClone(dataform.RunResult, this.runResult);
    this.lastNotificationTimestampMillis = Date.now();
    this.changeListeners.forEach(listener => listener(runResultClone));
  }

  // Top-level execution: prepare schemas (fresh runs only), run all actions, then
  // derive the final run status. Timeout/cancel take precedence over FAILED.
  private async executeGraph() {
    const timer = Timer.start(this.runResult.timing);

    this.runResult.status = dataform.RunResult.ExecutionStatus.RUNNING;
    this.runResult.timing = timer.current();
    this.notifyListeners();

    // If we're not resuming an existing run, prepare schemas.
    if (this.runResult.actions.length === 0) {
      await this.prepareAllSchemas();
    }

    // Recursively execute all actions as they become executable.
    await this.executeAllActionsReadyForExecution();
    if (this.stopped) {
      return this.runResult;
    }

    this.runResult.timing = timer.end();

    this.runResult.status = dataform.RunResult.ExecutionStatus.SUCCESSFUL;
    if (this.timedOut) {
      this.runResult.status = dataform.RunResult.ExecutionStatus.TIMED_OUT;
    } else if (this.cancelled) {
      this.runResult.status = dataform.RunResult.ExecutionStatus.CANCELLED;
    } else if (
      this.runResult.actions.some(
        action => action.status === dataform.ActionResult.ExecutionStatus.FAILED
      )
    ) {
      this.runResult.status = dataform.RunResult.ExecutionStatus.FAILED;
    }

    return this.runResult;
  }

  // Creates every schema the graph's actions will write to, skipping those that
  // already exist. Grouped per database; all creations run in parallel.
  private async prepareAllSchemas() {
    // Work out all the schemas we are going to need to create first.
    const databaseSchemas = new Map<string, Set<string>>();
    this.graph.actions
      .filter(action => !!action.target && !!action.target.schema)
      .forEach(({ target }) => {
        // This field may not be present for older versions of dataform.
        const trueDatabase = target.database || this.graph.projectConfig.defaultDatabase;
        if (!databaseSchemas.has(trueDatabase)) {
          databaseSchemas.set(trueDatabase, new Set<string>());
        }
        databaseSchemas.get(trueDatabase).add(target.schema);
      });

    // Create all nonexistent schemas.
    await Promise.all(
      Array.from(databaseSchemas.entries()).map(async ([database, schemas]) => {
        const existingSchemas = new Set(await this.dbadapter.schemas(database));
        await Promise.all(
          Array.from(schemas)
            .filter(schema => !existingSchemas.has(schema))
            .map(schema => this.dbadapter.createSchema(database, schema))
        );
      })
    );
  }

  // Scheduler core. Partitions pending actions into executable / skippable / still
  // pending, then recurses after each completion so newly-unblocked actions start
  // as soon as possible.
  private async executeAllActionsReadyForExecution() {
    if (this.stopped) {
      return;
    }

    // If the run has been cancelled, cancel all pending actions.
    if (this.cancelled) {
      const allPendingActions = this.pendingActions;
      this.pendingActions = [];
      allPendingActions.forEach(pendingAction =>
        this.runResult.actions.push({
          name: pendingAction.name,
          target: pendingAction.target,
          status: dataform.ActionResult.ExecutionStatus.SKIPPED,
          tasks: pendingAction.tasks.map(() => ({
            status: dataform.TaskResult.ExecutionStatus.SKIPPED
          }))
        })
      );
      this.notifyListeners();
      return;
    }

    const executableActions = [];
    const skippableActions = [];
    const stillPendingActions = [];
    for (const pendingAction of this.pendingActions) {
      if (
        // An action is executable if all dependencies either: do not exist in the graph, or
        // have executed successfully.
        pendingAction.dependencies.every(
          dependency =>
            !this.allActionNames.has(dependency) ||
            this.successfullyExecutedActionNames.has(dependency)
        )
      ) {
        executableActions.push(pendingAction);
      } else if (
        // An action is skippable if it is not executable and all dependencies either: do not
        // exist in the graph, or have completed execution.
        pendingAction.dependencies.every(
          dependency =>
            !this.allActionNames.has(dependency) || this.executedActionNames.has(dependency)
        )
      ) {
        skippableActions.push(pendingAction);
      } else {
        // Otherwise, the action is still pending.
        stillPendingActions.push(pendingAction);
      }
    }
    this.pendingActions = stillPendingActions;

    await Promise.all([
      (async () => {
        skippableActions.forEach(skippableAction => {
          this.runResult.actions.push({
            name: skippableAction.name,
            target: skippableAction.target,
            status: dataform.ActionResult.ExecutionStatus.SKIPPED,
            tasks: skippableAction.tasks.map(() => ({
              status: dataform.TaskResult.ExecutionStatus.SKIPPED
            }))
          });
        });
        if (skippableActions.length > 0) {
          this.notifyListeners();
          await this.executeAllActionsReadyForExecution();
        }
      })(),
      Promise.all(
        executableActions.map(async executableAction => {
          const actionResult = await this.executeAction(executableAction);
          this.executedActionNames.add(executableAction.name);
          if (isSuccessfulAction(actionResult)) {
            this.successfullyExecutedActionNames.add(executableAction.name);
          }
          await this.executeAllActionsReadyForExecution();
        })
      )
    ]);
  }

  /**
   * Executes a single action: may short-circuit as DISABLED (no tasks) or
   * CACHE_SKIPPED (run cache hit); otherwise runs tasks sequentially under a
   * client lock, resuming after the last completed task if this run is resumed.
   * Finally refreshes cached table metadata when run caching is enabled.
   */
  private async executeAction(action: dataform.IExecutionAction): Promise<dataform.IActionResult> {
    let actionResult: dataform.IActionResult = {
      name: action.name,
      target: action.target,
      tasks: [],
      inputs: action.transitiveInputs.map(target => ({
        target,
        metadata: this.warehouseStateByTarget.has(target)
          ? {
              lastModifiedTimestampMillis: this.warehouseStateByTarget.get(target)
                .lastUpdatedMillis
            }
          : null
      }))
    };

    if (action.tasks.length === 0) {
      actionResult.status = dataform.ActionResult.ExecutionStatus.DISABLED;
      this.runResult.actions.push(actionResult);
      this.notifyListeners();
      return actionResult;
    }

    if (this.shouldCacheSkip(action)) {
      actionResult.status = dataform.ActionResult.ExecutionStatus.CACHE_SKIPPED;
      this.runResult.actions.push(actionResult);
      this.notifyListeners();
      return actionResult;
    }

    // If this action already appears in the (resumed) run result, continue it in place.
    const resumedActionResult = this.runResult.actions.find(
      existingActionResult => existingActionResult.name === action.name
    );
    if (resumedActionResult) {
      actionResult = resumedActionResult;
    } else {
      this.runResult.actions.push(actionResult);
    }
    actionResult.status = dataform.ActionResult.ExecutionStatus.RUNNING;
    const timer = Timer.start(resumedActionResult?.timing);
    actionResult.timing = timer.current();
    this.notifyListeners();

    await this.dbadapter.withClientLock(async client => {
      // Start running tasks from the last executed task (if any), onwards.
      for (const task of action.tasks.slice(actionResult.tasks.length)) {
        if (this.stopped) {
          return actionResult;
        }
        if (
          actionResult.status === dataform.ActionResult.ExecutionStatus.RUNNING &&
          !this.cancelled
        ) {
          const taskStatus = await this.executeTask(client, task, actionResult, {
            bigquery: { labels: action.actionDescriptor?.bigqueryLabels }
          });
          if (taskStatus === dataform.TaskResult.ExecutionStatus.FAILED) {
            actionResult.status = dataform.ActionResult.ExecutionStatus.FAILED;
          } else if (taskStatus === dataform.TaskResult.ExecutionStatus.CANCELLED) {
            actionResult.status = dataform.ActionResult.ExecutionStatus.CANCELLED;
          }
        } else {
          // Once an action has failed/cancelled, remaining tasks are recorded as skipped.
          actionResult.tasks.push({ status: dataform.TaskResult.ExecutionStatus.SKIPPED });
        }
      }
    });

    if (this.stopped) {
      return actionResult;
    }

    if (
      action.actionDescriptor &&
      // Only set metadata if we expect the action to complete in SUCCESSFUL state
      // (i.e. it must still be RUNNING, and not FAILED).
      actionResult.status === dataform.ActionResult.ExecutionStatus.RUNNING &&
      !(this.graph.runConfig && this.graph.runConfig.disableSetMetadata) &&
      action.type === "table" &&
      action.tableType !== "inline"
    ) {
      await this.dbadapter.setMetadata(action);
    }

    let newMetadata: dataform.ITableMetadata;
    if (this.graph.projectConfig.useRunCache) {
      try {
        newMetadata = await this.dbadapter.table(action.target);
      } catch (e) {
        // Ignore Errors thrown when trying to get new table metadata; just allow the relevant
        // warehouseStateAfterRunByTarget entry to be cleared out (below).
      }
    }
    if (newMetadata) {
      this.warehouseStateByTarget.set(action.target, newMetadata);
      actionResult.postExecutionTimestampMillis = newMetadata.lastUpdatedMillis;
      this.notifyListeners();
    } else {
      this.warehouseStateByTarget.delete(action.target);
    }

    if (actionResult.status === dataform.ActionResult.ExecutionStatus.RUNNING) {
      actionResult.status = dataform.ActionResult.ExecutionStatus.SUCCESSFUL;
    }
    actionResult.timing = timer.end();
    this.notifyListeners();
    return actionResult;
  }

  /**
   * Executes one SQL task with retries, recording its result on the parent action.
   * Assertions are verified by checking that the query returns a zero row count.
   * Returns the task's final execution status.
   */
  private async executeTask(
    client: dbadapters.IDbClient,
    task: dataform.IExecutionTask,
    parentAction: dataform.IActionResult,
    options: { bigquery: { labels: { [label: string]: string } } }
  ): Promise<dataform.TaskResult.ExecutionStatus> {
    const timer = Timer.start();
    const taskResult: dataform.ITaskResult = {
      status: dataform.TaskResult.ExecutionStatus.RUNNING,
      timing: timer.current(),
      metadata: {}
    };
    parentAction.tasks.push(taskResult);
    this.notifyListeners();
    try {
      // Retry this function a given number of times, configurable by user.
      // Operations are never retried (they may not be idempotent). The `|| 1`
      // falls back to a single attempt when idempotentActionRetries is unset
      // (undefined + 1 is NaN, which is falsy).
      const { rows, metadata } = await retry(
        () =>
          client.execute(task.statement, {
            onCancel: handleCancel => this.eEmitter.on(CANCEL_EVENT, handleCancel),
            rowLimit: 1,
            bigquery: options.bigquery
          }),
        task.type === "operation" ? 1 : this.graph.projectConfig.idempotentActionRetries + 1 || 1
      );
      taskResult.metadata = metadata;
      if (task.type === "assertion") {
        // We expect that an assertion query returns 1 row, with 1 field that is the row count.
        // We don't really care what that field/column is called.
        const rowCount = rows[0][Object.keys(rows[0])[0]];
        if (rowCount > 0) {
          throw new Error(`Assertion failed: query returned ${rowCount} row(s).`);
        }
      }
      taskResult.status = dataform.TaskResult.ExecutionStatus.SUCCESSFUL;
    } catch (e) {
      // A failure after cancel() was requested is reported as CANCELLED, not FAILED.
      taskResult.status = this.cancelled
        ? dataform.TaskResult.ExecutionStatus.CANCELLED
        : dataform.TaskResult.ExecutionStatus.FAILED;
      taskResult.errorMessage = `${this.graph.projectConfig.warehouse} error: ${e.message}`;
    }
    taskResult.timing = timer.end();
    this.notifyListeners();
    return taskResult.status;
  }

  /**
   * Run-cache check: returns true only when it is provably safe to skip this
   * action because nothing relevant has changed since its last successful run.
   * Any uncertainty (missing metadata, zero timestamps, streaming buffers,
   * non-table declarations upstream) disqualifies the skip.
   */
  private shouldCacheSkip(executionAction: dataform.IExecutionAction): boolean {
    // Run caching must be turned on.
    if (!this.graph.runConfig?.useRunCache) {
      return false;
    }

    // If the action is non-hermetic, always run it.
    if (executionAction.hermeticity === dataform.ActionHermeticity.NON_HERMETIC) {
      return false;
    }

    // This action must have been executed successfully before, and the previous ExecutionAction
    // must be equal to this one.
    if (!this.previouslyExecutedActions.has(executionAction.target)) {
      return false;
    }
    const previouslyExecutedAction = this.previouslyExecutedActions.get(executionAction.target);
    if (
      previouslyExecutedAction.actionResult.status !==
      dataform.ActionResult.ExecutionStatus.SUCCESSFUL
    ) {
      return false;
    }
    if (
      !equals(dataform.ExecutionAction, previouslyExecutedAction.executionAction, executionAction)
    ) {
      return false;
    }

    // The target table for this action must exist, and the table metadata's last update timestamp must match
    // the timestamp recorded after the most recent execution.
    if (!this.warehouseStateByTarget.has(executionAction.target)) {
      return false;
    }
    if (
      this.warehouseStateByTarget.get(executionAction.target).lastUpdatedMillis.equals(0) ||
      previouslyExecutedAction.actionResult.postExecutionTimestampMillis.equals(0) ||
      this.warehouseStateByTarget
        .get(executionAction.target)
        .lastUpdatedMillis.notEquals(
          previouslyExecutedAction.actionResult.postExecutionTimestampMillis
        )
    ) {
      return false;
    }

    const previousInputTimestamps = new StringifiedMap(
      JSONObjectStringifier.create<dataform.ITarget>(),
      previouslyExecutedAction.actionResult.inputs
        .filter(input => !!input.metadata)
        .map(input => [input.target, input.metadata.lastModifiedTimestampMillis])
    );
    for (const transitiveInput of executionAction.transitiveInputs) {
      // No transitive input can be a non-table declaration (because we don't know anything about the
      // data upstream of that non-table).
      if (this.nonTableDeclarationTargets.has(transitiveInput)) {
        return false;
      }

      // All transitive inputs' last change timestamps must match the corresponding timestamps stored
      // in persisted state.
      if (!previousInputTimestamps.has(transitiveInput)) {
        return false;
      }
      if (!this.warehouseStateByTarget.has(transitiveInput)) {
        return false;
      }
      const inputWarehouseState = this.warehouseStateByTarget.get(transitiveInput);
      if (
        this.warehouseStateByTarget.get(transitiveInput).lastUpdatedMillis.equals(0) ||
        previousInputTimestamps.get(transitiveInput).equals(0) ||
        inputWarehouseState.lastUpdatedMillis.notEquals(
          previousInputTimestamps.get(transitiveInput)
        ) ||
        // If the input has a streaming buffer, we cannot trust its last-updated timestamp.
        inputWarehouseState.bigquery?.hasStreamingBuffer
      ) {
        return false;
      }
    }
    return true;
  }
}

/**
 * Minimal stopwatch producing proto ITiming values.
 * Can resume from an existing timing's start time (for resumed runs/actions).
 */
class Timer {
  // NOTE(review): if existingTiming is present but its startTimeMillis is
  // undefined, `.toNumber()` throws — presumably all persisted timings carry a
  // start time; verify against callers.
  public static start(existingTiming?: dataform.ITiming) {
    return new Timer(existingTiming?.startTimeMillis.toNumber() || new Date().valueOf());
  }
  private constructor(readonly startTimeMillis: number) {}

  /** Timing with only the start time set (the interval is still open). */
  public current(): dataform.ITiming {
    return {
      startTimeMillis: Long.fromNumber(this.startTimeMillis)
    };
  }

  /** Timing with both start and end times set (end = now). */
  public end(): dataform.ITiming {
    return {
      startTimeMillis: Long.fromNumber(this.startTimeMillis),
      endTimeMillis: Long.fromNumber(new Date().valueOf())
    };
  }
}
the_stack
import {
  ChangeDetectionStrategy,
  ChangeDetectorRef,
  Component,
  EventEmitter,
  HostListener,
  Input,
  OnInit,
  Output,
  ViewChild,
  OnDestroy,
} from '@angular/core';
import { DevToolsNode, ElementPosition, Events, MessageBus } from 'protocol';
import { FlatTreeControl } from '@angular/cdk/tree';
import { ComponentDataSource, FlatNode } from './component-data-source';
import { isChildOf, parentCollapsed } from './directive-forest-utils';
import { IndexedNode } from './index-forest';
import { CdkVirtualScrollViewport } from '@angular/cdk/scrolling';
import { TabUpdate } from '../../tab-update';
import { Subscription } from 'rxjs';

/**
 * Virtual-scrolled tree view of the app's directive/component forest.
 * Supports selection (including keyboard navigation), text filtering with
 * next/prev match cycling, and two-way highlight synchronization with the
 * inspected page via the MessageBus.
 */
@Component({
  selector: 'ng-directive-forest',
  templateUrl: './directive-forest.component.html',
  styleUrls: ['./directive-forest.component.scss'],
  changeDetection: ChangeDetectionStrategy.OnPush,
})
export class DirectiveForestComponent implements OnInit, OnDestroy {
  // Updates the data source; if the tree actually changed and something was
  // selected, try to re-select the equivalent node in the new forest.
  @Input() set forest(forest: DevToolsNode[]) {
    this._latestForest = forest;
    const result = this._updateForest(forest);
    const changed =
      result.movedItems.length || result.newItems.length || result.removedItems.length;
    if (this.currentSelectedElement && changed) {
      this._reselectNodeOnUpdate();
    }
  }
  @Input() currentSelectedElement: IndexedNode;
  // Toggling comment-node visibility re-applies the most recent forest.
  @Input() set showCommentNodes(show: boolean) {
    this._showCommentNodes = show;
    this.forest = this._latestForest;
  }

  @Output() selectNode = new EventEmitter<IndexedNode | null>();
  @Output() selectDomElement = new EventEmitter<IndexedNode>();
  @Output() setParents = new EventEmitter<FlatNode[] | null>();
  @Output() highlightComponent = new EventEmitter<ElementPosition>();
  @Output() removeComponentHighlight = new EventEmitter<void>();
  @Output() toggleInspector = new EventEmitter<void>();

  @ViewChild(CdkVirtualScrollViewport) viewport: CdkVirtualScrollViewport;

  // Matches nothing by default ('.^' can never match).
  filterRegex = new RegExp('.^');
  currentlyMatchedIndex = -1;

  selectedNode: FlatNode | null = null;
  parents: FlatNode[];

  // Component id currently highlighted from the page (hover in the inspected app).
  private _highlightIDinTreeFromElement: number | null = null;
  private _tabUpdateSubscription: Subscription;
  private _showCommentNodes = false;
  private _latestForest: DevToolsNode[];

  set highlightIDinTreeFromElement(id: number | null) {
    this._highlightIDinTreeFromElement = id;
    // OnPush: highlight changes come from outside Angular, so mark for check.
    this._cdr.markForCheck();
  }

  readonly treeControl = new FlatTreeControl<FlatNode>(
    (node) => node.level,
    (node) => node.expandable
  );
  readonly dataSource = new ComponentDataSource(this.treeControl);
  // Fixed row height in px; used for manual scroll-into-view math below.
  readonly itemHeight = 18;

  private _initialized = false;

  constructor(
    private _tabUpdate: TabUpdate,
    private _messageBus: MessageBus<Events>,
    private _cdr: ChangeDetectorRef
  ) {}

  ngOnInit(): void {
    this.subscribeToInspectorEvents();
    // When the devtools tab is re-activated, reset scroll and re-measure the
    // viewport (it may have been hidden and thus mis-sized).
    this._tabUpdateSubscription = this._tabUpdate.tabUpdate$.subscribe(() => {
      if (this.viewport) {
        setTimeout(() => {
          this.viewport.scrollToIndex(0);
          this.viewport.checkViewportSize();
        });
      }
    });
  }

  ngOnDestroy(): void {
    if (this._tabUpdateSubscription) {
      this._tabUpdateSubscription.unsubscribe();
    }
  }

  // Wires up MessageBus events coming from the inspected page.
  subscribeToInspectorEvents(): void {
    this._messageBus.on('selectComponent', (id: number) => {
      this.selectNodeByComponentId(id);
      this.toggleInspector.emit();
      this.expandParents();
    });

    this._messageBus.on('highlightComponent', (id: number) => {
      this.highlightIDinTreeFromElement = id;
    });

    this._messageBus.on('removeComponentHighlight', () => {
      this.highlightIDinTreeFromElement = null;
    });
  }

  selectNodeByComponentId(id: number): void {
    const foundNode = this.dataSource.data.find((node) => node.original.component?.id === id);
    if (foundNode) {
      this.handleSelect(foundNode);
    }
  }

  handleSelect(node: FlatNode): void {
    // Keep match cycling in sync with a manual selection.
    this.currentlyMatchedIndex = this.dataSource.data.findIndex(
      (matchedNode) => matchedNode.id === node.id
    );
    this.selectAndEnsureVisible(node);
  }

  handleSelectDomElement(node: FlatNode): void {
    this.selectDomElement.emit(node.original);
  }

  // Selects the node and manually scrolls it into the virtual-scroll viewport
  // if it is above or below the currently visible range.
  selectAndEnsureVisible(node: FlatNode): void {
    this.select(node);

    const scrollParent = this.viewport.elementRef.nativeElement;
    // The top most point we see an element
    const top = scrollParent.scrollTop;
    // That's the bottom most point we currently see an element.
    const parentHeight = scrollParent.offsetHeight;
    const bottom = top + parentHeight;
    const idx = this.dataSource.expandedDataValues.findIndex((el) => el.id === node.id);
    // The node might be hidden.
    if (idx < 0) {
      return;
    }
    const itemTop = idx * this.itemHeight;
    if (itemTop < top) {
      scrollParent.scrollTo({ top: itemTop });
    } else if (bottom < itemTop + this.itemHeight) {
      scrollParent.scrollTo({ top: itemTop - parentHeight + this.itemHeight });
    }
  }

  select(node: FlatNode): void {
    this.populateParents(node.position);
    this.selectNode.emit(node.original);
    this.selectedNode = node;
  }

  clearSelectedNode(): void {
    this.selectNode.emit(null);
    this.selectedNode = null;
    this.parents = [];
    this.setParents.emit(null);
  }

  // After a forest update, re-select the previously selected node if it still
  // exists; otherwise clear the selection entirely.
  private _reselectNodeOnUpdate(): void {
    const nodeThatStillExists = this.dataSource.getFlatNodeFromIndexedNode(
      this.currentSelectedElement
    );
    if (nodeThatStillExists) {
      this.select(nodeThatStillExists);
    } else {
      this.clearSelectedNode();
    }
  }

  // Pushes the new forest into the data source. On the very first non-empty
  // forest, expands everything; thereafter only newly added items are expanded.
  private _updateForest(
    forest: DevToolsNode[]
  ): { newItems: FlatNode[]; movedItems: FlatNode[]; removedItems: FlatNode[] } {
    const result = this.dataSource.update(forest, this._showCommentNodes);
    if (!this._initialized && forest && forest.length) {
      this.treeControl.expandAll();
      this._initialized = true;
      result.newItems.forEach((item) => (item.newItem = false));
    }
    // We want to expand them once they are rendered.
    result.newItems.forEach((item) => {
      this.treeControl.expand(item);
    });
    return result;
  }

  // Rebuilds the ancestor chain (breadcrumbs) for the given tree position.
  populateParents(position: ElementPosition): void {
    this.parents = [];
    for (let i = 1; i <= position.length; i++) {
      const current = position.slice(0, i);
      const selectedNode = this.dataSource.data.find(
        (item) => item.position.toString() === current.toString()
      );
      // We might not be able to find the parent if the user has hidden the comment nodes.
      if (selectedNode) {
        this.parents.push(selectedNode);
      }
    }
    this.setParents.emit(this.parents);
  }

  @HostListener('document:keydown.ArrowUp', ['$event'])
  navigateUp(event: KeyboardEvent): void {
    if (this.isEditingDirectiveState(event)) {
      return;
    }
    event.preventDefault();

    const data = this.dataSource.expandedDataValues;
    let prevIdx = data.findIndex((e) => this.selectedNode && e.id === this.selectedNode.id) - 1;
    if (prevIdx < 0) {
      return;
    }
    let prevNode = data[prevIdx];
    const currentNode = data[prevIdx + 1];
    // Same-or-shallower previous node is directly reachable.
    if (prevNode.position.length <= currentNode.position.length) {
      return this.selectAndEnsureVisible(data[prevIdx]);
    }
    // Otherwise walk upwards past nodes hidden inside collapsed parents.
    // NOTE(review): the loop can decrement prevIdx to -1, making data[prevIdx]
    // undefined — presumably index 0 is never inside a collapsed parent; verify.
    while (prevIdx >= 0 && parentCollapsed(prevIdx, data, this.treeControl)) {
      prevIdx--;
      prevNode = data[prevIdx];
    }
    this.selectAndEnsureVisible(data[prevIdx]);
  }

  @HostListener('document:keydown.ArrowDown', ['$event'])
  navigateDown(event: KeyboardEvent): void {
    if (this.isEditingDirectiveState(event)) {
      return;
    }
    event.preventDefault();

    const data = this.dataSource.expandedDataValues;
    let idx = data.findIndex((e) => this.selectedNode && e.id === this.selectedNode.id);
    const currentNode = data[idx];
    if (!this.treeControl.isExpanded(currentNode) && currentNode.expandable) {
      // Collapsed subtree: jump past all descendants to the next sibling/uncle.
      for (let i = idx + 1; i < data.length; i++) {
        const node = data[i];
        if (!isChildOf(node.position, currentNode.position)) {
          idx = i;
          break;
        }
      }
    } else {
      idx++;
    }

    if (idx >= data.length) {
      return;
    }
    this.selectAndEnsureVisible(data[idx]);
  }

  @HostListener('document:keydown.ArrowLeft', ['$event'])
  collapseCurrent(event: KeyboardEvent): void {
    if (this.isEditingDirectiveState(event)) {
      return;
    }
    if (!this.selectedNode) {
      return;
    }
    this.treeControl.collapse(this.selectedNode);
    event.preventDefault();
  }

  @HostListener('document:keydown.ArrowRight', ['$event'])
  expandCurrent(event: KeyboardEvent): void {
    if (this.isEditingDirectiveState(event)) {
      return;
    }
    if (!this.selectedNode) {
      return;
    }
    this.treeControl.expand(this.selectedNode);
    event.preventDefault();
  }

  // True when keystrokes should not drive tree navigation: the user is typing
  // in an input, or nothing is selected yet.
  isEditingDirectiveState(event: KeyboardEvent): boolean {
    return (event.target as Element).tagName === 'INPUT' || !this.selectedNode;
  }

  isSelected(node: FlatNode): boolean {
    return !!this.selectedNode && this.selectedNode.id === node.id;
  }

  // A node matches the filter if either its name or its directive list matches.
  isMatched(node: FlatNode): boolean {
    return (
      this.filterRegex.test(node.name.toLowerCase()) ||
      this.filterRegex.test(node.directives.toLowerCase())
    );
  }

  // Compiles the (lowercased) filter text; invalid regexes fall back to the
  // match-nothing pattern.
  handleFilter(filterText: string): void {
    this.currentlyMatchedIndex = -1;
    try {
      this.filterRegex = new RegExp(filterText.toLowerCase() || '.^');
    } catch {
      this.filterRegex = new RegExp('.^');
    }
  }

  private _findMatchedNodes(): number[] {
    const indexesOfMatchedNodes: number[] = [];
    for (let i = 0; i < this.dataSource.data.length; i++) {
      if (this.isMatched(this.dataSource.data[i])) {
        indexesOfMatchedNodes.push(i);
      }
    }
    return indexesOfMatchedNodes;
  }

  get hasMatched(): boolean {
    return this._findMatchedNodes().length > 0;
  }

  // Cycles forward through filter matches (wraps around). If the match is
  // hidden inside collapsed ancestors, expand the selection's parents.
  nextMatched(): void {
    const indexesOfMatchedNodes = this._findMatchedNodes();
    this.currentlyMatchedIndex = (this.currentlyMatchedIndex + 1) % indexesOfMatchedNodes.length;
    const indexToSelect = indexesOfMatchedNodes[this.currentlyMatchedIndex];
    const nodeToSelect = this.dataSource.data[indexToSelect];
    if (indexToSelect !== undefined) {
      this.treeControl.expand(nodeToSelect);
      this.selectAndEnsureVisible(nodeToSelect);
    }
    const nodeIsVisible = this.dataSource.expandedDataValues.find((node) => node === nodeToSelect);
    if (!nodeIsVisible) {
      this.expandParents();
    }
  }

  // Cycles backward through filter matches (wraps around).
  prevMatched(): void {
    const indexesOfMatchedNodes = this._findMatchedNodes();
    this.currentlyMatchedIndex =
      (this.currentlyMatchedIndex - 1 + indexesOfMatchedNodes.length) %
      indexesOfMatchedNodes.length;
    const indexToSelect = indexesOfMatchedNodes[this.currentlyMatchedIndex];
    const nodeToSelect = this.dataSource.data[indexToSelect];
    if (indexToSelect !== undefined) {
      this.treeControl.expand(nodeToSelect);
      this.selectAndEnsureVisible(nodeToSelect);
    }
    const nodeIsVisible = this.dataSource.expandedDataValues.find((node) => node === nodeToSelect);
    if (!nodeIsVisible) {
      this.expandParents();
    }
  }

  expandParents(): void {
    this.parents.forEach((parent) => this.treeControl.expand(parent));
  }

  // Tree-side hover: highlight the element on the page and drop any
  // page-originated highlight in the tree.
  highlightNode(position: ElementPosition): void {
    this._highlightIDinTreeFromElement = null;
    this.highlightComponent.emit(position);
  }

  removeHighlight(): void {
    this.removeComponentHighlight.emit();
  }

  // True when this node corresponds to the component being hovered on the page.
  isHighlighted(node: FlatNode): boolean {
    return (
      !!this._highlightIDinTreeFromElement &&
      this._highlightIDinTreeFromElement === node.original.component?.id
    );
  }

  isElement(node: FlatNode): boolean | null {
    return node.original.component && node.original.component.isElement;
  }
}
the_stack