text
stringlengths
2.5k
6.39M
kind
stringclasses
3 values
// NOTE(review): this appears to be compiler-generated declaration output (a .d.ts build
// artifact for yargs' internal API) — TODO confirm; if so, edit the source .ts instead,
// as comments added here will not survive regeneration.
import { CommandInstance, CommandHandler, CommandBuilderDefinition, CommandBuilder, CommandHandlerCallback, FinishCommandHandler } from './command';
import { Dictionary } from './common-types';
import { Arguments as ParserArguments, DetailedArguments as ParserDetailedArguments, Configuration as ParserConfiguration, Options as ParserOptions, ConfigCallback, CoerceCallback } from 'yargs-parser';
import { YError } from './yerror';
import { UsageInstance, FailureFunction } from './usage';
import { CompletionFunction } from './completion';
import { ValidationInstance, KeyOrPos } from './validation';
import { Y18N } from 'y18n';
import { MiddlewareCallback, Middleware } from './middleware';
import { RequireDirectoryOptions } from 'require-directory';
/** Factory for a yargs instance; `processArgs` may be a raw string or pre-split argv. */
export declare function Yargs(processArgs?: string | string[], cwd?: string, parentRequire?: NodeRequire): YargsInstance;
export declare function rebase(base: string, dir: string): string;
/** Instance of the yargs module. */
export interface YargsInstance {
    $0: string;
    argv: Arguments;
    customScriptName: boolean;
    /** `false` until a parse has happened (see the `parse` overloads below). */
    parsed: DetailedArguments | false;
    // Underscore-prefixed members are internal API surface, not for end users.
    _copyDoubleDash<T extends Arguments | Promise<Arguments>>(argv: T): T;
    _getLoggerInstance(): LoggerInstance;
    // NOTE(review): boxed `Object` type; generated output — upstream uses the loose type.
    _getParseContext(): Object;
    _hasOutput(): boolean;
    _hasParseCallback(): boolean;
    _parseArgs: {
        (args: null, shortCircuit: null, _calledFromCommand: boolean, commandIndex?: number): Arguments | Promise<Arguments>;
        (args: string | string[], shortCircuit?: boolean): Arguments | Promise<Arguments>;
    };
    _runValidation(argv: Arguments, aliases: Dictionary<string[]>, positionalMap: Dictionary<string[]>, parseErrors: Error | null, isDefaultCommand?: boolean): void;
    _setHasOutput(): void;
    addHelpOpt: {
        (opt?: string | false): YargsInstance;
        (opt?: string, msg?: string): YargsInstance;
    };
    addShowHiddenOpt: {
        (opt?: string | false): YargsInstance;
        (opt?: string, msg?: string): YargsInstance;
    };
    alias: {
        (keys: string | string[], aliases: string | string[]): YargsInstance;
        (keyAliases: Dictionary<string | string[]>): YargsInstance;
    };
    array(keys: string | string[]): YargsInstance;
    boolean(keys: string | string[]): YargsInstance;
    check(f: (argv: Arguments, aliases: Dictionary<string[]>) => any, _global?: boolean): YargsInstance;
    choices: {
        (keys: string | string[], choices: string | string[]): YargsInstance;
        (keyChoices: Dictionary<string | string[]>): YargsInstance;
    };
    coerce: {
        (keys: string | string[], coerceCallback: CoerceCallback): YargsInstance;
        (keyCoerceCallbacks: Dictionary<CoerceCallback>): YargsInstance;
    };
    command(cmd: string | string[], description: CommandHandler['description'], builder?: CommandBuilderDefinition | CommandBuilder, handler?: CommandHandlerCallback, commandMiddleware?: Middleware[], deprecated?: boolean): YargsInstance;
    commandDir(dir: string, opts?: RequireDirectoryOptions<any>): YargsInstance;
    completion: {
        (cmd?: string, fn?: CompletionFunction): YargsInstance;
        (cmd?: string, desc?: string | false, fn?: CompletionFunction): YargsInstance;
    };
    config: {
        (config: Dictionary): YargsInstance;
        (keys?: string | string[], configCallback?: ConfigCallback): YargsInstance;
        (keys?: string | string[], msg?: string, configCallback?: ConfigCallback): YargsInstance;
    };
    conflicts: {
        (key: string, conflictsWith: string | string[]): YargsInstance;
        (keyConflicts: Dictionary<string | string[]>): YargsInstance;
    };
    count(keys: string | string[]): YargsInstance;
    default: {
        (key: string, value: any, defaultDescription?: string): YargsInstance;
        (keys: string[], value: Exclude<any, Function>): YargsInstance;
        (keys: Dictionary<any>): YargsInstance;
    };
    /** Alias of `default`. */
    defaults: YargsInstance['default'];
    demand: {
        (min: number, max?: number | string, msg?: string): YargsInstance;
        (keys: string | string[], msg?: string | true): YargsInstance;
        (keys: string | string[], max: string[], msg?: string | true): YargsInstance;
        (keyMsgs: Dictionary<string | undefined>): YargsInstance;
        (keyMsgs: Dictionary<string | undefined>, max: string[], msg?: string): YargsInstance;
    };
    demandCommand(): YargsInstance;
    demandCommand(min: number, minMsg?: string): YargsInstance;
    demandCommand(min: number, max: number, minMsg?: string | null, maxMsg?: string | null): YargsInstance;
    demandOption: {
        (keys: string | string[], msg?: string): YargsInstance;
        (keyMsgs: Dictionary<string | undefined>): YargsInstance;
    };
    deprecateOption(option: string, message?: string | boolean): YargsInstance;
    describe: {
        (keys: string | string[], description?: string): YargsInstance;
        (keyDescriptions: Dictionary<string>): YargsInstance;
    };
    detectLocale(detect: boolean): YargsInstance;
    env(prefix?: string | false): YargsInstance;
    /** Alias of `epilogue`. */
    epilog: YargsInstance['epilogue'];
    epilogue(msg: string): YargsInstance;
    example(cmd: string | [string, string?][], description?: string): YargsInstance;
    exit(code: number, err?: YError | string): void;
    exitProcess(enabled: boolean): YargsInstance;
    fail(f: FailureFunction): YargsInstance;
    getCommandInstance(): CommandInstance;
    getCompletion(args: string[], done: (completions: string[]) => any): void;
    getContext(): Context;
    getDemandedCommands(): Options['demandedCommands'];
    getDemandedOptions(): Options['demandedOptions'];
    getDeprecatedOptions(): Options['deprecatedOptions'];
    getDetectLocale(): boolean;
    getExitProcess(): boolean;
    getGroups(): Dictionary<string[]>;
    getHandlerFinishCommand(): FinishCommandHandler | null;
    getOptions(): Options;
    getParserConfiguration(): Configuration;
    getStrict(): boolean;
    getStrictCommands(): boolean;
    getUsageInstance(): UsageInstance;
    getValidationInstance(): ValidationInstance;
    global(keys: string | string[], global?: boolean): YargsInstance;
    group(keys: string | string[], groupName: string): YargsInstance;
    /** Alias of `addHelpOpt`. */
    help: YargsInstance['addHelpOpt'];
    hide(key: string): YargsInstance;
    implies: {
        (key: string, implication: KeyOrPos | KeyOrPos[]): YargsInstance;
        (keyImplications: Dictionary<KeyOrPos | KeyOrPos[]>): YargsInstance;
    };
    locale: {
        (): string;
        (locale: string): YargsInstance;
    };
    middleware(callback: MiddlewareCallback | MiddlewareCallback[], applyBeforeValidation?: boolean): YargsInstance;
    nargs: {
        (keys: string | string[], nargs: number): YargsInstance;
        (keyNargs: Dictionary<number>): YargsInstance;
    };
    normalize(keys: string | string[]): YargsInstance;
    number(keys: string | string[]): YargsInstance;
    onFinishCommand(f: FinishCommandHandler): YargsInstance;
    option: {
        (key: string, optionDefinition: OptionDefinition): YargsInstance;
        (keyOptionDefinitions: Dictionary<OptionDefinition>): YargsInstance;
    };
    /** Alias of `option`. */
    options: YargsInstance['option'];
    parse: {
        (): Arguments | Promise<Arguments>;
        (args: string | string[], context: object, parseCallback?: ParseCallback): Arguments | Promise<Arguments>;
        (args: string | string[], parseCallback: ParseCallback): Arguments | Promise<Arguments>;
        (args: string | string[], shortCircuit: boolean): Arguments | Promise<Arguments>;
    };
    parserConfiguration(config: Configuration): YargsInstance;
    pkgConf(key: string, rootPath?: string): YargsInstance;
    positional(key: string, positionalDefinition: PositionalDefinition): YargsInstance;
    recommendCommands(recommend: boolean): YargsInstance;
    /** Alias of `demand`. */
    require: YargsInstance['demand'];
    /** Alias of `demand`. */
    required: YargsInstance['demand'];
    requiresArg(keys: string | string[] | Dictionary): YargsInstance;
    reset(aliases?: DetailedArguments['aliases']): YargsInstance;
    resetOptions(aliases?: DetailedArguments['aliases']): YargsInstance;
    scriptName(scriptName: string): YargsInstance;
    showCompletionScript($0?: string, cmd?: string): YargsInstance;
    showHelp(level: 'error' | 'log' | ((message: string) => void)): YargsInstance;
    showHelpOnFail: {
        (message?: string): YargsInstance;
        (enabled: boolean, message: string): YargsInstance;
    };
    /** Alias of `addShowHiddenOpt`. */
    showHidden: YargsInstance['addShowHiddenOpt'];
    skipValidation(keys: string | string[]): YargsInstance;
    strict(enable?: boolean): YargsInstance;
    strictCommands(enable?: boolean): YargsInstance;
    string(key: string | string[]): YargsInstance;
    terminalWidth(): number | null;
    updateStrings(obj: Dictionary<string>): YargsInstance;
    /** Alias of `updateStrings`. */
    updateLocale: YargsInstance['updateStrings'];
    usage: {
        (msg: string | null): YargsInstance;
        (msg: string, description: CommandHandler['description'], builder?: CommandBuilderDefinition | CommandBuilder, handler?: CommandHandlerCallback): YargsInstance;
    };
    version: {
        (ver?: string | false): YargsInstance;
        (key?: string, ver?: string): YargsInstance;
        (key?: string, msg?: string, ver?: string): YargsInstance;
    };
    wrap(cols: number | null | undefined): YargsInstance;
}
export declare function isYargsInstance(y: YargsInstance | void): y is YargsInstance;
/** Yargs' context. */
export interface Context {
    commands: string[];
    files: string[];
    fullCommands: string[];
}
declare type LoggerInstance = Pick<Console, 'error' | 'log'>;
/** Aggregated option state tracked by a yargs instance, extending the parser's options. */
export interface Options extends ParserOptions {
    __: Y18N['__'];
    alias: Dictionary<string[]>;
    array: string[];
    boolean: string[];
    choices: Dictionary<string[]>;
    config: Dictionary<ConfigCallback | boolean>;
    configObjects: Dictionary[];
    configuration: Configuration;
    count: string[];
    defaultDescription: Dictionary<string | undefined>;
    demandedCommands: Dictionary<{
        min: number;
        max: number;
        minMsg?: string | null;
        maxMsg?: string | null;
    }>;
    demandedOptions: Dictionary<string | undefined>;
    deprecatedOptions: Dictionary<string | boolean | undefined>;
    hiddenOptions: string[];
    /** Manually set keys */
    key: Dictionary<boolean | string>;
    local: string[];
    normalize: string[];
    number: string[];
    showHiddenOpt: string;
    skipValidation: string[];
    string: string[];
}
export interface Configuration extends Partial<ParserConfiguration> {
    /** Should a config object be deep-merged with the object config it extends? */
    'deep-merge-config'?: boolean;
    /** Should commands be sorted in help? */
    'sort-commands'?: boolean;
}
/** Per-option settings accepted by `option()`; several keys are aliases of each other. */
export interface OptionDefinition {
    alias?: string | string[];
    array?: boolean;
    boolean?: boolean;
    choices?: string | string[];
    coerce?: CoerceCallback;
    config?: boolean;
    configParser?: ConfigCallback;
    conflicts?: string | string[];
    count?: boolean;
    default?: any;
    defaultDescription?: string;
    deprecate?: string | boolean;
    deprecated?: OptionDefinition['deprecate'];
    desc?: string;
    describe?: OptionDefinition['desc'];
    description?: OptionDefinition['desc'];
    demand?: string | true;
    demandOption?: OptionDefinition['demand'];
    global?: boolean;
    group?: string;
    hidden?: boolean;
    implies?: string | number | KeyOrPos[];
    nargs?: number;
    normalize?: boolean;
    number?: boolean;
    require?: OptionDefinition['demand'];
    required?: OptionDefinition['demand'];
    requiresArg?: boolean;
    skipValidation?: boolean;
    string?: boolean;
    type?: 'array' | 'boolean' | 'count' | 'number' | 'string';
}
/** Subset of OptionDefinition valid for positional arguments. */
interface PositionalDefinition extends Pick<OptionDefinition, 'alias' | 'array' | 'coerce' | 'choices' | 'conflicts' | 'default' | 'defaultDescription' | 'demand' | 'desc' | 'describe' | 'description' | 'implies' | 'normalize'> {
    type?: 'boolean' | 'number' | 'string';
}
interface ParseCallback {
    (err: YError | string | undefined | null, argv: Arguments | Promise<Arguments>, output: string): void;
}
export interface Arguments extends ParserArguments {
    /** The script name or node command */
    $0: string;
}
export interface DetailedArguments extends ParserDetailedArguments {
    argv: Arguments;
}
export {};
the_stack
// Unit tests for BaseEslintEngine, exercised through a concrete test-harness subclass.
// Uses chai for assertions, ts-mockito for mocking, and mocha-style describe/it/before/
// afterEach globals (presumably supplied by the test runner — not imported here).
import { BaseEslintEngine, EslintStrategy } from "../../../src/lib/eslint/BaseEslintEngine";
import {StaticDependencies} from "../../../src/lib/eslint/EslintCommons";
import { RuleTarget, ESRule, ESReport, RuleViolation } from '../../../src/types';
import { expect } from 'chai';
import { CLIEngine } from 'eslint';
import {CUSTOM_CONFIG} from '../../../src/Constants';
import Mockito = require('ts-mockito');
import * as TestOverrides from '../../test-related-lib/TestOverrides';
import * as DataGenerator from './EslintTestDataGenerator';

TestOverrides.initializeTestSetup();

const engineName = 'TestHarnessEngine';

// Minimal concrete subclass so the abstract BaseEslintEngine can be instantiated.
// init() is deliberately unimplemented; tests go through initializeContents() instead.
class TestHarnessEngine extends BaseEslintEngine {
	public init(): Promise<void> {
		throw new Error("Method not implemented.");
	}

	// Exposes the protected base-class initializer to the test helpers below.
	public async initializeContents(strategy: EslintStrategy, baseDependencies: StaticDependencies) {
		await super.initializeContents(strategy, baseDependencies);
	}

	public getName(): string {
		return engineName;
	}
}

// Shared module-level mock: stubbed in the helper functions below and reset in the
// afterEach hooks — suites that use it are sensitive to stub/reset ordering.
const MockStrategy: EslintStrategy = Mockito.mock<EslintStrategy>();
const emptyEngineOptions = new Map<string, string>();
const configFilePath = '/some/file/path/config.json';
const engineOptionsWithEslintCustom = new Map<string, string>([
	[CUSTOM_CONFIG.EslintConfig, configFilePath]
]);
const engineOptionsWithPmdCustom = new Map<string, string>([
	[CUSTOM_CONFIG.PmdConfig, configFilePath]
]);

describe('Tests for BaseEslintEngine', () => {
	describe('Tests for shouldEngineRun()', () => {
		afterEach(() => {
			Mockito.reset(MockStrategy);
		});

		it('should decide to not run when target is empty', async () => {
			//instantiate abstract engine
			const mockStrategy = Mockito.instance(MockStrategy);
			const engine = await createDummyEngine(mockStrategy);

			const shouldEngineRun = engine.shouldEngineRun(
				[DataGenerator.getDummyRuleGroup()],
				[DataGenerator.getDummyRule()],
				[], // no target
				emptyEngineOptions
			);

			expect(shouldEngineRun).to.be.false;
		});

		it('should decide to not run when rules are empty', async () => {
			//instantiate abstract engine
			const mockStrategy = Mockito.instance(MockStrategy);
			const engine = await createDummyEngine(mockStrategy);

			const shouldEngineRun = engine.shouldEngineRun(
				[DataGenerator.getDummyRuleGroup()],
				[], //no rules
				[DataGenerator.getDummyTarget()],
				emptyEngineOptions
			);

			expect(shouldEngineRun).to.be.false;
		});

		it('should decide to not run when EngineOptions has eslint custom config', async () => {
			//instantiate abstract engine
			const mockStrategy = Mockito.instance(MockStrategy);
			const engine = await createDummyEngine(mockStrategy);
			const engineOptions = new Map<string, string>();
			engineOptions.set(CUSTOM_CONFIG.EslintConfig, '/some/dummy/path');

			const shouldEngineRun = engine.shouldEngineRun(
				[DataGenerator.getDummyRuleGroup()],
				[DataGenerator.getDummyRule()],
				[DataGenerator.getDummyTarget()],
				engineOptions
			);

			expect(shouldEngineRun).to.be.false;
		});

		it('should decide to run when target, rules and options look right', async () => {
			//instantiate abstract engine
			const mockStrategy = Mockito.instance(MockStrategy);
			const engine = await createDummyEngine(mockStrategy);

			const shouldEngineRun = engine.shouldEngineRun(
				[DataGenerator.getDummyRuleGroup()],
				[DataGenerator.getDummyRule()],
				[DataGenerator.getDummyTarget()],
				emptyEngineOptions
			);

			expect(shouldEngineRun).to.be.true;
		});
	});

	describe('Tests for run()', () => {
		describe('Related to target input', () => {
			afterEach(() => {
				Mockito.reset(MockStrategy);
			});

			it('should use target as current working directory if target is a directory', async () => {
				const isDir = true;
				const target = DataGenerator.getDummyTarget(isDir);
				const StaticDependenciesMock = mockStaticDependencies(target, getDummyCliEngine());
				// instantiate abstract engine
				const engine = await createAbstractEngine(target, StaticDependenciesMock);

				await engine.run(
					[DataGenerator.getDummyRuleGroup()],
					[DataGenerator.getDummyRule()],
					[target],
					emptyEngineOptions
				);

				Mockito.verify(StaticDependenciesMock.resolveTargetPath(target.target)).called();
				// verify config — the captured first argument to createCLIEngine should carry
				// the target directory as eslint's cwd.
				const capturedConfig = Mockito.capture(StaticDependenciesMock.createCLIEngine).first();
				expect(capturedConfig[0]).instanceOf(Object);
				const config = <Object>capturedConfig[0];
				expect(config['cwd']).equals(target.target);
			});
		});

		describe('Related to rules input', () => {
			afterEach(() => {
				Mockito.reset(MockStrategy);
			});

			it('should not execute when rules are empty', async () => {
				// instantiate abstract engine
				const mockStrategy = Mockito.instance(MockStrategy);
				const engine = await createDummyEngine(mockStrategy);

				const results = await engine.run(
					[DataGenerator.getDummyRuleGroup()],
					[], // no rules
					[DataGenerator.getDummyTarget(true)],
					emptyEngineOptions
				);

				expect(results).to.be.empty;
			});
		});

		describe('Rule mapping', () => {
			afterEach(() => {
				Mockito.reset(MockStrategy);
			});

			it('should map Eslint-rule to sfdx scanner rule structure', async () => {
				const target = DataGenerator.getDummyTarget();
				const ruleId = 'ruleId';
				const category = 'myCategory';
				const description = 'rule description';
				const message = 'this is a message';
				const esRuleMap = DataGenerator.getDummyEsRuleMap(ruleId, category, description);
				const esReport = DataGenerator.getDummyEsReport([DataGenerator.getDummyEsResult([DataGenerator.getDummyEsMessage(ruleId, message)])]);
				const cliEngineMock = getDummyCliEngine(esRuleMap, esReport);
				const StaticDependenciesMock = mockStaticDependencies(target, cliEngineMock);
				const engine = await createAbstractEngine(target, StaticDependenciesMock);

				const results = await engine.run(
					[DataGenerator.getDummyRuleGroup()],
					[DataGenerator.getDummyRule()],
					[target],
					emptyEngineOptions
				);

				// verify results structure and content
				expect(results.length).greaterThan(0);
				const result = results[0];
				// TODO: verify engineName - right now, unless we use a real ENGINE enum type, this won't work
				expect(result.fileName).equals(esReport.results[0].filePath);
				expect(result.violations.length).greaterThan(0);
				const violation = result.violations[0];
				expect(violation.ruleName).equals(ruleId);
				expect(violation.message).equals(message);
				expect(violation.category).equals(category);
			});
		});
	});

	describe('Tests for getCatalog()', () => {
		describe('Related to mapping all rules to Catalog', () => {
			afterEach(() => {
				Mockito.reset(MockStrategy);
			});

			it('should map ESRules to Catalog', async () => {
				const target = DataGenerator.getDummyTarget();
				const ruleId = 'ruleId';
				const category = 'myCategory';
				const description = 'some lengthy description';
				const esRuleMap = DataGenerator.getDummyEsRuleMap(ruleId, category, description);
				const cliEngineMock = getDummyCliEngine(esRuleMap);
				const StaticDependenciesMock = mockStaticDependencies(target, cliEngineMock);
				const engine = await createAbstractEngine(target, StaticDependenciesMock);

				// execute
				const catalog = await engine.getCatalog();

				//verify
				expect(catalog.categories.length).equals(1);
				const catalogCategory = catalog.categories[0];
				expect(catalogCategory.engine).equals(engineName);
				expect(catalogCategory.name).equals(category);
				expect(catalog.rules.length).equals(1);
				const catalogRule = catalog.rules[0];
				expect(catalogRule.name).equals(ruleId);
				expect(catalogRule.description).equals(description);
			});

			it('should add rule to an existing category if applicable', async () => {
				const target = DataGenerator.getDummyTarget();
				const category = 'myCategory';
				// Two rules sharing one category string should land in a single catalog category.
				const esRuleMap = new Map<string, ESRule>();
				const ruleId1 = 'ruleId1';
				const description1 = 'some lengthy description';
				const esRule1 = DataGenerator.getDummyEsRule(category, description1);
				esRuleMap.set(ruleId1, esRule1);
				const ruleId2 = 'ruleId2';
				const description2 = 'some lengthy description';
				const esRule2 = DataGenerator.getDummyEsRule(category, description2);
				esRuleMap.set(ruleId2, esRule2);
				const cliEngineMock = getDummyCliEngine(esRuleMap);
				const StaticDependenciesMock = mockStaticDependencies(target, cliEngineMock);
				const engine = await createAbstractEngine(target, StaticDependenciesMock);

				// execute
				const catalog = await engine.getCatalog();

				// verify
				expect(catalog.categories.length).equals(1);
				expect(catalog.categories[0].name).equals(category);
				expect(catalog.rules.length).equals(2, 'Rules with the same category string should be grouped together in the catalog');
			});
		});
	});

	// NOTE(review): duplicate suite title — a suite named 'Tests for shouldEngineRun()'
	// already exists near the top of this file; consider renaming one of them.
	describe('Tests for shouldEngineRun()', () => {
		const mockStrategy = Mockito.instance(MockStrategy);
		let engine;
		before(async () => {
			engine = await createDummyEngine(mockStrategy);
		});

		// NOTE(review): title mentions "custom config" but the test passes
		// emptyEngineOptions — presumably the title should not mention it; verify.
		it ('should decide to run if custom config, rules and target are correct', () => {
			const shouldRunEngine = engine.shouldEngineRun(
				[],
				[DataGenerator.getDummyRule()],
				[DataGenerator.getDummyTarget()],
				emptyEngineOptions
			);

			expect(shouldRunEngine).to.be.true;
		});

		it ('should decide to not run if using custom config', () => {
			const shouldRunEngine = engine.shouldEngineRun(
				[],
				[DataGenerator.getDummyRule()],
				[DataGenerator.getDummyTarget()],
				engineOptionsWithEslintCustom
			);

			expect(shouldRunEngine).to.be.false;
		});

		it('should decide to not run if target paths is empty', () => {
			const shouldRunEngine = engine.shouldEngineRun(
				[],
				[DataGenerator.getDummyRule()],
				[],
				emptyEngineOptions
			);

			expect(shouldRunEngine).to.be.false;
		});

		it('should decide to not run if no rules are chosen', () => {
			const shouldRunEngine = engine.shouldEngineRun(
				[],
				[],
				[DataGenerator.getDummyTarget()],
				emptyEngineOptions
			);

			expect(shouldRunEngine).to.be.false;
		});

		it ('should decide to run if using custom config contains PMD but not Eslint', () => {
			const shouldRunEngine = engine.shouldEngineRun(
				[],
				[DataGenerator.getDummyRule()],
				[DataGenerator.getDummyTarget()],
				engineOptionsWithPmdCustom
			);

			expect(shouldRunEngine).to.be.true;
		});
	});

	describe('Tests for isEngineRequested()', () => {
		const mockStrategy = Mockito.instance(MockStrategy);
		let engine;
		before(async () => {
			engine = await createDummyEngine(mockStrategy);
		});

		it('should return true when custom config is not present and filter contains engine name', () => {
			const filteredNames = ['pmd', engine.getName(), 'retire-js'];

			const isEngineRequested = engine.isEngineRequested(filteredNames, emptyEngineOptions);

			expect(isEngineRequested).to.be.true;
		});

		it('should return false when custom config is present even if filter contains engine name', () => {
			const filteredNames = ['pmd', engine.getName(), 'retire-js'];

			const isEngineRequested = engine.isEngineRequested(filteredNames, engineOptionsWithEslintCustom);

			expect(isEngineRequested).to.be.false;
		});

		it('should return false when custom config is not present but filter does not contain engine name', () => {
			const filteredNames = ['pmd', 'retire-js'];

			const isEngineRequested = engine.isEngineRequested(filteredNames, emptyEngineOptions);

			expect(isEngineRequested).to.be.false;
		});

		it('should return false when custom config is not present and filter starts with "eslint"', () => {
			const filteredNames = ['pmd', 'retire-js', 'eslint-custom'];

			const isEngineRequested = engine.isEngineRequested(filteredNames, emptyEngineOptions);

			expect(isEngineRequested).to.be.false;
		});

		it('should return true when only PMD custom config is present and filter contains engine name', () => {
			const filteredNames = ['pmd', engine.getName(), 'retire-js'];

			const isEngineRequested = engine.isEngineRequested(filteredNames, engineOptionsWithPmdCustom);

			expect(isEngineRequested).to.be.true;
		});

		it('should return true when custom config is not present and filter is empty', () => {
			const filteredNames = [];

			const isEngineRequested = engine.isEngineRequested(filteredNames, emptyEngineOptions);

			expect(isEngineRequested).to.be.true;
		});

		it('should return false when custom eslint config is present and filter is empty', () => {
			const filteredNames = [];

			const isEngineRequested = engine.isEngineRequested(filteredNames, engineOptionsWithEslintCustom);

			expect(isEngineRequested).to.be.false;
		});
	});
});

/** HELPER FUNCTIONS TO KEEP TESTS EASIER TO READ */

// Builds a StaticDependencies mock whose resolveTargetPath echoes the target path and
// whose createCLIEngine returns the supplied CLIEngine mock. Returns the MOCK (not an
// instance) so callers can verify()/capture() on it.
function mockStaticDependencies(target: RuleTarget, cliEngineMock: any) {
	const StaticDependenciesMock = Mockito.mock(StaticDependencies);
	Mockito.when(StaticDependenciesMock.resolveTargetPath(target.target)).thenReturn(target.target);
	Mockito.when(StaticDependenciesMock.createCLIEngine(Mockito.anything())).thenReturn(cliEngineMock);
	return StaticDependenciesMock;
}

// Stubs the shared MockStrategy for a run()/getCatalog() scenario, then builds an
// engine wired to instances of both mocks. Mutates module-level MockStrategy state.
async function createAbstractEngine(target: RuleTarget, StaticDependenciesMock: StaticDependencies) {
	Mockito.when(MockStrategy.filterUnsupportedPaths(target.paths)).thenReturn(target.paths);
	Mockito.when(MockStrategy.getLanguages()).thenReturn(['language']);
	Mockito.when(MockStrategy.processRuleViolation()).thenReturn((filename: string, ruleViolation: RuleViolation)=> {
		//do nothing
	});

	const engine = await createDummyEngine(Mockito.instance(MockStrategy), Mockito.instance(StaticDependenciesMock));
	return engine;
}

// Constructs and initializes the concrete test-harness engine.
async function createDummyEngine(strategy: EslintStrategy, baseDependencies = new StaticDependencies()) {
	const engine = new TestHarnessEngine();
	await engine.initializeContents(strategy, baseDependencies);
	return engine;
}

// Returns a CLIEngine mock instance preloaded with the given rule map and report.
// Side effect: also stubs MockStrategy.filterDisallowedRules for the same map.
function getDummyCliEngine(esRuleMap: Map<string, ESRule> = DataGenerator.getDummyEsRuleMap(), esReport: ESReport = DataGenerator.getDummyEsReport()): typeof CLIEngine {
	const CLIEngineMock: typeof CLIEngine = Mockito.mock(CLIEngine);
	Mockito.when(CLIEngineMock.getRules()).thenReturn(esRuleMap);
	Mockito.when(MockStrategy.filterDisallowedRules(esRuleMap)).thenReturn(esRuleMap);
	Mockito.when(CLIEngineMock.executeOnFiles(Mockito.anything())).thenReturn(esReport);
	return Mockito.instance(CLIEngineMock);
}
the_stack
import {ContextParser, ERROR_CODES, ErrorCoded, JsonLdContextNormalized, Util as ContextUtil} from "jsonld-context-parser"; import * as RDF from "@rdfjs/types"; import {DataFactory} from "rdf-data-factory"; import {EntryHandlerContainer} from "./entryhandler/EntryHandlerContainer"; import {ParsingContext} from "./ParsingContext"; // tslint:disable-next-line:no-var-requires const canonicalizeJson = require('canonicalize'); /** * Utility functions and methods. */ export class Util { public static readonly XSD: string = 'http://www.w3.org/2001/XMLSchema#'; public static readonly XSD_BOOLEAN: string = Util.XSD + 'boolean'; public static readonly XSD_INTEGER: string = Util.XSD + 'integer'; public static readonly XSD_DOUBLE: string = Util.XSD + 'double'; public static readonly RDF: string = 'http://www.w3.org/1999/02/22-rdf-syntax-ns#'; public readonly dataFactory: RDF.DataFactory<RDF.BaseQuad>; public readonly rdfFirst: RDF.NamedNode; public readonly rdfRest: RDF.NamedNode; public readonly rdfNil: RDF.NamedNode; public readonly rdfType: RDF.NamedNode; public readonly rdfJson: RDF.NamedNode; private readonly parsingContext: ParsingContext; constructor(options: { parsingContext: ParsingContext, dataFactory?: RDF.DataFactory<RDF.BaseQuad> }) { this.parsingContext = options.parsingContext; this.dataFactory = options.dataFactory || new DataFactory(); this.rdfFirst = this.dataFactory.namedNode(Util.RDF + 'first'); this.rdfRest = this.dataFactory.namedNode(Util.RDF + 'rest'); this.rdfNil = this.dataFactory.namedNode(Util.RDF + 'nil'); this.rdfType = this.dataFactory.namedNode(Util.RDF + 'type'); this.rdfJson = this.dataFactory.namedNode(Util.RDF + 'JSON'); } /** * Helper function to get the value of a context entry, * or fallback to a certain value. * @param {JsonLdContextNormalized} context A JSON-LD context. * @param {string} contextKey A pre-defined JSON-LD key in context entries. * @param {string} key A context entry key. 
* @param {string} fallback A fallback value for when the given contextKey * could not be found in the value with the given key. * @return {string} The value of the given contextKey in the entry behind key in the given context, * or the given fallback value. */ public static getContextValue<FB>(context: JsonLdContextNormalized, contextKey: string, key: string, fallback: FB): string | any | FB { const entry = context.getContextRaw()[key]; if (!entry) { return fallback; } const type = entry[contextKey]; return type === undefined ? fallback : type; } /** * Get the container type of the given key in the context. * * Should any context-scoping bugs should occur related to this in the future, * it may be required to increase the offset from the depth at which the context is retrieved by one (to 2). * This is because containers act 2 levels deep. * * @param {JsonLdContextNormalized} context A JSON-LD context. * @param {string} key A context entry key. * @return {string} The container type. */ public static getContextValueContainer(context: JsonLdContextNormalized, key: string): { [typeName: string]: boolean } { return Util.getContextValue(context, '@container', key, { '@set': true }); } /** * Get the value type of the given key in the context. * @param {JsonLdContextNormalized} context A JSON-LD context. * @param {string} key A context entry key. * @return {string} The node type. */ public static getContextValueType(context: JsonLdContextNormalized, key: string): string | null { const valueType = Util.getContextValue(context, '@type', key, null); if (valueType === '@none') { return null; } return valueType; } /** * Get the language of the given key in the context. * @param {JsonLdContextNormalized} context A JSON-LD context. * @param {string} key A context entry key. * @return {string} The node type. 
*/ public static getContextValueLanguage(context: JsonLdContextNormalized, key: string): string | null { return Util.getContextValue(context, '@language', key, context.getContextRaw()['@language'] || null); } /** * Get the direction of the given key in the context. * @param {JsonLdContextNormalized} context A JSON-LD context. * @param {string} key A context entry key. * @return {string} The node type. */ public static getContextValueDirection(context: JsonLdContextNormalized, key: string): string { return Util.getContextValue(context, '@direction', key, context.getContextRaw()['@direction'] || null); } /** * Check if the given key in the context is a reversed property. * @param {JsonLdContextNormalized} context A JSON-LD context. * @param {string} key A context entry key. * @return {boolean} If the context value has a @reverse key. */ public static isContextValueReverse(context: JsonLdContextNormalized, key: string): boolean { return !!Util.getContextValue(context, '@reverse', key, null); } /** * Get the @index of the given key in the context. * @param {JsonLdContextNormalized} context A JSON-LD context. * @param {string} key A context entry key. * @return {string} The index. */ public static getContextValueIndex(context: JsonLdContextNormalized, key: string): any | null { return Util.getContextValue(context, '@index', key, context.getContextRaw()['@index'] || null); } /** * Check if the given key refers to a reversed property. * @param {JsonLdContextNormalized} context A JSON-LD context. * @param {string} key The property key. * @param {string} parentKey The parent key. * @return {boolean} If the property must be reversed. */ public static isPropertyReverse(context: JsonLdContextNormalized, key: string, parentKey: string): boolean { // '!==' is needed because reversed properties in a @reverse container should cancel each other out. return parentKey === '@reverse' !== Util.isContextValueReverse(context, key); } /** * Check if the given IRI is valid. 
* @param {string} iri A potential IRI. * @return {boolean} If the given IRI is valid. */ public static isValidIri(iri: string | null): boolean { return iri !== null && ContextUtil.isValidIri(iri); } /** * Check if the given first array (needle) is a prefix of the given second array (haystack). * @param needle An array to check if it is a prefix. * @param haystack An array to look in. */ public static isPrefixArray(needle: string[], haystack: string[]): boolean { if (needle.length > haystack.length) { return false; } for (let i = 0; i < needle.length; i++) { if (needle[i] !== haystack[i]) { return false; } } return true; } /** * Make sure that @id-@index pairs are equal over all array values. * Reject otherwise. * @param {any[]} value An array value. * @return {Promise<void>} A promise rejecting if conflicts are present. */ public async validateValueIndexes(value: any[]): Promise<void> { if (this.parsingContext.validateValueIndexes) { const indexHashes: {[id: string]: any} = {}; for (const entry of value) { if (entry && typeof entry === 'object') { const id = entry['@id']; const index = entry['@index']; if (id && index) { const existingIndexValue = indexHashes[id]; if (existingIndexValue && existingIndexValue !== index) { throw new ErrorCoded(`Conflicting @index value for ${id}`, ERROR_CODES.CONFLICTING_INDEXES); } indexHashes[id] = index; } } } } } /** * Convert a given JSON value to an RDF term. * @param {JsonLdContextNormalized} context A JSON-LD context. * @param {string} key The current JSON key. * @param value A JSON value. * @param {number} depth The depth the value is at. * @param {string[]} keys The path of keys. * @return {Promise<RDF.Term[]>} An RDF term array. 
*/ public async valueToTerm(context: JsonLdContextNormalized, key: string, value: any, depth: number, keys: string[]): Promise<RDF.Term[]> { // Skip further processing if we have an @type: @json if (Util.getContextValueType(context, key) === '@json') { return [ this.dataFactory.literal(this.valueToJsonString(value), this.rdfJson) ]; } const type: string = typeof value; switch (type) { case 'object': // Skip if we have a null or undefined object if (value === null || value === undefined) { return []; } // Special case for arrays if (Array.isArray(value)) { // We handle arrays at value level so we can emit earlier, so this is handled already when we get here. // Empty context-based lists are emitted at this place, because our streaming algorithm doesn't detect those. if ('@list' in Util.getContextValueContainer(context, key)) { if (value.length === 0) { return [ this.rdfNil ]; } else { return this.parsingContext.idStack[depth + 1] || []; } } await this.validateValueIndexes(value); return []; } // Handle property-scoped contexts context = await this.getContextSelfOrPropertyScoped(context, key); // Handle local context in the value if ('@context' in value) { context = await this.parsingContext.parseContext(value['@context'], (await this.parsingContext.getContext(keys, 0)).getContextRaw()); } // In all other cases, we have a hash value = await this.unaliasKeywords(value, keys, depth, context); // Un-alias potential keywords in this hash if ('@value' in value) { let val; let valueLanguage; let valueDirection; let valueType; let valueIndex; // We don't use the index, but we need to check its type for spec-compliance for (key in value) { const subValue = value[key]; switch (key) { case '@value': val = subValue; break; case '@language': valueLanguage = subValue; break; case '@direction': valueDirection = subValue; break; case '@type': valueType = subValue; break; case '@index': valueIndex = subValue; break; default: throw new ErrorCoded(`Unknown value entry '${key}' in 
@value: ${JSON.stringify(value)}`, ERROR_CODES.INVALID_VALUE_OBJECT); } } // Skip further processing if we have an @type: @json if (await this.unaliasKeyword(valueType, keys, depth, true, context) === '@json') { return [ this.dataFactory.literal(this.valueToJsonString(val), this.rdfJson) ]; } // Validate @value if (val === null) { return []; } if (typeof val === 'object') { throw new ErrorCoded(`The value of an '@value' can not be an object, got '${JSON.stringify(val)}'`, ERROR_CODES.INVALID_VALUE_OBJECT_VALUE); } // Validate @index if (this.parsingContext.validateValueIndexes && valueIndex && typeof valueIndex !== 'string') { throw new ErrorCoded(`The value of an '@index' must be a string, got '${JSON.stringify(valueIndex)}'`, ERROR_CODES.INVALID_INDEX_VALUE); } // Validate @language and @direction if (valueLanguage) { if (typeof val !== 'string') { throw new ErrorCoded( `When an '@language' is set, the value of '@value' must be a string, got '${JSON.stringify(val)}'`, ERROR_CODES.INVALID_LANGUAGE_TAGGED_VALUE); } if (!ContextParser.validateLanguage(valueLanguage, this.parsingContext.strictValues, ERROR_CODES.INVALID_LANGUAGE_TAGGED_STRING)) { return []; } // Language tags are always normalized to lowercase in 1.0. 
if (this.parsingContext.normalizeLanguageTags || this.parsingContext.activeProcessingMode === 1.0) { valueLanguage = valueLanguage.toLowerCase(); } } if (valueDirection) { if (typeof val !== 'string') { throw new Error( `When an '@direction' is set, the value of '@value' must be a string, got '${JSON.stringify(val)}'`); } if (!ContextParser.validateDirection(valueDirection, this.parsingContext.strictValues)) { return []; } } // Check @language and @direction if (valueLanguage && valueDirection && this.parsingContext.rdfDirection) { if (valueType) { throw new ErrorCoded(`Can not have '@language', '@direction' and '@type' in a value: '${JSON .stringify(value)}'`, ERROR_CODES.INVALID_VALUE_OBJECT); } return this.nullableTermToArray(this .createLanguageDirectionLiteral(depth, val, valueLanguage, valueDirection)); } else if (valueLanguage) { // Check @language if (valueType) { throw new ErrorCoded(`Can not have both '@language' and '@type' in a value: '${JSON.stringify(value)}'`, ERROR_CODES.INVALID_VALUE_OBJECT); } return [ this.dataFactory.literal(val, valueLanguage) ]; } else if (valueDirection && this.parsingContext.rdfDirection) { // Check @direction if (valueType) { throw new ErrorCoded(`Can not have both '@direction' and '@type' in a value: '${JSON.stringify(value)}'`, ERROR_CODES.INVALID_VALUE_OBJECT); } return this.nullableTermToArray(this .createLanguageDirectionLiteral(depth, val, valueLanguage, valueDirection)); } else if (valueType) { // Validate @type if (typeof valueType !== 'string') { throw new ErrorCoded(`The value of an '@type' must be a string, got '${JSON.stringify(valueType)}'`, ERROR_CODES.INVALID_TYPED_VALUE); } const typeTerm = this.createVocabOrBaseTerm(context, valueType); if (!typeTerm) { throw new ErrorCoded(`Invalid '@type' value, got '${JSON.stringify(valueType)}'`, ERROR_CODES.INVALID_TYPED_VALUE); } if (typeTerm.termType !== 'NamedNode') { throw new ErrorCoded(`Illegal value type (${typeTerm.termType}): ${valueType}`, 
ERROR_CODES.INVALID_TYPED_VALUE); } return [ this.dataFactory.literal(val, typeTerm) ]; } // We don't pass the context, because context-based things like @language should be ignored return await this.valueToTerm(new JsonLdContextNormalized({}), key, val, depth, keys); } else if ('@set' in value) { // No other entries are allow in this value if (Object.keys(value).length > 1) { throw new ErrorCoded(`Found illegal neighbouring entries next to @set for key: '${key}'`, ERROR_CODES.INVALID_SET_OR_LIST_OBJECT); } // No need to do anything here, this is handled at the deeper level. return []; } else if ('@list' in value) { // No other entries are allowed in this value if (Object.keys(value).length > 1) { throw new ErrorCoded(`Found illegal neighbouring entries next to @list for key: '${key}'`, ERROR_CODES.INVALID_SET_OR_LIST_OBJECT); } const listValue = value["@list"]; // We handle lists at value level so we can emit earlier, so this is handled already when we get here. // Empty anonymous lists are emitted at this place, because our streaming algorithm doesn't detect those. if (Array.isArray(listValue)) { if (listValue.length === 0) { return [ this.rdfNil ]; } else { return this.parsingContext.idStack[depth + 1] || []; } } else { // We only have a single list element here, so emit this directly as single element return await this.valueToTerm(await this.parsingContext.getContext(keys), key, listValue, depth - 1, keys.slice(0, -1)); } } else if ('@reverse' in value && typeof value['@reverse'] === 'boolean') { // We handle reverse properties at value level so we can emit earlier, // so this is handled already when we get here. return []; } else if ('@graph' in Util.getContextValueContainer(await this.parsingContext.getContext(keys), key)) { // We are processing a graph container const graphContainerEntries = this.parsingContext.graphContainerTermStack[depth + 1]; return graphContainerEntries ? 
Object.values(graphContainerEntries) : [ this.dataFactory.blankNode() ]; } else if ("@id" in value) { // Use deeper context if the value node contains other properties next to @id. if (Object.keys(value).length > 1) { context = await this.parsingContext.getContext(keys, 0); } // Handle local context in the value if ('@context' in value) { context = await this.parsingContext.parseContext(value['@context'], context.getContextRaw()); } if (value["@type"] === '@vocab') { return this.nullableTermToArray(this.createVocabOrBaseTerm(context, value["@id"])); } else { return this.nullableTermToArray(this.resourceToTerm(context, value["@id"])); } } else { // Only make a blank node if at least one triple was emitted at the value's level. if (this.parsingContext.emittedStack[depth + 1] || (value && typeof value === 'object' && Object.keys(value).length === 0)) { return (this.parsingContext.idStack[depth + 1] || (this.parsingContext.idStack[depth + 1] = [ this.dataFactory.blankNode() ])); } else { return []; } } case 'string': return this.nullableTermToArray(this.stringValueToTerm(depth, await this.getContextSelfOrPropertyScoped(context, key), key, value, null)); case 'boolean': return this.nullableTermToArray(this.stringValueToTerm(depth, await this.getContextSelfOrPropertyScoped(context, key), key, Boolean(value).toString(), this.dataFactory.namedNode(Util.XSD_BOOLEAN))); case 'number': return this.nullableTermToArray(this.stringValueToTerm(depth, await this.getContextSelfOrPropertyScoped(context, key), key, value, this.dataFactory.namedNode( value % 1 === 0 && value < 1e21 ? Util.XSD_INTEGER : Util.XSD_DOUBLE))); default: this.parsingContext.emitError(new Error(`Could not determine the RDF type of a ${type}`)); return []; } } /** * If the context defines a property-scoped context for the given key, * that context will be returned. * Otherwise, the given context will be returned as-is. * * This should be used for valueToTerm cases that are not objects. 
* @param context A context. * @param key A JSON key. */ public async getContextSelfOrPropertyScoped(context: JsonLdContextNormalized, key: string) : Promise<JsonLdContextNormalized> { const contextKeyEntry = context.getContextRaw()[key]; if (contextKeyEntry && typeof contextKeyEntry === 'object' && '@context' in contextKeyEntry) { context = await this.parsingContext.parseContext(contextKeyEntry, context.getContextRaw(), true); } return context; } /** * If the given term is null, return an empty array, otherwise return an array with the single given term. * @param term A term. */ public nullableTermToArray(term: RDF.Term | null): RDF.Term[] { return term ? [ term ] : []; } /** * Convert a given JSON key to an RDF predicate term, * based on @vocab. * @param {JsonLdContextNormalized} context A JSON-LD context. * @param key A JSON key. * @return {RDF.NamedNode} An RDF named node. */ public predicateToTerm(context: JsonLdContextNormalized, key: string): RDF.Term | null { const expanded: string | null = context.expandTerm(key, true, this.parsingContext.getExpandOptions()); // Immediately return if the predicate was disabled in the context if (!expanded) { return null; } // Check if the predicate is a blank node if (expanded[0] === '_' && expanded[1] === ':') { if (this.parsingContext.produceGeneralizedRdf) { return this.dataFactory.blankNode(expanded.substr(2)); } else { return null; } } // Check if the predicate is a valid IRI if (Util.isValidIri(expanded)) { return this.dataFactory.namedNode(expanded); } else { if (expanded && this.parsingContext.strictValues) { this.parsingContext.emitError(new ErrorCoded(`Invalid predicate IRI: ${expanded}`, ERROR_CODES.INVALID_IRI_MAPPING)); } else { return null; } } return null; } /** * Convert a given JSON key to an RDF resource term or blank node, * based on @base. * @param {JsonLdContextNormalized} context A JSON-LD context. * @param key A JSON key. * @return {RDF.NamedNode} An RDF named node or null. 
*/ public resourceToTerm(context: JsonLdContextNormalized, key: string): RDF.NamedNode | RDF.BlankNode | null { if (key.startsWith('_:')) { return this.dataFactory.blankNode(key.substr(2)); } const iri = context.expandTerm(key, false, this.parsingContext.getExpandOptions()); if (!Util.isValidIri(iri)) { if (iri && this.parsingContext.strictValues) { this.parsingContext.emitError(new Error(`Invalid resource IRI: ${iri}`)); } else { return null; } } return this.dataFactory.namedNode(<string> iri); } /** * Convert a given JSON key to an RDF resource term. * It will do this based on the @vocab, * and fallback to @base. * @param {JsonLdContextNormalized} context A JSON-LD context. * @param key A JSON key. * @return {RDF.NamedNode} An RDF named node or null. */ public createVocabOrBaseTerm(context: JsonLdContextNormalized, key: string): RDF.Term | null { if (key.startsWith('_:')) { return this.dataFactory.blankNode(key.substr(2)); } const expandOptions = this.parsingContext.getExpandOptions(); let expanded = context.expandTerm(key, true, expandOptions); if (expanded === key) { expanded = context.expandTerm(key, false, expandOptions); } if (!Util.isValidIri(expanded)) { if (expanded && this.parsingContext.strictValues) { this.parsingContext.emitError(new Error(`Invalid term IRI: ${expanded}`)); } else { return null; } } return this.dataFactory.namedNode(<string> expanded); } /** * Ensure that the given value becomes a string. * @param {string | number} value A string or number. * @param {NamedNode} datatype The intended datatype. * @return {string} The returned string. */ public intToString(value: string | number, datatype: RDF.NamedNode | null): string { if (typeof value === 'number') { if (Number.isFinite(value)) { const isInteger = value % 1 === 0; if (isInteger && (!datatype || datatype.value !== Util.XSD_DOUBLE)) { return Number(value).toString(); } else { return value.toExponential(15).replace(/(\d)0*e\+?/, '$1E'); } } else { return value > 0 ? 
'INF' : '-INF'; } } else { return value; } } /** * Convert a given JSON string value to an RDF term. * @param {number} depth The current stack depth. * @param {JsonLdContextNormalized} context A JSON-LD context. * @param {string} key The current JSON key. * @param {string} value A JSON value. * @param {NamedNode} defaultDatatype The default datatype for the given value. * @return {RDF.Term} An RDF term or null. */ public stringValueToTerm(depth: number, context: JsonLdContextNormalized, key: string, value: string | number, defaultDatatype: RDF.NamedNode | null): RDF.Term | null { // Check the datatype from the context const contextType = Util.getContextValueType(context, key); if (contextType) { if (contextType === '@id') { if (!defaultDatatype) { return this.resourceToTerm(context, this.intToString(value, defaultDatatype)); } } else if (contextType === '@vocab') { if (!defaultDatatype) { return this.createVocabOrBaseTerm(context, this.intToString(value, defaultDatatype)); } } else { defaultDatatype = this.dataFactory.namedNode(contextType); } } // If we don't find such a datatype, check the language from the context if (!defaultDatatype) { const contextLanguage = Util.getContextValueLanguage(context, key); const contextDirection = Util.getContextValueDirection(context, key); if (contextDirection && this.parsingContext.rdfDirection) { return this.createLanguageDirectionLiteral(depth, this.intToString(value, defaultDatatype), contextLanguage, contextDirection); } else { return this.dataFactory.literal(this.intToString(value, defaultDatatype), <string | RDF.NamedNode> contextLanguage); } } // If all else fails, make a literal based on the default content type return this.dataFactory.literal(this.intToString(value, defaultDatatype), defaultDatatype); } /** * Create a literal for the given value with the given language and direction. * Auxiliary quads may be emitted. * @param {number} depth The current stack depth. * @param {string} value A string value. 
* @param {string} language A language tag. * @param {string} direction A direction. * @return {Term} An RDF term. */ public createLanguageDirectionLiteral(depth: number, value: string, language: string | null, direction: string) : RDF.Term { if (this.parsingContext.rdfDirection === 'i18n-datatype') { // Create a datatyped literal, by encoding the language and direction into https://www.w3.org/ns/i18n#. if (!language) { language = ''; } return this.dataFactory.literal(value, this.dataFactory.namedNode(`https://www.w3.org/ns/i18n#${language}_${direction}`)); } else { // Reify the literal. const valueNode = this.dataFactory.blankNode(); const graph = this.getDefaultGraph(); this.parsingContext.emitQuad(depth, this.dataFactory.quad(valueNode, this.dataFactory.namedNode(Util.RDF + 'value'), this.dataFactory.literal(value), graph)); if (language) { this.parsingContext.emitQuad(depth, this.dataFactory.quad(valueNode, this.dataFactory.namedNode(Util.RDF + 'language'), this.dataFactory.literal(language), graph)); } this.parsingContext.emitQuad(depth, this.dataFactory.quad(valueNode, this.dataFactory.namedNode(Util.RDF + 'direction'), this.dataFactory.literal(direction), graph)); return valueNode; } } /** * Stringify the given JSON object to a canonical JSON string. * @param value Any valid JSON value. * @return {string} A canonical JSON string. */ public valueToJsonString(value: any): string { return canonicalizeJson(value); } /** * If the key is not a keyword, try to check if it is an alias for a keyword, * and if so, un-alias it. * @param {string} key A key, can be falsy. * @param {string[]} keys The path of keys. * @param {number} depth The depth to * @param {boolean} disableCache If the cache should be disabled * @param {JsonLdContextNormalized} context A context to unalias with, * will fallback to retrieving the context for the given keys. * @return {Promise<string>} A promise resolving to the key itself, or another key. 
*/ public async unaliasKeyword(key: any, keys: string[], depth: number, disableCache?: boolean, context?: JsonLdContextNormalized): Promise<any> { // Numbers can not be an alias if (Number.isInteger(key)) { return key; } // Try to grab from cache if it was already un-aliased before. if (!disableCache) { const cachedUnaliasedKeyword = this.parsingContext.unaliasedKeywordCacheStack[depth]; if (cachedUnaliasedKeyword) { return cachedUnaliasedKeyword; } } if (!ContextUtil.isPotentialKeyword(key)) { context = context || await this.parsingContext.getContext(keys); let unliased = context.getContextRaw()[key]; if (unliased && typeof unliased === 'object') { unliased = unliased['@id']; } if (ContextUtil.isValidKeyword(unliased)) { key = unliased; } } return disableCache ? key : (this.parsingContext.unaliasedKeywordCacheStack[depth] = key); } /** * Unalias the keyword of the parent. * This adds a safety check if no parent exist. * @param {any[]} keys A stack of keys. * @param {number} depth The current depth. * @return {Promise<any>} A promise resolving to the parent key, or another key. */ public async unaliasKeywordParent(keys: any[], depth: number): Promise<any> { return await this.unaliasKeyword(depth > 0 && keys[depth - 1], keys, depth - 1); } /** * Un-alias all keywords in the given hash. * @param {{[p: string]: any}} hash A hash object. * @param {string[]} keys The path of keys. * @param {number} depth The depth. * @param {JsonLdContextNormalized} context A context to unalias with, * will fallback to retrieving the context for the given keys. * @return {Promise<{[p: string]: any}>} A promise resolving to the new hash. 
*/ public async unaliasKeywords(hash: {[id: string]: any}, keys: string[], depth: number, context?: JsonLdContextNormalized): Promise<{[id: string]: any}> { const newHash: {[id: string]: any} = {}; for (const key in hash) { newHash[await this.unaliasKeyword(key, keys, depth + 1, true, context)] = hash[key]; } return newHash; } /** * Check if we are processing a literal (including JSON literals) at the given depth. * This will also check higher levels, * because if a parent is a literal, * then the deeper levels are definitely a literal as well. * @param {number} depth The depth. * @return {boolean} If we are processing a literal. */ public isLiteral(depth: number): boolean { for (let i = depth; i >= 0; i--) { if (this.parsingContext.literalStack[i] || this.parsingContext.jsonLiteralStack[i]) { return true; } } return false; } /** * Check how many parents should be skipped for checking the @graph for the given node. * * @param {number} depth The depth of the node. * @param {any[]} keys An array of keys. * @return {number} The graph depth offset. */ public async getDepthOffsetGraph(depth: number, keys: any[]): Promise<number> { for (let i = depth - 1; i > 0; i--) { if (await this.unaliasKeyword(keys[i], keys, i) === '@graph') { // Skip further processing if we are already in an @graph-@id or @graph-@index container const containers = (await EntryHandlerContainer.getContainerHandler(this.parsingContext, keys, i)).containers; if (EntryHandlerContainer.isComplexGraphContainer(containers)) { return -1; } return depth - i - 1; } } return -1; } /** * Check if the given subject is of a valid type. * This should be called when applying @reverse'd properties. * @param {Term} subject A subject. */ public validateReverseSubject(subject: RDF.Term) { if (subject.termType === 'Literal') { throw new ErrorCoded(`Found illegal literal in subject position: ${subject.value}`, ERROR_CODES.INVALID_REVERSE_PROPERTY_VALUE); } } /** * Get the default graph. * @return {Term} An RDF term. 
*/ public getDefaultGraph(): RDF.NamedNode | RDF.BlankNode | RDF.DefaultGraph { return this.parsingContext.defaultGraph || this.dataFactory.defaultGraph(); } /** * Get the current graph, while taking into account a graph that can be defined via @container: @graph. * If not within a graph container, the default graph will be returned. * @param keys The current keys. * @param depth The current depth. */ public async getGraphContainerValue(keys: any[], depth: number) : Promise<RDF.NamedNode | RDF.BlankNode | RDF.DefaultGraph> { // Default to default graph let graph: RDF.NamedNode | RDF.BlankNode | RDF.DefaultGraph | null = this.getDefaultGraph(); // Check if we are in an @container: @graph. const { containers, depth: depthContainer } = await EntryHandlerContainer .getContainerHandler(this.parsingContext, keys, depth); if ('@graph' in containers) { // Get the graph from the stack. const graphContainerIndex = EntryHandlerContainer.getContainerGraphIndex(containers, depthContainer, keys); const entry = this.parsingContext.graphContainerTermStack[depthContainer]; graph = entry ? entry[graphContainerIndex] : null; // Set the graph in the stack if none has been set yet. if (!graph) { let graphId: RDF.NamedNode | RDF.BlankNode | null = null; if ('@id' in containers) { const keyUnaliased = await this.getContainerKey(keys[depthContainer], keys, depthContainer); if (keyUnaliased !== null) { graphId = await this.resourceToTerm(await this.parsingContext.getContext(keys), keyUnaliased); } } if (!graphId) { graphId = this.dataFactory.blankNode(); } if (!this.parsingContext.graphContainerTermStack[depthContainer]) { this.parsingContext.graphContainerTermStack[depthContainer] = {}; } graph = this.parsingContext.graphContainerTermStack[depthContainer][graphContainerIndex] = graphId; } } return graph; } /** * Get the properties depth for retrieving properties. * * Typically, the properties depth will be identical to the given depth. 
* * The following exceptions apply: * * When the parent is @reverse, the depth is decremented by one. * * When @nest parents are found, the depth is decremented by the number of @nest parents. * If in combination with the exceptions above an intermediary array is discovered, * the depth is also decremented by this number of arrays. * * @param keys The current key chain. * @param depth The current depth. */ public async getPropertiesDepth(keys: any[], depth: number): Promise<number> { let lastValidDepth = depth; for (let i = depth - 1; i > 0; i--) { if (typeof keys[i] !== 'number') { // Skip array keys const parentKey = await this.unaliasKeyword(keys[i], keys, i); if (parentKey === '@reverse') { return i; } else if (parentKey === '@nest') { lastValidDepth = i; } else { return lastValidDepth; } } } return lastValidDepth; } /** * Get the key for the current container entry. * @param key A key, can be falsy. * @param keys The key chain. * @param depth The current depth to get the key from. * @return Promise resolving to the key. * Null will be returned for @none entries, with aliasing taken into account. */ public async getContainerKey(key: any, keys: string[], depth: number): Promise<any> { const keyUnaliased = await this.unaliasKeyword(key, keys, depth); return keyUnaliased === '@none' ? null : keyUnaliased; } }
the_stack
import type { RegExpVisitor } from "regexpp/visitor" import type { Alternative, CapturingGroup, Character, CharacterClass, CharacterSet, Element, Group, Node, QuantifiableElement, Quantifier, } from "regexpp/ast" import type { AST } from "eslint" import type { RegExpContext, Quant } from "../utils" import { createRule, defineRegexpVisitor, quantToString } from "../utils" import type { ReadonlyFlags } from "regexp-ast-analysis" import { Chars, hasSomeDescendant, toCharSet } from "regexp-ast-analysis" import { getPossiblyConsumedChar } from "../utils/regexp-ast" import type { CharSet } from "refa" import { mention } from "../utils/mention" /** * Returns whether the given node is or contains a capturing group. */ function hasCapturingGroup(node: Node): boolean { return hasSomeDescendant(node, (d) => d.type === "CapturingGroup") } interface SingleConsumedChar { readonly char: CharSet /** * Whether the entire element is a single character. * * If `true`, the element is equivalent to `[char]`. * * If `false`, the element is equivalent to `[char]|unknown`. */ readonly complete: boolean } const EMPTY_UTF16: SingleConsumedChar = { char: Chars.empty({}), complete: false, } const EMPTY_UNICODE: SingleConsumedChar = { char: Chars.empty({ unicode: true }), complete: false, } /** * If the given element is guaranteed to only consume a single character set, * then this character set will be returned, `null` otherwise. */ function getSingleConsumedChar( element: Element | Alternative, flags: ReadonlyFlags, ): SingleConsumedChar { const empty = flags.unicode ? 
EMPTY_UNICODE : EMPTY_UTF16 switch (element.type) { case "Alternative": if (element.elements.length === 1) { return getSingleConsumedChar(element.elements[0], flags) } return empty case "Character": case "CharacterSet": case "CharacterClass": return { char: toCharSet(element, flags), complete: true, } case "Group": case "CapturingGroup": { const results = element.alternatives.map((a) => getSingleConsumedChar(a, flags), ) return { char: empty.char.union(...results.map((r) => r.char)), complete: results.every((r) => r.complete), } } default: return empty } } /** * Returns the sum of the given quant and constant. */ function quantAddConst(quant: Readonly<Quant>, constant: number): Quant { return { min: quant.min + constant, max: quant.max + constant, greedy: quant.greedy, } } /** * Returns the raw of the given quantifier. */ function quantize(element: QuantifiableElement, quant: Quant): string { if (quant.min === 0 && quant.max === 0) { return "" } if (quant.min === 1 && quant.max === 1) { return element.raw } return element.raw + quantToString(quant) } type GroupOrCharacter = | Group | CapturingGroup | Character | CharacterClass | CharacterSet /** * Returns whether the given element is a group or character. */ function isGroupOrCharacter(element: Element): element is GroupOrCharacter { switch (element.type) { case "Group": case "CapturingGroup": case "Character": case "CharacterClass": case "CharacterSet": return true default: return false } } type Replacement = BothReplacement | NestedReplacement /** Replace both the left and right quantifiers. */ interface BothReplacement { type: "Both" messageId: string raw: string } /** Replace only the nested quantifier. */ interface NestedReplacement { type: "Nested" messageId: string raw: string dominate: Quantifier nested: Quantifier } /** * Returns the replacement for the two adjacent elements. 
*/ function getQuantifiersReplacement( left: Quantifier, right: Quantifier, flags: ReadonlyFlags, ): Replacement | null { // this only handles quantifiers that aren't simple repetitions // e.g. `a*\w*` will be handled but `a{6}\w` will not be. if (left.min === left.max || right.min === right.max) { return null } // both quantifiers must have the same greediness if (left.greedy !== right.greedy) { return null } // compare const lSingle = getSingleConsumedChar(left.element, flags) const rSingle = getSingleConsumedChar(right.element, flags) const lPossibleChar = lSingle.complete ? lSingle.char : getPossiblyConsumedChar(left.element, flags).char const rPossibleChar = rSingle.complete ? rSingle.char : getPossiblyConsumedChar(right.element, flags).char const greedy = left.greedy let lQuant: Readonly<Quant>, rQuant: Readonly<Quant> if ( lSingle.complete && rSingle.complete && lSingle.char.equals(rSingle.char) ) { // left is equal to right lQuant = { min: left.min + right.min, max: left.max + right.max, greedy, } rQuant = { min: 0, max: 0, greedy } } else if ( right.max === Infinity && rSingle.char.isSupersetOf(lPossibleChar) ) { // left is a subset of right lQuant = { min: left.min, max: left.min, greedy, } rQuant = right // unchanged } else if ( left.max === Infinity && lSingle.char.isSupersetOf(rPossibleChar) ) { // right is a subset of left lQuant = left // unchanged rQuant = { min: right.min, max: right.min, greedy, } } else { return null } const raw = quantize(left.element, lQuant) + quantize(right.element, rQuant) // eslint-disable-next-line one-var -- rule error let messageId if ( lQuant.max === 0 && right.max === rQuant.max && right.min === rQuant.min ) { messageId = "removeLeft" } else if ( rQuant.max === 0 && left.max === lQuant.max && left.min === lQuant.min ) { messageId = "removeRight" } else { messageId = "replace" } return { type: "Both", raw, messageId } } /** * A element that is repeated a constant number of times. 
*/ interface RepeatedElement { type: "Repeated" element: GroupOrCharacter min: number } /** * Tries to convert the given element into a repeated element. */ function asRepeatedElement(element: Element): RepeatedElement | null { if (element.type === "Quantifier") { if ( element.min === element.max && element.min > 0 && isGroupOrCharacter(element.element) ) { return { type: "Repeated", element: element.element, min: element.min, } } } else if (isGroupOrCharacter(element)) { return { type: "Repeated", element, min: 1 } } return null } /** * Returns the replacement for the two adjacent elements. */ function getQuantifierRepeatedElementReplacement( pair: [Quantifier, RepeatedElement] | [RepeatedElement, Quantifier], flags: ReadonlyFlags, ): Replacement | null { const [left, right] = pair // the characters of both elements have to be complete and equal const lSingle = getSingleConsumedChar(left.element, flags) if (!lSingle.complete) { return null } const rSingle = getSingleConsumedChar(right.element, flags) if (!rSingle.complete) { return null } if (!rSingle.char.equals(lSingle.char)) { return null } let elementRaw, quant if (left.type === "Quantifier") { elementRaw = left.element.raw quant = quantAddConst(left, right.min) } else if (right.type === "Quantifier") { elementRaw = right.element.raw quant = quantAddConst(right, left.min) } else { throw new Error() } const raw = elementRaw + quantToString(quant) return { type: "Both", messageId: "combine", raw } } /** * Returns a replacement for the nested quantifier. 
*/ function getNestedReplacement( dominate: Quantifier, nested: Quantifier, flags: ReadonlyFlags, ): Replacement | null { if (dominate.greedy !== nested.greedy) { return null } if (dominate.max < Infinity || nested.min === nested.max) { return null } const single = getSingleConsumedChar(dominate.element, flags) if (single.char.isEmpty) { return null } const nestedPossible = getPossiblyConsumedChar(nested.element, flags) if (single.char.isSupersetOf(nestedPossible.char)) { const { min } = nested if (min === 0) { return { type: "Nested", messageId: "nestedRemove", raw: "", nested, dominate, } } return { type: "Nested", messageId: "nestedReplace", raw: quantize(nested.element, { ...nested, max: min }), nested, dominate, } } return null } /** Yields all quantifiers at the start/end of the given element. */ function* nestedQuantifiers( root: Element | Alternative, direction: "start" | "end", ): Iterable<Quantifier> { switch (root.type) { case "Alternative": if (root.elements.length > 0) { const index = direction === "start" ? 0 : root.elements.length - 1 yield* nestedQuantifiers(root.elements[index], direction) } break case "CapturingGroup": case "Group": for (const a of root.alternatives) { yield* nestedQuantifiers(a, direction) } break case "Quantifier": yield root if (root.max === 1) { yield* nestedQuantifiers(root.element, direction) } break default: break } } /** * Whether the computed replacement is to be ignored. */ function ignoreReplacement( left: Element, right: Element, result: Replacement, ): boolean { // There is a relatively common case for which we want to make // an exception: `aa?` // We will only suggest the replacement if the new raw is // shorter than the current one. 
if (left.type === "Quantifier") { if ( left.raw.length + right.raw.length <= result.raw.length && isGroupOrCharacter(right) && left.min === 0 && left.max === 1 ) { return true } } if (right.type === "Quantifier") { if ( left.raw.length + right.raw.length <= result.raw.length && isGroupOrCharacter(left) && right.min === 0 && right.max === 1 ) { return true } } return false } /** * Returns the replacement for the two adjacent elements. */ function getReplacement( left: Element, right: Element, flags: ReadonlyFlags, ): Replacement | null { if (left.type === "Quantifier" && right.type === "Quantifier") { const result = getQuantifiersReplacement(left, right, flags) if (result && !ignoreReplacement(left, right, result)) return result } if (left.type === "Quantifier") { const rightRep = asRepeatedElement(right) if (rightRep) { const result = getQuantifierRepeatedElementReplacement( [left, rightRep], flags, ) if (result && !ignoreReplacement(left, right, result)) return result } } if (right.type === "Quantifier") { const leftRep = asRepeatedElement(left) if (leftRep) { const result = getQuantifierRepeatedElementReplacement( [leftRep, right], flags, ) if (result && !ignoreReplacement(left, right, result)) return result } } if (left.type === "Quantifier" && left.max === Infinity) { for (const nested of nestedQuantifiers(right, "start")) { const result = getNestedReplacement(left, nested, flags) if (result) return result } } if (right.type === "Quantifier" && right.max === Infinity) { for (const nested of nestedQuantifiers(left, "end")) { const result = getNestedReplacement(right, nested, flags) if (result) return result } } return null } /** * Returns the combined location of two adjacent elements. 
*/ function getLoc( left: Element, right: Element, { patternSource }: RegExpContext, ): AST.SourceLocation { return patternSource.getAstLocation({ start: Math.min(left.start, right.start), end: Math.max(left.end, right.end), }) } export default createRule("optimal-quantifier-concatenation", { meta: { docs: { description: "require optimal quantifiers for concatenated quantifiers", category: "Best Practices", recommended: true, }, fixable: "code", schema: [], messages: { combine: "{{left}} and {{right}} can be combined into one quantifier {{fix}}.{{cap}}", removeLeft: "{{left}} can be removed because it is already included by {{right}}.{{cap}}", removeRight: "{{right}} can be removed because it is already included by {{left}}.{{cap}}", replace: "{{left}} and {{right}} can be replaced with {{fix}}.{{cap}}", nestedRemove: "{{nested}} can be removed because of {{dominate}}.{{cap}}", nestedReplace: "{{nested}} can be replaced with {{fix}} because of {{dominate}}.{{cap}}", }, type: "suggestion", }, create(context) { /** * Creates a visitor */ function createVisitor( regexpContext: RegExpContext, ): RegExpVisitor.Handlers { const { node, flags, getRegexpLocation, fixReplaceNode } = regexpContext return { onAlternativeEnter(aNode) { for (let i = 0; i < aNode.elements.length - 1; i++) { const left = aNode.elements[i] const right = aNode.elements[i + 1] const replacement = getReplacement(left, right, flags) if (!replacement) { continue } const involvesCapturingGroup = hasCapturingGroup(left) || hasCapturingGroup(right) const cap = involvesCapturingGroup ? " This cannot be fixed automatically because it might change or remove a capturing group." 
: "" if (replacement.type === "Both") { context.report({ node, loc: getLoc(left, right, regexpContext), messageId: replacement.messageId, data: { left: mention(left), right: mention(right), fix: mention(replacement.raw), cap, }, fix: fixReplaceNode(aNode, () => { if (involvesCapturingGroup) { return null } const before = aNode.raw.slice( 0, left.start - aNode.start, ) const after = aNode.raw.slice( right.end - aNode.start, ) return before + replacement.raw + after }), }) } else { context.report({ node, loc: getRegexpLocation(replacement.nested), messageId: replacement.messageId, data: { nested: mention(replacement.nested), dominate: mention(replacement.dominate), fix: mention(replacement.raw), cap, }, fix: fixReplaceNode(replacement.nested, () => { if (involvesCapturingGroup) { return null } return replacement.raw }), }) } } }, } } return defineRegexpVisitor(context, { createVisitor, }) }, })
the_stack
import * as React from 'react';
//import styles from '../../webparts/siteDesigns/components/SiteDesigns.module.scss';
import { ISiteScriptsProps } from './ISiteScriptsProps';
import { escape } from '@microsoft/sp-lodash-subset';
import { ListView, IViewField, SelectionMode, GroupOrder, IGrouping } from "@pnp/spfx-controls-react/lib/ListView";
import { IListViewItems } from './IListViewItems';
import spservice from '../../services/spservices';
import {
  Icon,
  IconType,
  CommandBar,
  Panel,
  PanelType,
  MessageBar,
  MessageBarType,
  Label,
  Spinner,
  SpinnerSize,
  Dialog,
  DialogType,
  DialogFooter,
  PrimaryButton,
  DefaultButton,
} from 'office-ui-fabric-react';
import { WebPartTitle } from "@pnp/spfx-controls-react/lib/WebPartTitle";
import * as strings from 'SiteDesignsWebPartStrings';
import { ISiteScriptsState } from './ISiteScriptsState';
import { SiteDesignInfo, SiteScriptInfo, SiteScriptUpdateInfo, SiteDesignUpdateInfo } from '@pnp/sp';
import { panelMode } from '../../webparts/siteDesigns/components/IEnumPanel';
import styles from './siteScript.module.scss';

// ListView column definitions: an icon column plus Id/Title/Description/Version
// columns whose values come from IListViewItems.
const viewFields: IViewField[] = [
  {
    name: 'Image',
    render: ((item: IListViewItems) => {
      const image = <Icon iconName="FileCode" />;
      return image;
    }),
    maxWidth: 70,
  },
  { name: 'Id', displayName: strings.ListViewColumnIdLabel, sorting: true, isResizable: true, maxWidth: 200 },
  { name: 'Title', displayName: strings.TitleFieldLabel, sorting: true, isResizable: true, maxWidth: 250 },
  { name: 'Description', displayName: strings.ListViewColumnDescriptionLabel, sorting: true, isResizable: true, maxWidth: 250 },
  { name: 'Version', displayName: "Version", sorting: true, isResizable: true, maxWidth: 65 }
];

/**
 * Panel component that lists the Site Scripts attached to a selected Site Design
 * and lets a tenant global admin add, edit, and remove them.
 */
export default class SiteScripts extends React.Component<ISiteScriptsProps, ISiteScriptsState> {
  // Service wrapper for SharePoint site-design/site-script REST calls.
  private spService: spservice;
  // Backing array for the ListView rows; rebuilt on every load.
  private items: IListViewItems[] = [];
  // Set to true once data changed, so the parent is told to refresh on dismiss.
  private refreshParent: boolean = false;
  // Site-script ids currently attached to the selected site design.
  private siteScripts: string[];
  // Lazily-loaded dialogs, split into their own webpack chunks.
  private AddSiteScriptToSiteDesignDialog = React.lazy(() =>
    import('../AddSiteScriptToSiteDesign/AddSiteScriptToSiteDesign' /* webpackChunkName: "addscriptdialog" */));
  private EditScriptDialog = React.lazy(() =>
    import('../../controls/EditSiteScript/EditSiteScript' /* webpackChunkName: "editscriptdialog" */));

  public constructor(props) {
    super(props);
    // Initialize state
    this.state = ({
      items: [],
      isLoading: false,
      disableCommandOption: true,
      showPanel: false,
      selectItem: [],
      panelMode: panelMode.New,
      hasError: false,
      errorMessage: '',
      showPanelAddScript: false,
      showDialogDelete: false,
      deleting: false,
      disableDeleteButton: false,
      showError: false,
      showCommmandEdit: ''
    });
    // Init class services
    this.spService = new spservice(this.props.context);
    // Register event handlers
    this.getSelection = this.getSelection.bind(this);
    this.onNewItem = this.onNewItem.bind(this);
    this.onEditItem = this.onEditItem.bind(this);
    this.onDeleteItem = this.onDeleteItem.bind(this);
    this.onDismissPanel = this.onDismissPanel.bind(this);
    this.onRefresh = this.onRefresh.bind(this);
    this.onCancel = this.onCancel.bind(this);
    this.onDismissAddScriptPanel = this.onDismissAddScriptPanel.bind(this);
    this.onCloseDialog = this.onCloseDialog.bind(this);
    this.onDeleteConfirm = this.onDeleteConfirm.bind(this);
  }

  /**
   * Closes the "add script" panel; reloads the list (and flags the parent
   * for refresh) when the panel reports a change.
   *
   * @private
   * @param {boolean} refresh true when the panel modified data
   * @memberof SiteScripts
   */
  private onDismissAddScriptPanel(refresh: boolean) {
    this.setState({ showPanel: false });
    if (refresh) {
      this.refreshParent = true;
      this.loadSiteScripts();
    }
  }

  // Get Selection Item from List
  /**
   * ListView selection callback: enables the command bar actions when at
   * least one row is selected, otherwise disables them and closes the panel.
   *
   * @private
   * @param {IListViewItems[]} items currently selected rows
   * @memberof SiteScripts
   */
  private getSelection(items: IListViewItems[]) {
    if (items.length > 0) {
      this.setState({
        disableCommandOption: false,
        selectItem: items,
      });
    } else {
      this.setState({
        disableCommandOption: true,
        selectItem: [],
        showPanel: false,
      });
    }
  }

  /**
   * Dismisses this whole panel, telling the parent whether data changed.
   *
   * @private
   * @param {React.MouseEvent<HTMLButtonElement>} ev
   * @memberof SiteScripts
   */
  private onCancel(ev: React.MouseEvent<HTMLButtonElement>) {
    this.props.onDismiss(this.refreshParent);
  }

  // Closes the delete-confirmation dialog without deleting anything.
  private onCloseDialog(ev: React.MouseEvent<HTMLButtonElement>) {
    ev.preventDefault();
    this.setState({ showDialogDelete: false });
  }

  /**
   * Confirms deletion: removes the selected script ids from the site design's
   * SiteScriptIds and pushes the update to SharePoint, then reloads.
   *
   * NOTE(review): `updateSiteScripts` aliases `this.siteScripts`, so the
   * splice mutates the instance field in place — presumably acceptable because
   * loadSiteScripts() reassigns it right after, but worth confirming.
   *
   * @private
   * @param {React.MouseEvent<HTMLButtonElement>} ev
   * @memberof SiteScripts
   */
  private async onDeleteConfirm(ev: React.MouseEvent<HTMLButtonElement>) {
    ev.preventDefault();
    try {
      let updateSiteScripts: string[] = this.siteScripts;
      // Drop every selected script id from the design's script list.
      for (const item of this.state.selectItem) {
        const idx = updateSiteScripts.indexOf(item.Id);
        if (idx !== -1) {
          updateSiteScripts.splice(idx, 1);
        }
      }
      this.setState({ deleting: true, disableDeleteButton: true });
      const siteDesignUpdateInfo: SiteDesignUpdateInfo = { Id: this.props.SiteDesignSelectedItem.Id, SiteScriptIds: updateSiteScripts };
      const result = await this.spService.updateSiteDesign(siteDesignUpdateInfo);
      this.refreshParent = true;
      this.setState({ deleting: false, disableDeleteButton: false, showDialogDelete: false, showError: false });
      this.loadSiteScripts();
    } catch (error) {
      console.log(error.message);
      // Keep the dialog open and surface the error message in-place.
      this.setState({ deleting: false, disableDeleteButton: true, showError: true, errorMessage: error.message });
    }
  }

  /**
   * Panel Dismiss CallBack — closes the edit/new panel and optionally reloads.
   *
   * @param {boolean} [refresh] reload the list when true
   * @returns
   * @memberof SiteScripts
   */
  public async onDismissPanel(refresh?: boolean) {
    this.setState({ showPanel: false });
    if (refresh) {
      await this.loadSiteScripts();
    }
    return;
  }

  // On New Item
  /**
   * Opens the panel in "New" mode to attach another site script.
   *
   * @private
   * @param {React.MouseEvent<HTMLElement>} e
   * @memberof SiteScripts
   */
  private onNewItem(e: React.MouseEvent<HTMLElement>) {
    e.preventDefault();
    this.setState({
      panelMode: panelMode.New,
      showPanel: true,
    });
  }

  /**
   * Opens the delete-confirmation dialog for the current selection.
   *
   * @private
   * @param {React.MouseEvent<HTMLElement>} e
   * @memberof SiteScripts
   */
  private onDeleteItem(e: React.MouseEvent<HTMLElement>) {
    e.preventDefault();
    this.setState({
      panelMode: panelMode.Delete,
      showDialogDelete: true,
    });
  }

  /**
   * Opens the panel in "edit" mode for the single selected script.
   *
   * @private
   * @param {React.MouseEvent<HTMLElement>} e
   * @memberof SiteScripts
   */
  private onEditItem(e: React.MouseEvent<HTMLElement>) {
    e.preventDefault();
    this.setState({ panelMode: panelMode.edit, showPanel: true });
  }

  /**
   * Loads the metadata of every site script attached to the selected site
   * design. Requires the user to be a tenant global admin; otherwise an
   * error message is shown instead of the list.
   *
   * @private
   * @memberof SiteScripts
   */
  private async loadSiteScripts() {
    this.items = [];
    this.setState({ isLoading: true });
    try {
      // check if user is Tenant Global Admin
      const isGlobalAdmin = await this.spService.checkUserIsGlobalAdmin();
      if (isGlobalAdmin) {
        // get SiteScripts for SiteDesign
        const siteDesignInfo: SiteDesignInfo = await this.spService.getSiteDesignMetadata(this.props.SiteDesignSelectedItem.Id);
        this.siteScripts = siteDesignInfo.SiteScriptIds;
        if (this.siteScripts.length > 0) {
          // Fetch each script's metadata sequentially, skipping empty ids.
          for (const siteScriptId of this.siteScripts) {
            if (siteScriptId === "") continue;
            const siteScript: SiteScriptInfo = await this.spService.getSiteScriptMetadata(siteScriptId);
            this.items.push(
              { key: siteScript.Id, Description: siteScript.Description, Id: siteScript.Id, Title: siteScript.Title, Version: siteScript.Version }
            );
          }
        }
        this.setState({ items: this.items, isLoading: false, disableCommandOption: true });
      } else {
        this.setState({ items: this.items, hasError: true, errorMessage: strings.ErrorMessageUserNotAdmin, isLoading: false });
      }
    } catch (error) {
      this.setState({ items: this.items, hasError: true, errorMessage: error.message, isLoading: false });
    }
  }

  /** Refresh — command-bar handler that reloads the script list.
   *
   * @param {React.MouseEvent<HTMLElement>} ev
   * @memberof SiteScripts
   */
  public onRefresh(ev: React.MouseEvent<HTMLElement>) {
    ev.preventDefault();
    // loadSiteScripts
    this.loadSiteScripts();
  }

  /**
   * Component Did Mount — initial data load.
   *
   * @memberof SiteScripts
   */
  public async componentDidMount() {
    // loadSiteScripts
    await this.loadSiteScripts();
  }

  // On Render
  public render(): React.ReactElement<ISiteScriptsProps> {
    return (
      <div>
        <Panel isOpen={this.props.showPanel} onDismiss={this.onCancel} type={PanelType.large} headerText="Site Scripts">
          {/* Header: identifying details of the selected site design */}
          <div>
            <span className={styles.label}>SiteDesign Id:</span>
            <span className={styles.title}>{this.props.SiteDesignSelectedItem.Id}</span>
          </div>
          <div>
            <span className={styles.label}>Title:</span>
            <span className={styles.title}>{this.props.SiteDesignSelectedItem.Title}</span>
          </div>
          <div>
            <span className={styles.label}>WebTemplate:</span>
            <span className={styles.title}>{this.props.SiteDesignSelectedItem.WebTemplate === '64' ? "Team Site" : "Communication Site"}</span>
          </div>
          <br />
          {
            this.state.isLoading ?
              <Spinner size={SpinnerSize.large} label={strings.LoadingLabel} ariaLive="assertive" />
              :
              this.state.hasError ?
                <MessageBar messageBarType={MessageBarType.error}>
                  <span>{this.state.errorMessage}</span>
                </MessageBar>
                :
                <div style={{ marginBottom: 10 }}>
                  <CommandBar
                    items={[
                      {
                        key: 'newItem',
                        name: strings.CommandbarNewLabel,
                        iconProps: { iconName: 'Add' },
                        onClick: this.onNewItem,
                      },
                      {
                        key: 'edit',
                        name: strings.CommandbarEditLabel,
                        iconProps: { iconName: 'Edit' },
                        onClick: this.onEditItem,
                        // Edit only makes sense for exactly one selected row.
                        disabled: this.state.selectItem.length ===0 ? true : this.state.selectItem.length > 1 ? true : false,
                      },
                      {
                        key: 'delete',
                        name: strings.CommandbarDeleteLabel,
                        iconProps: { iconName: 'Delete' },
                        onClick: this.onDeleteItem,
                        disabled: this.state.disableCommandOption,
                      }
                    ]}
                    farItems={[
                      {
                        key: 'refresh',
                        name: strings.CommandbarRefreshLabel,
                        iconProps: { iconName: 'Refresh' },
                        onClick: this.onRefresh,
                      }
                    ]}
                  />
                </div>
          }
          {
            !this.state.hasError && !this.state.isLoading &&
            <ListView
              items={this.state.items}
              viewFields={viewFields}
              compact={false}
              selectionMode={SelectionMode.multiple}
              selection={this.getSelection}
              showFilter={true}
              filterPlaceHolder={strings.SearchPlaceholder}
            />
          }
          {
            this.state.showPanel && this.state.panelMode == panelMode.New &&
            <React.Suspense fallback={<div>Loading...</div>}>
              <this.AddSiteScriptToSiteDesignDialog
                showPanel={this.state.showPanel}
                onDismiss={this.onDismissAddScriptPanel}
                context={this.props.context}
                siteDesignInfo={this.props.SiteDesignSelectedItem}
              />
            </React.Suspense>
          }
          {
            this.state.showPanel && this.state.panelMode == panelMode.edit &&
            <React.Suspense fallback={<div>Loading...</div>}>
              <this.EditScriptDialog
                hideDialog={!this.state.showPanel}
                onDismiss={this.onDismissAddScriptPanel}
                context={this.props.context}
                siteScriptId={this.state.selectItem[0].Id}
              />
            </React.Suspense>
          }
          {/* Delete confirmation dialog.
              NOTE(review): the destructive "Delete" action is the DefaultButton
              while "Cancel" is the PrimaryButton — presumably deliberate so the
              highlighted default is the safe choice; confirm with UX. */}
          <Dialog
            hidden={!this.state.showDialogDelete}
            onDismiss={this.onCloseDialog}
            dialogContentProps={{
              type: DialogType.normal,
              title: strings.DeleteSiteScriptDialogConfirmTitle,
            }}
            modalProps={{
              isBlocking: true,
            }}
          >
            <p>{strings.DeleteSiteScriptDialogConfirmText}</p>
            <br />
            {
              this.state.showError &&
              <div style={{ marginTop: '15px' }}>
                <MessageBar messageBarType={MessageBarType.error} >
                  <span>{this.state.errorMessage}</span>
                </MessageBar>
              </div>
            }
            <br />
            <DialogFooter>
              {
                this.state.deleting &&
                <div style={{ display: "inline-block", marginRight: '10px', verticalAlign: 'middle' }}>
                  <Spinner size={SpinnerSize.small} ariaLive="assertive" />
                </div>
              }
              <DefaultButton onClick={this.onDeleteConfirm} text={strings.ButtonDeleteLabel} disabled={this.state.disableDeleteButton} />
              <PrimaryButton onClick={this.onCloseDialog} text={strings.ButtonCancelLabel} />
            </DialogFooter>
          </Dialog>
        </Panel>
      </div >
    );
  }
}
the_stack
import { CancellationToken } from 'vscode-languageserver';

import * as AnalyzerNodeInfo from '../analyzer/analyzerNodeInfo';
import { AliasDeclaration, Declaration, DeclarationType, isAliasDeclaration } from '../analyzer/declaration';
import {
    areDeclarationsSame,
    createSynthesizedAliasDeclaration,
    getDeclarationsWithUsesLocalNameRemoved,
} from '../analyzer/declarationUtils';
import { getModuleNode, getStringNodeValueRange } from '../analyzer/parseTreeUtils';
import { ParseTreeWalker } from '../analyzer/parseTreeWalker';
import * as ScopeUtils from '../analyzer/scopeUtils';
import { isStubFile, SourceMapper } from '../analyzer/sourceMapper';
import { TypeEvaluator } from '../analyzer/typeEvaluatorTypes';
import { TypeCategory } from '../analyzer/types';
import { throwIfCancellationRequested } from '../common/cancellationUtils';
import { TextRange } from '../common/textRange';
import { ImportAsNode, NameNode, ParseNode, ParseNodeType, StringNode } from '../parser/parseNodes';

// One hit: the matching name/string node plus the text range to highlight
// (for strings the range covers the string's value, not its quotes).
export type CollectionResult = {
    node: NameNode | StringNode;
    range: TextRange;
};

// This walker looks for symbols that are semantically equivalent
// to the requested symbol.
export class DocumentSymbolCollector extends ParseTreeWalker {
    /**
     * Collects all uses of the symbol named by `node`, walking from
     * `startingNode` (defaults to the enclosing module node). Returns an
     * empty list when no module node can be found.
     */
    static collectFromNode(
        node: NameNode,
        evaluator: TypeEvaluator,
        cancellationToken: CancellationToken,
        startingNode?: ParseNode,
        treatModuleInImportAndFromImportSame = false
    ): CollectionResult[] {
        const symbolName = node.value;
        const declarations = this.getDeclarationsForNode(
            node,
            evaluator,
            /* resolveLocalName */ true,
            cancellationToken
        );

        startingNode = startingNode ?? getModuleNode(node);
        if (!startingNode) {
            return [];
        }

        const collector = new DocumentSymbolCollector(
            symbolName,
            declarations,
            evaluator,
            cancellationToken,
            startingNode,
            treatModuleInImportAndFromImportSame
        );

        return collector.collect();
    }

    /**
     * Resolves the declarations for a name node, optionally mapping stub-file
     * declarations back to their implementation files via `sourceMapper`.
     */
    static getDeclarationsForNode(
        node: NameNode,
        evaluator: TypeEvaluator,
        resolveLocalName: boolean,
        token: CancellationToken,
        sourceMapper?: SourceMapper
    ): Declaration[] {
        throwIfCancellationRequested(token);

        const declarations = this._getDeclarationsForNode(node, evaluator);

        const resolvedDeclarations: Declaration[] = [];
        declarations.forEach((decl) => {
            const resolvedDecl = evaluator.resolveAliasDeclaration(decl, resolveLocalName);
            if (resolvedDecl) {
                resolvedDeclarations.push(resolvedDecl);

                // For declarations that live in a stub, also add the matching
                // implementation-file declarations (deduplicated).
                if (sourceMapper && isStubFile(resolvedDecl.path)) {
                    const implDecls = sourceMapper.findDeclarations(resolvedDecl);
                    for (const implDecl of implDecls) {
                        if (implDecl && implDecl.path) {
                            this._addIfUnique(resolvedDeclarations, implDecl);
                        }
                    }
                }
            }
        });

        return resolvedDeclarations;
    }

    private _results: CollectionResult[] = [];
    // String nodes inside __all__ that refer to the target symbol.
    private _dunderAllNameNodes = new Set<StringNode>();

    constructor(
        private _symbolName: string,
        private _declarations: Declaration[],
        private _evaluator: TypeEvaluator,
        private _cancellationToken: CancellationToken,
        private _startingNode: ParseNode,
        private _treatModuleInImportAndFromImportSame = false
    ) {
        super();

        // Don't report strings in __all__ right away, that will
        // break the assumption on the result ordering.
        this._setDunderAllNodes(this._startingNode);
    }

    collect() {
        this.walk(this._startingNode);
        return this._results;
    }

    override walk(node: ParseNode) {
        // Skip subtrees the binder marked as unreachable.
        if (!AnalyzerNodeInfo.isCodeUnreachable(node)) {
            super.walk(node);
        }
    }

    override visitName(node: NameNode): boolean {
        throwIfCancellationRequested(this._cancellationToken);

        // No need to do any more work if the symbol name doesn't match.
        if (node.value !== this._symbolName) {
            return false;
        }

        if (this._declarations.length > 0) {
            const declarations = DocumentSymbolCollector._getDeclarationsForNode(node, this._evaluator);

            if (declarations && declarations.length > 0) {
                // Does this name share a declaration with the symbol of interest?
                if (declarations.some((decl) => this._resultsContainsDeclaration(decl))) {
                    this._addResult(node);
                }
            }
        } else {
            // There were no declarations
            this._addResult(node);
        }

        return false;
    }

    override visitString(node: StringNode): boolean {
        throwIfCancellationRequested(this._cancellationToken);

        // Only strings pre-collected from __all__ count as references.
        if (this._dunderAllNameNodes.has(node)) {
            this._addResult(node);
        }

        return false;
    }

    private _addResult(node: NameNode | StringNode) {
        const range: TextRange = node.nodeType === ParseNodeType.Name ? node : getStringNodeValueRange(node);
        this._results.push({ node, range });
    }

    // True if `declaration` resolves to one of the target declarations,
    // first via local-only alias resolution and then via full resolution.
    private _resultsContainsDeclaration(declaration: Declaration) {
        // Resolve the declaration.
        const resolvedDecl = this._evaluator.resolveAliasDeclaration(declaration, /* resolveLocalNames */ false);
        if (!resolvedDecl) {
            return false;
        }

        // The reference results declarations are already resolved, so we don't
        // need to call resolveAliasDeclaration on them.
        if (
            this._declarations.some((decl) =>
                areDeclarationsSame(decl, resolvedDecl, this._treatModuleInImportAndFromImportSame)
            )
        ) {
            return true;
        }

        // We didn't find the declaration using local-only alias resolution. Attempt
        // it again by fully resolving the alias.
        const resolvedDeclNonlocal = this._getResolveAliasDeclaration(resolvedDecl);
        if (!resolvedDeclNonlocal || resolvedDeclNonlocal === resolvedDecl) {
            return false;
        }

        return this._declarations.some((decl) =>
            areDeclarationsSame(decl, resolvedDeclNonlocal, this._treatModuleInImportAndFromImportSame)
        );
    }

    private _getResolveAliasDeclaration(declaration: Declaration) {
        // TypeEvaluator.resolveAliasDeclaration only resolve alias in AliasDeclaration in the form of
        // "from x import y as [y]" but don't do thing for alias in "import x as [x]"
        // Here, alias should have same name as module name.
        if (isAliasDeclFromImportAsWithAlias(declaration)) {
            return getDeclarationsWithUsesLocalNameRemoved([declaration])[0];
        }

        const resolvedDecl = this._evaluator.resolveAliasDeclaration(declaration, /* resolveLocalNames */ true);
        return isAliasDeclFromImportAsWithAlias(resolvedDecl)
            ? getDeclarationsWithUsesLocalNameRemoved([resolvedDecl])[0]
            : resolvedDecl;

        // Detects "import x as [x]" alias declarations.
        function isAliasDeclFromImportAsWithAlias(decl?: Declaration): decl is AliasDeclaration {
            return (
                !!decl &&
                decl.type === DeclarationType.Alias &&
                decl.node &&
                decl.usesLocalName &&
                decl.node.nodeType === ParseNodeType.ImportAs
            );
        }
    }

    // Pre-collects the __all__ string entries that refer to the target symbol,
    // so visitString can report them in tree order.
    private _setDunderAllNodes(node: ParseNode) {
        if (node.nodeType !== ParseNodeType.Module) {
            return;
        }

        const dunderAllInfo = AnalyzerNodeInfo.getDunderAllInfo(node);
        if (!dunderAllInfo) {
            return;
        }

        const moduleScope = ScopeUtils.getScopeForNode(node);
        if (!moduleScope) {
            return;
        }

        dunderAllInfo.stringNodes.forEach((stringNode) => {
            if (stringNode.value !== this._symbolName) {
                return;
            }

            const symbolInScope = moduleScope.lookUpSymbolRecursive(stringNode.value);
            if (!symbolInScope) {
                return;
            }

            // Only count the string if the module-level symbol it names shares
            // a declaration with the symbol of interest.
            if (!symbolInScope.symbol.getDeclarations().some((d) => this._resultsContainsDeclaration(d))) {
                return;
            }

            this._dunderAllNameNodes.add(stringNode);
        });
    }

    private static _addIfUnique(declarations: Declaration[], itemToAdd: Declaration) {
        for (const def of declarations) {
            if (areDeclarationsSame(def, itemToAdd)) {
                return;
            }
        }

        declarations.push(itemToAdd);
    }

    private static _getDeclarationsForNode(node: NameNode, evaluator: TypeEvaluator): Declaration[] {
        // This can handle symbols brought in by wildcard as long as declarations symbol collector
        // compare against point to actual alias declaration, not one that use local name (ex, import alias)
        if (node.parent?.nodeType !== ParseNodeType.ModuleName) {
            let decls = evaluator.getDeclarationsForNameNode(node) || [];

            if (node.parent?.nodeType === ParseNodeType.ImportFromAs) {
                // Make sure we get the decl for this specific from import statement
                decls = decls.filter((d) => d.node === node.parent);
            }

            // If we can't get decl, see whether we can get type from the node.
            // Some might have synthesized type for the node such as subModule in import X.Y statement.
            if (decls.length === 0) {
                const type = evaluator.getType(node);
                if (type?.category === TypeCategory.Module) {
                    // Synthesize decl for the module.
                    return [createSynthesizedAliasDeclaration(type.filePath)];
                }
            }

            // We would like to make X in import X and import X.Y as Y to match, but path for
            // X in import X and one in import X.Y as Y might not match since path in X.Y will point
            // to X.Y rather than X if import statement has an alias.
            // so, for such case, we put synthesized one so we can treat X in both statement same.
            for (const aliasDecl of decls.filter((d) => isAliasDeclaration(d) && !d.loadSymbolsFromPath)) {
                // NOTE: intentionally shadows the outer `node` parameter.
                const node = (aliasDecl as AliasDeclaration).node;
                if (node.nodeType === ParseNodeType.ImportFromAs) {
                    // from ... import X case, decl in the submodulefallback has the path.
                    continue;
                }

                decls.push(...(evaluator.getDeclarationsForNameNode(node.module.nameParts[0]) || []));
            }

            return decls;
        }

        // We treat module name special in find all references. so symbol highlight or rename on multiple files
        // works even if it is not actually a symbol defined in the file.
        const moduleName = node.parent;
        if (
            moduleName.parent?.nodeType === ParseNodeType.ImportAs ||
            moduleName.parent?.nodeType === ParseNodeType.ImportFrom
        ) {
            const index = moduleName.nameParts.findIndex((n) => n === node);

            // Special case, first module name part.
            if (index === 0) {
                // 1. import X or from X import ...
                let decls: Declaration[] = [];

                // ex, import X as x
                const isImportAsWithAlias =
                    moduleName.nameParts.length === 1 &&
                    moduleName.parent.nodeType === ParseNodeType.ImportAs &&
                    !!moduleName.parent.alias;

                // if "import" has alias, symbol is assigned to alias, not the module.
                const importName = isImportAsWithAlias
                    ? (moduleName.parent as ImportAsNode).alias!.value
                    : moduleName.nameParts[0].value;

                // First, we need to re-use "decls for X" binder has created
                // so that it matches with decls type evaluator returns for "references for X".
                // ex) import X or from .X import ... in init file and etc.
                const symbolWithScope = ScopeUtils.getScopeForNode(node)?.lookUpSymbolRecursive(importName);
                if (symbolWithScope && moduleName.nameParts.length === 1) {
                    decls.push(...symbolWithScope.symbol.getDeclarations().filter((d) => isAliasDeclaration(d)));

                    // If symbols are re-used, then find one that belong to this import statement.
                    if (decls.length > 1) {
                        decls = decls.filter((d) => {
                            d = d as AliasDeclaration;

                            if (d.firstNamePart !== undefined) {
                                // For multiple import statements with sub modules, decl can be re-used.
                                // ex) import X.Y and import X.Z or from .X import ... in init file.
                                // Decls for X will be reused for both import statements, and node will point
                                // to first import statement. For those case, use firstNamePart instead to check.
                                return d.firstNamePart === moduleName.nameParts[0].value;
                            }

                            return d.node === moduleName.parent;
                        });
                    }

                    // ex, import X as x
                    // We have decls for the alias "x" not the module name "X". Convert decls for the "X"
                    if (isImportAsWithAlias) {
                        decls = getDeclarationsWithUsesLocalNameRemoved(decls);
                    }
                }

                // But, also, we need to put decls for module names type evaluator synthesized so that
                // we can match both "import X" and "from X import ..."
                decls.push(
                    ...(evaluator
                        .getDeclarationsForNameNode(moduleName.nameParts[0])
                        ?.filter((d) => isAliasDeclaration(d)) || [])
                );

                return decls;
            }

            if (index > 0) {
                // 2. import X.Y or from X.Y import ....
                // For submodule "Y", we just use synthesized decls from type evaluator.
                // Decls for these sub module don't actually exist in the system. Instead, symbol for Y in
                // "import X.Y" hold onto synthesized module type (without any decl).
                // And "from X.Y import ..." doesn't have any symbol associated module names.
                // they can't be referenced in the module.
                return evaluator.getDeclarationsForNameNode(moduleName.nameParts[index]) || [];
            }

            return [];
        }

        return [];
    }
}
the_stack
import { retrieve, ForcecodeCommand, getAnyNameFromUri } from '.';
import * as vscode from 'vscode';
import {
  fcConnection,
  codeCovViewService,
  FCOauth,
  FCConnection,
  commandViewService,
  dxService,
  notifications,
  FCFile,
  ClassType,
  PXMLMember,
  getHomeDir,
} from '../services';
import { getFileName } from '../parsers';
import { readConfigFile, removeConfigFolder } from '../services';
import { Config } from '../forceCode';
import { updateDecorations } from '../decorators';
import * as path from 'path';
import * as fs from 'fs-extra';

// Hidden command: runs a raw Tooling API query string against the current org.
export class ToolingQuery extends ForcecodeCommand {
  constructor() {
    super();
    this.commandName = 'ForceCode.toolingQuery';
    this.hidden = true;
  }

  public command(context: string) {
    return vscode.window.forceCode.conn.tooling.query(context);
  }
}

// Menu entry that delegates to the ForceCode.createProject command.
export class CreateProject extends ForcecodeCommand {
  constructor() {
    super();
    this.commandName = 'ForceCode.createProjectMenu';
    this.name = 'Creating new project';
    this.hidden = false;
    this.description = 'Create new project';
    this.detail = 'Create a new Forcecode project in a folder you select.';
    this.icon = 'file-directory';
    this.label = 'New Project';
  }

  public command() {
    return vscode.commands.executeCommand('ForceCode.createProject');
  }
}

// Disconnects the given connection, or the current one when none is passed.
export class Logout extends ForcecodeCommand {
  constructor() {
    super();
    this.commandName = 'ForceCode.logout';
    this.name = 'Logging out';
    this.hidden = false;
    this.description = 'Log out from current org';
    this.detail = 'Log out of the current org in this project.';
    this.icon = 'x';
    this.label = 'Log out of Salesforce';
  }

  public command(context: FCConnection | undefined) {
    var conn = context || fcConnection.currentConnection;
    return fcConnection.disconnect(conn);
  }
}

// Logs in as a different user; clears the code-coverage view first.
export class SwitchUser extends ForcecodeCommand {
  constructor() {
    super();
    this.commandName = 'ForceCode.switchUser';
    this.cancelable = true;
    this.name = 'Logging in';
    this.hidden = false;
    this.description = 'Enter the credentials you wish to use.';
    this.detail = 'Log into an org not in the saved usernames list.';
    this.icon = 'key';
    this.label = 'Log in to Salesforce';
  }

  public command(context: FCOauth | FCConnection) {
    codeCovViewService.clear();
    // Accept either a connection (unwrap its org info) or raw org info.
    var orgInfo: FCOauth;
    if (context instanceof FCConnection) {
      orgInfo = context.orgInfo;
    } else {
      orgInfo = context;
    }
    return fcConnection.connect(orgInfo, this.cancellationToken);
  }
}

// Prompts the user when a server-side file change is detected: offers to
// refresh (retrieve from org) or diff against the org version.
export class FileModified extends ForcecodeCommand {
  constructor() {
    super();
    this.commandName = 'ForceCode.fileModified';
    this.cancelable = true;
    this.name = 'Modified file';
    this.hidden = true;
  }

  public command(context: vscode.Uri, selectedResource: string) {
    return vscode.workspace.openTextDocument(context).then(theDoc => {
      return notifications
        .showWarning(
          selectedResource + ' has changed ' + getFileName(theDoc),
          'Refresh',
          'Diff',
          'Dismiss'
        )
        .then(s => {
          if (s === 'Refresh') {
            return retrieve(theDoc.uri, this.cancellationToken);
          } else if (s === 'Diff') {
            return vscode.commands.executeCommand('ForceCode.diff', theDoc.uri);
          }
        });
    });
  }
}

// Hidden command: triggers the workspace file-change scan.
export class CheckForFileChanges extends ForcecodeCommand {
  constructor() {
    super();
    this.commandName = 'ForceCode.checkForFileChanges';
    this.name = 'Getting workspace information';
    this.hidden = true;
  }

  public command() {
    return vscode.window.forceCode.checkForFileChanges();
  }
}

// Reveals the running-tasks tree view (only when logged in).
export class ShowTasks extends ForcecodeCommand {
  constructor() {
    super();
    this.commandName = 'ForceCode.showTasks';
    this.name = 'Show tasks';
    this.hidden = true;
  }

  public command() {
    if (fcConnection.isLoggedIn()) {
      var treePro = vscode.window.createTreeView('ForceCode.treeDataProvider', {
        treeDataProvider: commandViewService,
      });
      return treePro.reveal(commandViewService.getChildren()[0]);
    }
  }
}

// Opens a file path (from the test-coverage view) in a persistent editor tab.
export class OpenOnClick extends ForcecodeCommand {
  constructor() {
    super();
    this.commandName = 'ForceCode.openOnClick';
    this.name = 'Open From TestCov view';
    this.hidden = true;
  }

  public command(context: string) {
    return vscode.workspace
      .openTextDocument(context)
      .then(doc => vscode.window.showTextDocument(doc, { preview: false }));
  }
}

// Switches which test class' coverage is shown as editor line decorations.
export class ChangeCoverageDecoration extends ForcecodeCommand {
  constructor() {
    super();
    this.commandName = 'ForceCode.changeCoverageDecoration';
    this.name = 'Change Coverage Decoration';
    this.hidden = true;
  }

  public command(context: FCFile) {
    var parent = context.getParentFCFile() || context;
    if (context.label) {
      // The label's last space-separated token names the test class;
      // clicking the parent node itself means "overall" coverage.
      var newCoverage = context.label.split(' ').pop();
      if (parent === context) {
        newCoverage = 'overall';
      }
      if (
        parent.getType() === ClassType.CoveredClass ||
        parent.getType() === ClassType.UncoveredClass
      ) {
        // turn on line decorations when user clicks the class
        vscode.window.forceCode.config.showTestCoverage = true;
      }
      return vscode.workspace
        .openTextDocument(parent.getWsMember().path)
        .then(doc => vscode.window.showTextDocument(doc, { preview: false }))
        .then(_res => {
          parent.setCoverageTestClass(newCoverage);
          return updateDecorations();
        });
    }
  }
}

// Hidden command: performs a DX login for a saved username's org, then
// hands the resulting auth info to ForceCode.switchUser.
export class Login extends ForcecodeCommand {
  constructor() {
    super();
    this.commandName = 'ForceCode.login';
    this.hidden = true;
  }

  public command(context: FCOauth | FCConnection) {
    var orgInfo: FCOauth;
    if (context instanceof FCConnection) {
      orgInfo = context.orgInfo;
    } else {
      orgInfo = context;
    }
    const cfg: Config = readConfigFile(orgInfo.username);
    return dxService.login(cfg.url, this.cancellationToken).then(res => {
      return vscode.commands.executeCommand('ForceCode.switchUser', res);
    });
  }
}

// Removes a saved username's .forceCode config folder (after confirmation)
// and disconnects that username's connection.
export class RemoveConfig extends ForcecodeCommand {
  constructor() {
    super();
    this.commandName = 'ForceCode.removeConfig';
    this.hidden = true;
  }

  public command(context: string | FCConnection) {
    var username: string;
    if (context instanceof FCConnection) {
      username = context.orgInfo.username;
    } else {
      username = context;
    }
    return notifications
      .showWarning(
        'This will remove the .forceCode/' + username + ' folder and all contents. Continue?',
        'Yes',
        'No'
      )
      .then(s => {
        if (s === 'Yes') {
          if (removeConfigFolder(username)) {
            return notifications.showInfo(
              '.forceCode/' + username + ' folder removed successfully',
              'OK'
            );
          } else {
            return notifications.showInfo('.forceCode/' + username + ' folder not found', 'OK');
          }
        }
      })
      .then(() => {
        const conn: FCConnection | undefined = fcConnection.getConnByUsername(username);
        return fcConnection.disconnect(conn);
      });
  }
}

// Deletes metadata from the org (and optionally from the workspace), backing
// files up under ~/.forceCode/backup so an org-only delete can be restored.
export class DeleteFile extends ForcecodeCommand {
  constructor() {
    super();
    this.commandName = 'ForceCode.deleteFile';
    this.name = 'Deleting ';
    this.hidden = true;
  }

  // selectedResource = Array (multiple files) right click via explorer
  // context = right click in file or explorer
  // command pallette => both undefined, so check current open file
  public async command(context?: vscode.Uri, selectedResource?: vscode.Uri[]) {
    var toDelete: Set<PXMLMember> = new Set<PXMLMember>();
    var filesToDelete: Set<vscode.Uri> = new Set<vscode.Uri>();
    // check that file is in the project and get tooling type
    if (selectedResource) {
      selectedResource.forEach(resource => {
        filesToDelete.add(resource);
      });
    } else if (context) {
      filesToDelete.add(context);
    } else {
      if (!vscode.window.activeTextEditor) {
        return Promise.resolve();
      }
      filesToDelete.add(vscode.window.activeTextEditor.document.uri);
    }

    if (filesToDelete.size === 0) {
      return Promise.resolve();
    }

    var toDeleteNames: string = 'Are you sure you want to delete the following?\n';
    var toDelString: string = '';
    // Fresh backup folder for this delete operation.
    const backupPathBase: string = path.join(getHomeDir(), '.forceCode', 'backup');
    if (fs.existsSync(backupPathBase)) {
      fs.removeSync(backupPathBase);
    }

    // NOTE(review): manual Promise + async forEach with a counter. This
    // resolves when the last callback finishes, but the callbacks run
    // concurrently and `reject` via .catch() doesn't stop the others —
    // presumably acceptable here; Promise.all over map would be the
    // conventional shape. Left as-is.
    await new Promise((resolve, reject) => {
      var count = 0;
      filesToDelete.forEach(async resource => {
        const toAdd = await getAnyNameFromUri(resource, true).catch(reject);
        if (toAdd) {
          const thePath = resource.fsPath;
          const theMetaPath: string = thePath + '-meta.xml';
          const isDir: boolean = fs.lstatSync(thePath).isDirectory();
          const isMetaData: boolean = thePath.endsWith('-meta.xml');
          const metaExists: boolean = fs.existsSync(theMetaPath);
          const isAura: boolean =
            toAdd.name === 'AuraDefinitionBundle' || toAdd.name === 'LightningComponentBundle';
          // First path segment below the project root = tooling-type folder.
          const ttFoldername: string | undefined = thePath
            .replace(vscode.window.forceCode.projectRoot + path.sep, '')
            .split(path.sep)
            .shift();
          var backupPath: string = path.join(backupPathBase, ttFoldername ? ttFoldername : '');
          backupPath =
            thePath ===
            path.join(vscode.window.forceCode.projectRoot, ttFoldername ? ttFoldername : '')
              ? backupPathBase
              : backupPath;
          const basePathArray = thePath.split(path.sep);
          basePathArray.pop();
          // For Aura/LWC files back up the whole bundle folder.
          const basePath = isDir || !isAura ? thePath : basePathArray.join(path.sep);
          // Copy the source (and its companion -meta.xml, when present)
          // into the backup folder before deleting anything.
          fs.mkdirpSync(backupPath);
          fs.copySync(basePath, path.join(backupPath, path.basename(basePath)), {
            overwrite: true,
            preserveTimestamps: true,
          });
          if (isMetaData || metaExists) {
            const theSourcePath: string = isMetaData
              ? thePath.replace('-meta.xml', '')
              : theMetaPath;
            if (fs.existsSync(theSourcePath)) {
              fs.copySync(theSourcePath, path.join(backupPath, path.basename(theSourcePath)), {
                overwrite: true,
                preserveTimestamps: true,
              });
            }
          }
          if (toAdd.defType) {
            toAdd.name = 'AuraDefinition';
          }
          toAdd.members.forEach(mem => {
            toDeleteNames +=
              mem + (toAdd.defType ? ' ' + toAdd.defType : '') + ': ' + toAdd.name + '\n';
          });
          toDelete.add(toAdd);
          toDelString +=
            resource.fsPath
              .replace(vscode.window.forceCode.workspaceRoot + path.sep, '')
              .replace('-meta.xml', '') + ',';
        }
        count++;
        if (count === filesToDelete.size) resolve();
      });
    });

    if (toDelete.size === 0) {
      return Promise.resolve();
    }

    // ask user if they're sure
    const choice: string | undefined = await vscode.window.showWarningMessage(
      toDeleteNames,
      { modal: true },
      'Yes'
    );
    if (choice !== 'Yes') {
      return Promise.resolve();
    }

    // Drop the trailing comma before sending the list to the org.
    toDelString = toDelString.substr(0, toDelString.length - 1);
    await dxService.deleteSource(toDelString, this.cancellationToken);

    // ask user if they want to delete from workspace
    const delWSChoice = await notifications.showInfo(
      'Metadata deleted from org. Delete from workspace?',
      'Yes',
      'No'
    );
    if (delWSChoice !== 'Yes') {
      // Keep local files: restore them from the backup, then drop the backup.
      fs.copySync(backupPathBase, vscode.window.forceCode.projectRoot, {
        overwrite: true,
        preserveTimestamps: true,
      });
      fs.removeSync(backupPathBase);
      return Promise.resolve();
    }

    fs.removeSync(backupPathBase);
    // delete file(s) from workspace
    filesToDelete.forEach(uri => {
      var thePath: string = uri.fsPath;
      if (fs.existsSync(thePath)) {
        const theMetaPath: string = thePath + '-meta.xml';
        const projPath: string = vscode.window.forceCode.projectRoot + path.sep;
        const isDir: boolean = fs.lstatSync(uri.fsPath).isDirectory();
        const isMetaData: boolean = thePath.endsWith('-meta.xml');
        const metaExists: boolean = fs.existsSync(theMetaPath);
        const isLWC: boolean = thePath.indexOf(projPath + 'lwc' + path.sep) !== -1;
        const isAura: boolean = thePath.indexOf(projPath + 'aura' + path.sep) !== -1;
        // For bundle members, delete the containing bundle folder instead
        // of the single file.
        if (!isDir && (isLWC || (isAura && (metaExists || isMetaData)))) {
          thePath = thePath.substring(0, thePath.lastIndexOf(path.sep) + 1);
        }
        // delete the file/folder
        fs.removeSync(thePath);
        if (!isDir && !isLWC && !isAura && metaExists) {
          // delete the meta.xml file
          fs.removeSync(theMetaPath);
        }
      }
    });
    return Promise.resolve();
  }
}
the_stack
// Model types for the Azure Quota (Microsoft.Quota) resource-provider API.
//
// FIX: `coreClient` is referenced in type positions only, so use a type-only
// import — it is erased from the emitted JavaScript, removes a needless
// runtime dependency edge, and is safe under `isolatedModules`.
import type * as coreClient from "@azure/core-client";

/** Union of the concrete limit payload shapes; `limitObjectType` discriminates. */
export type LimitJsonObjectUnion = LimitJsonObject | LimitObject;

/** Resource usage. */
export interface CurrentUsagesBase {
  /** The resource ID. Server-populated; never serialized in requests. */
  readonly id?: string;
  /** The resource type. Server-populated; never serialized in requests. */
  readonly type?: string;
  /** The resource name. Server-populated; never serialized in requests. */
  readonly name?: string;
  /** Usage properties for the specified resource. */
  properties?: UsagesProperties;
}

/** Usage properties for the specified resource. */
export interface UsagesProperties {
  /** The quota limit properties for this resource. */
  usages?: UsagesObject;
  /**
   * Units of the usage (e.g. Count, Bytes). When requesting quota, echo the
   * **unit** value from the GET response in the PUT request body.
   * Server-populated; never serialized in requests.
   */
  readonly unit?: string;
  /** Resource name provided by the resource provider; use it when requesting quota. */
  name?: ResourceName;
  /** The name of the resource type. */
  resourceType?: string;
  /**
   * ISO 8601 period the usage summary covers (e.g. P1D, PT1M, PT1S). Optional
   * because it is not relevant for all resources (e.g. compute).
   * Server-populated; never serialized in requests.
   */
  readonly quotaPeriod?: string;
  /** Whether quota can be requested for this resource. Server-populated; never serialized. */
  readonly isQuotaApplicable?: boolean;
  /** Additional properties for the specific resource provider. */
  properties?: Record<string, unknown>;
}

/** The resource usages value. */
export interface UsagesObject {
  /** The usages value. */
  value: number;
  /** The quota or usages limit types. */
  usagesType?: UsagesTypes;
}

/** Resource name provided by the resource provider; use it when requesting quota. */
export interface ResourceName {
  /** Resource name. */
  value?: string;
  /** Localized display name. Server-populated; never serialized in requests. */
  readonly localizedValue?: string;
}

/** Error wrapper. */
export interface ExceptionResponse {
  /** API error details. */
  error?: ServiceError;
}

/** API error details. */
export interface ServiceError {
  /** Error code. */
  code?: string;
  /** Error message. */
  message?: string;
  /** List of error details. Server-populated; never serialized in requests. */
  readonly details?: ServiceErrorDetail[];
}

/** Error details. */
export interface ServiceErrorDetail {
  /** Error code. Server-populated; never serialized in requests. */
  readonly code?: string;
  /** Error message. Server-populated; never serialized in requests. */
  readonly message?: string;
}

/** One page of usage records. */
export interface UsagesLimits {
  /** List of usage records. */
  value?: CurrentUsagesBase[];
  /** URI of the next page; null when there are no more pages. */
  nextLink?: string;
}

/** Quota limit. */
export interface CurrentQuotaLimitBase {
  /** The resource ID. Server-populated; never serialized in requests. */
  readonly id?: string;
  /** The resource type. Server-populated; never serialized in requests. */
  readonly type?: string;
  /** The resource name. Server-populated; never serialized in requests. */
  readonly name?: string;
  /** Quota properties for the specified resource (Quotas or Usages API). */
  properties?: QuotaProperties;
}

/** Quota properties for the specified resource. */
export interface QuotaProperties {
  /** Resource quota limit properties. */
  limit?: LimitJsonObjectUnion;
  /**
   * Quota units (e.g. Count, Bytes). When requesting quota, echo the **unit**
   * value from the GET response in the PUT request body.
   * Server-populated; never serialized in requests.
   */
  readonly unit?: string;
  /** Resource name provided by the resource provider; use it when requesting quota. */
  name?: ResourceName;
  /** Resource type name. */
  resourceType?: string;
  /**
   * ISO 8601 period the quota values are summarized over (e.g. P1D, PT1M,
   * PT1S). Optional for resources where the period is irrelevant (e.g.
   * compute). Server-populated; never serialized in requests.
   */
  readonly quotaPeriod?: string;
  /** Whether quota can be requested for this resource. Server-populated; never serialized. */
  readonly isQuotaApplicable?: boolean;
  /** Additional properties for the specific resource provider. */
  properties?: Record<string, unknown>;
}

/** LimitJson abstract base; see {@link LimitJsonObjectUnion}. */
export interface LimitJsonObject {
  /** Polymorphic discriminator identifying the concrete limit type. */
  limitObjectType: "LimitValue";
}

/** One page of quota limits. */
export interface QuotaLimits {
  /** List of quota limits. */
  value?: CurrentQuotaLimitBase[];
  /** URI of the next page; null when there are no more pages. */
  nextLink?: string;
}

/** Details of a single quota request. */
export interface QuotaRequestDetails {
  /** Quota request ID. Server-populated; never serialized in requests. */
  readonly id?: string;
  /** Quota request name. Server-populated; never serialized in requests. */
  readonly name?: string;
  /** Resource type ("Microsoft.Quota/quotas"). Server-populated; never serialized. */
  readonly type?: string;
  /** The quota request status. Server-populated; never serialized in requests. */
  readonly provisioningState?: QuotaRequestState;
  /** User-friendly status message. Server-populated; never serialized in requests. */
  readonly message?: string;
  /** Error details of the quota request. */
  error?: ServiceErrorDetail;
  /**
   * Submission time (ISO 8601: yyyy-MM-ddTHH:mm:ssZ).
   * Server-populated; never serialized in requests.
   */
  readonly requestSubmitTime?: Date;
  /** Per-resource sub-request details. */
  value?: SubRequest[];
}

/** Quota request properties. */
export interface QuotaRequestProperties {
  /** The quota request status. Server-populated; never serialized in requests. */
  readonly provisioningState?: QuotaRequestState;
  /** User-friendly status message. Server-populated; never serialized in requests. */
  readonly message?: string;
  /** Error details of the quota request. */
  error?: ServiceErrorDetail;
  /**
   * Submission time (ISO 8601: yyyy-MM-ddTHH:mm:ssZ).
   * Server-populated; never serialized in requests.
   */
  readonly requestSubmitTime?: Date;
  /** Per-resource sub-request details. */
  value?: SubRequest[];
}

/** A single resource's portion of a quota request. */
export interface SubRequest {
  /** Resource name. */
  name?: ResourceName;
  /** Resource type the quota was requested for. Server-populated; never serialized. */
  readonly resourceType?: string;
  /** Quota limit units (e.g. Count, Bytes); echo the GET **unit** value when requesting quota. */
  unit?: string;
  /** The quota request status. Server-populated; never serialized in requests. */
  readonly provisioningState?: QuotaRequestState;
  /** User-friendly status message. Server-populated; never serialized in requests. */
  readonly message?: string;
  /** Sub-request ID. Server-populated; never serialized in requests. */
  readonly subRequestId?: string;
  /** Resource quota limit properties. */
  limit?: LimitJsonObjectUnion;
}

/** One page of quota request details. */
export interface QuotaRequestDetailsList {
  /** Quota request details. */
  value?: QuotaRequestDetails[];
  /** URI of the next page; null when there are no more pages. */
  nextLink?: string;
}

/** One page of RP operations. */
export interface OperationList {
  value?: OperationResponse[];
  /** URL to get the next page of items. */
  nextLink?: string;
}

/** A single RP operation. */
export interface OperationResponse {
  name?: string;
  display?: OperationDisplay;
  origin?: string;
}

/** Localized operation metadata. */
export interface OperationDisplay {
  /** Provider name. */
  provider?: string;
  /** Resource name. */
  resource?: string;
  /** Operation name. */
  operation?: string;
  /** Operation description. */
  description?: string;
}

/** Common ARM resource properties. */
export interface CommonResourceProperties {
  /** Resource ID. Server-populated; never serialized in requests. */
  readonly id?: string;
  /** Resource name. Server-populated; never serialized in requests. */
  readonly name?: string;
  /** Resource type, e.g. "Microsoft.Quota/quotas". Server-populated; never serialized. */
  readonly type?: string;
}

/** Quota limits request response. */
export interface QuotaLimitsResponse {
  /** Quota limits with the quota request status. */
  value?: CurrentQuotaLimitBase[];
  /** URI of the next page; null when there are no more pages. */
  nextLink?: string;
}

/** Quota change requests information. */
export interface CreateGenericQuotaRequestParameters {
  /** Quota change requests. */
  value?: CurrentQuotaLimitBase[];
}

/** Quota request response for a single resource. */
export interface QuotaRequestOneResourceSubmitResponse {
  /** Quota request ID. Server-populated; never serialized in requests. */
  readonly id?: string;
  /** Quota request name. Server-populated; never serialized in requests. */
  readonly name?: string;
  /** Resource type ("Microsoft.Quota/ServiceLimitRequests"). Server-populated; never serialized. */
  readonly type?: string;
  /** Quota request status. Server-populated; never serialized in requests. */
  readonly provisioningState?: QuotaRequestState;
  /** User-friendly status message. Server-populated; never serialized in requests. */
  readonly message?: string;
  /**
   * Submission time (ISO 8601: yyyy-MM-ddTHH:mm:ssZ).
   * Server-populated; never serialized in requests.
   */
  readonly requestSubmitTime?: Date;
  /** Resource quota limit properties. */
  limit?: LimitObject;
  /** Current usage for the resource. Server-populated; never serialized in requests. */
  readonly currentValue?: number;
  /** Quota limit units (e.g. Count, Bytes); echo the GET **unit** value when requesting quota. */
  unit?: string;
  /** Resource name provided by the resource provider; use it when requesting quota. */
  namePropertiesName?: ResourceName;
  /** Resource type name. */
  resourceType?: string;
  /**
   * ISO 8601 period the quota values are summarized over (e.g. P1D, PT1M,
   * PT1S). Optional where the period is irrelevant (e.g. compute).
   * Server-populated; never serialized in requests.
   */
  readonly quotaPeriod?: string;
  /** Whether quota can be requested for this resource. Server-populated; never serialized. */
  readonly isQuotaApplicable?: boolean;
  /** Error details of the quota request. */
  error?: ServiceErrorDetail;
  /** Additional properties for the specific resource provider. */
  properties?: Record<string, unknown>;
}

/** Quota request response. */
export interface QuotaRequestSubmitResponse {
  /** Quota request ID. Server-populated; never serialized in requests. */
  readonly id?: string;
  /** Quota request name. Server-populated; never serialized in requests. */
  readonly name?: string;
  /** Quota request details. */
  properties?: QuotaRequestProperties;
  /** Resource type ("Microsoft.Quota/quotas"). Server-populated; never serialized. */
  readonly type?: string;
}

/** The 202-Accepted quota request response carrying the quota request ID. */
export interface QuotaRequestSubmitResponse202 {
  /**
   * The quota request ID. To check the request status, use the **id** value in
   * a [Quota Request Status](https://docs.microsoft.com/en-us/rest/api/reserved-vm-instances/quotarequeststatus/get)
   * GET operation. Server-populated; never serialized in requests.
   */
  readonly id?: string;
  /** Operation ID. Server-populated; never serialized in requests. */
  readonly name?: string;
  /** Resource type. Server-populated; never serialized in requests. */
  readonly type?: string;
  /** Quota request status. Server-populated; never serialized in requests. */
  readonly provisioningState?: QuotaRequestState;
  /** User-friendly message. Server-populated; never serialized in requests. */
  readonly message?: string;
  /** Resource quota limit properties. */
  limit?: LimitObject;
  /** Quota limit units (e.g. Count, Bytes); echo the GET **unit** value when requesting quota. */
  unit?: string;
  /** Resource name provided by the resource provider; use it when requesting quota. */
  namePropertiesName?: ResourceName;
  /** Resource type name. */
  resourceType?: string;
  /**
   * ISO 8601 period the quota values are summarized over (e.g. P1D, PT1M,
   * PT1S). Optional where the period is irrelevant (e.g. compute).
   * Server-populated; never serialized in requests.
   */
  readonly quotaPeriod?: string;
  /** Additional properties for the specific resource provider. */
  properties?: Record<string, unknown>;
}

/** The resource quota limit value. */
export type LimitObject = LimitJsonObject & {
  /** Polymorphic discriminator identifying the concrete limit type. */
  limitObjectType: "LimitValue";
  /** The quota/limit value. */
  value: number;
  /** The quota or usages limit types. */
  limitType?: QuotaLimitTypes;
};

/** Headers for the Usages_get operation. */
export interface UsagesGetHeaders {
  /** Current entity state version; opaque, for conditional HTTP requests. */
  eTag?: string;
}

/** Headers for the Usages_list operation. */
export interface UsagesListHeaders {
  /** Current entity state version; opaque, for conditional HTTP requests. */
  eTag?: string;
}

/** Headers for the Usages_listNext operation. */
export interface UsagesListNextHeaders {
  /** Current entity state version; opaque, for conditional HTTP requests. */
  eTag?: string;
}

/** Headers for the Quota_get operation. */
export interface QuotaGetHeaders {
  /** Current entity state version; opaque, for conditional HTTP requests. */
  eTag?: string;
}

/** Headers for the Quota_list operation. */
export interface QuotaListHeaders {
  /** Current entity state version; opaque, for conditional HTTP requests. */
  eTag?: string;
}

/** Headers for the Quota_listNext operation. */
export interface QuotaListNextHeaders {
  /** Current entity state version; opaque, for conditional HTTP requests. */
  eTag?: string;
}

/** Known values of {@link UsagesTypes} that the service accepts. */
export enum KnownUsagesTypes {
  Individual = "Individual",
  Combined = "Combined"
}

/**
 * Defines values for UsagesTypes.
 * {@link KnownUsagesTypes} can be used interchangeably with this alias; the
 * enum lists the values the service is known to support (**Individual**,
 * **Combined**), but any string is accepted.
 */
export type UsagesTypes = string;

/** Known values of {@link LimitType} that the service accepts. */
export enum KnownLimitType {
  LimitValue = "LimitValue"
}

/**
 * Defines values for LimitType.
 * {@link KnownLimitType} can be used interchangeably with this alias; the
 * enum lists the values the service is known to support (**LimitValue**),
 * but any string is accepted.
 */
export type LimitType = string;

/** Known values of {@link QuotaRequestState} that the service accepts. */
export enum KnownQuotaRequestState {
  Accepted = "Accepted",
  Invalid = "Invalid",
  Succeeded = "Succeeded",
  Failed = "Failed",
  InProgress = "InProgress"
}

/**
 * Defines values for QuotaRequestState.
 * {@link KnownQuotaRequestState} can be used interchangeably with this alias;
 * the enum lists the values the service is known to support (**Accepted**,
 * **Invalid**, **Succeeded**, **Failed**, **InProgress**), but any string is
 * accepted.
 */
export type QuotaRequestState = string;

/** Known values of {@link QuotaLimitTypes} that the service accepts. */
export enum KnownQuotaLimitTypes {
  Independent = "Independent",
  Shared = "Shared"
}

/**
 * Defines values for QuotaLimitTypes.
 * {@link KnownQuotaLimitTypes} can be used interchangeably with this alias;
 * the enum lists the values the service is known to support (**Independent**,
 * **Shared**), but any string is accepted.
 */
export type QuotaLimitTypes = string;

/** Optional parameters. */
export interface UsagesGetOptionalParams extends coreClient.OperationOptions {}

/** Contains response data for the get operation. */
export type UsagesGetResponse = UsagesGetHeaders & CurrentUsagesBase;

/** Optional parameters. */
export interface UsagesListOptionalParams extends coreClient.OperationOptions {}

/** Contains response data for the list operation. */
export type UsagesListResponse = UsagesListHeaders & UsagesLimits;

/** Optional parameters. */
export interface UsagesListNextOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the listNext operation. */
export type UsagesListNextResponse = UsagesListNextHeaders & UsagesLimits;

/** Optional parameters. */
export interface QuotaGetOptionalParams extends coreClient.OperationOptions {}

/** Contains response data for the get operation. */
export type QuotaGetResponse = QuotaGetHeaders & CurrentQuotaLimitBase;

/** Optional parameters. */
export interface QuotaCreateOrUpdateOptionalParams
  extends coreClient.OperationOptions {
  /** Delay to wait until next poll, in milliseconds. */
  updateIntervalInMs?: number;
  /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
  resumeFrom?: string;
}

/** Contains response data for the createOrUpdate operation. */
export type QuotaCreateOrUpdateResponse = CurrentQuotaLimitBase;

/** Optional parameters. */
export interface QuotaUpdateOptionalParams
  extends coreClient.OperationOptions {
  /** Delay to wait until next poll, in milliseconds. */
  updateIntervalInMs?: number;
  /** A serialized poller which can be used to resume an existing paused Long-Running-Operation. */
  resumeFrom?: string;
}

/** Contains response data for the update operation. */
export type QuotaUpdateResponse = CurrentQuotaLimitBase;

/** Optional parameters. */
export interface QuotaListOptionalParams extends coreClient.OperationOptions {}

/** Contains response data for the list operation. */
export type QuotaListResponse = QuotaListHeaders & QuotaLimits;

/** Optional parameters. */
export interface QuotaListNextOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the listNext operation. */
export type QuotaListNextResponse = QuotaListNextHeaders & QuotaLimits;

/** Optional parameters. */
export interface QuotaRequestStatusGetOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the get operation. */
export type QuotaRequestStatusGetResponse = QuotaRequestDetails;

/** Optional parameters. */
export interface QuotaRequestStatusListOptionalParams
  extends coreClient.OperationOptions {
  /**
   * OData filter. Supported fields/operators:
   * requestSubmitTime — ge, le, eq, gt, lt;
   * provisioningState eq {QuotaRequestState};
   * resourceName eq {resourceName}.
   */
  filter?: string;
  /** Number of records to return. */
  top?: number;
  /**
   * Continuation token. Only used when a previous response was partial: the
   * **nextLink** there carries a **skiptoken** marking where to resume.
   */
  skiptoken?: string;
}

/** Contains response data for the list operation. */
export type QuotaRequestStatusListResponse = QuotaRequestDetailsList;

/** Optional parameters. */
export interface QuotaRequestStatusListNextOptionalParams
  extends coreClient.OperationOptions {
  /**
   * OData filter. Supported fields/operators:
   * requestSubmitTime — ge, le, eq, gt, lt;
   * provisioningState eq {QuotaRequestState};
   * resourceName eq {resourceName}.
   */
  filter?: string;
  /** Number of records to return. */
  top?: number;
  /**
   * Continuation token. Only used when a previous response was partial: the
   * **nextLink** there carries a **skiptoken** marking where to resume.
   */
  skiptoken?: string;
}

/** Contains response data for the listNext operation. */
export type QuotaRequestStatusListNextResponse = QuotaRequestDetailsList;

/** Optional parameters. */
export interface QuotaOperationListOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the list operation. */
export type QuotaOperationListResponse = OperationList;

/** Optional parameters. */
export interface QuotaOperationListNextOptionalParams
  extends coreClient.OperationOptions {}

/** Contains response data for the listNext operation. */
export type QuotaOperationListNextResponse = OperationList;

/** Optional parameters. */
export interface AzureQuotaExtensionAPIOptionalParams
  extends coreClient.ServiceClientOptions {
  /** server parameter */
  $host?: string;
  /** Api Version */
  apiVersion?: string;
  /** Overrides client endpoint. */
  endpoint?: string;
}
the_stack
// Options UI for the WebP encoder: renders a <form> of quality/effort
// controls and maps the form's fields back onto an `EncodeOptions` object.
import { h, Component } from 'preact';
import { bind } from '../../lib/initial-util';
import { inputFieldCheckedAsNumber, inputFieldValueAsNumber, preventDefault } from '../../lib/util';
import { EncodeOptions, WebPImageHint } from './encoder-meta';
import * as style from '../../components/Options/style.scss';
import Checkbox from '../../components/checkbox';
import Expander from '../../components/expander';
import Select from '../../components/select';
import Range from '../../components/range';
import linkState from 'linkstate';

interface Props {
  options: EncodeOptions;
  // Called with a complete, freshly-built options object on every form change.
  onChange(newOptions: EncodeOptions): void;
}

interface State {
  // Whether the lossy "advanced settings" section is expanded.
  showAdvanced: boolean;
}

// From kLosslessPresets in config_enc.c
// The format is [method, quality].
const losslessPresets:[number, number][] = [
  [0, 0], [1, 20], [2, 25], [3, 30], [3, 50],
  [4, 50], [4, 75], [4, 90], [5, 90], [6, 100],
];
// Preset index used when the current (quality, method) pair matches no preset.
const losslessPresetDefault = 6;

// Maps a (quality, method) pair back to the index of the matching lossless
// preset, falling back to `losslessPresetDefault` when nothing matches.
function determineLosslessQuality(quality: number, method: number): number {
  const index = losslessPresets.findIndex(
    ([presetMethod, presetQuality]) => presetMethod === method && presetQuality === quality,
  );
  if (index !== -1) return index;
  // Quality doesn't match one of the presets.
  // This can happen when toggling 'lossless'.
  return losslessPresetDefault;
}

export default class WebPEncoderOptions extends Component<Props, State> {
  state: State = {
    showAdvanced: false,
  };

  // Single change handler shared by every field: re-reads the whole form into
  // a fresh EncodeOptions and hands it to the parent. Fields absent from the
  // currently-rendered variant (lossy vs lossless) fall back to the values in
  // the current `options` via the second argument of the inputField* helpers.
  @bind
  onChange(event: Event) {
    const form = (event.currentTarget as HTMLInputElement).closest('form') as HTMLFormElement;
    const lossless = inputFieldCheckedAsNumber(form.lossless);
    const { options } = this.props;
    const losslessPresetValue = inputFieldValueAsNumber(
      form.lossless_preset,
      determineLosslessQuality(options.quality, options.method),
    );
    const newOptions: EncodeOptions = {
      // Copy over options the form doesn't care about, eg emulate_jpeg_size
      ...options,
      // And now stuff from the form:
      lossless,
      // Special-cased inputs:
      // In lossless mode, the quality is derived from the preset.
      quality: lossless ?
        losslessPresets[losslessPresetValue][1] :
        inputFieldValueAsNumber(form.quality, options.quality),
      // In lossless mode, the method is derived from the preset.
      // NOTE(review): the field is named `method_input`, presumably to avoid
      // clashing with HTMLFormElement#method — confirm before renaming.
      method: lossless ?
        losslessPresets[losslessPresetValue][0] :
        inputFieldValueAsNumber(form.method_input, options.method),
      image_hint: inputFieldCheckedAsNumber(form.image_hint, options.image_hint) ?
        WebPImageHint.WEBP_HINT_GRAPH :
        WebPImageHint.WEBP_HINT_DEFAULT,
      // .checked
      exact: inputFieldCheckedAsNumber(form.exact, options.exact),
      alpha_compression: inputFieldCheckedAsNumber(
        form.alpha_compression,
        options.alpha_compression,
      ),
      autofilter: inputFieldCheckedAsNumber(form.autofilter, options.autofilter),
      filter_type: inputFieldCheckedAsNumber(form.filter_type, options.filter_type),
      use_sharp_yuv: inputFieldCheckedAsNumber(form.use_sharp_yuv, options.use_sharp_yuv),
      // .value
      // The slider shows near_lossless inverted (as 'Slight loss'), so the
      // inversion is undone here; mirrors `100 - options.near_lossless` in
      // _losslessSpecificOptions.
      near_lossless: 100 - inputFieldValueAsNumber(form.near_lossless, 100 - options.near_lossless),
      alpha_quality: inputFieldValueAsNumber(form.alpha_quality, options.alpha_quality),
      alpha_filtering: inputFieldValueAsNumber(form.alpha_filtering, options.alpha_filtering),
      sns_strength: inputFieldValueAsNumber(form.sns_strength, options.sns_strength),
      filter_strength: inputFieldValueAsNumber(form.filter_strength, options.filter_strength),
      // filter_sharpness is also shown inverted; mirrors
      // `7 - options.filter_sharpness` in _lossySpecificOptions.
      filter_sharpness: 7 - inputFieldValueAsNumber(form.filter_sharpness, 7 - options.filter_sharpness),
      pass: inputFieldValueAsNumber(form.pass, options.pass),
      preprocessing: inputFieldValueAsNumber(form.preprocessing, options.preprocessing),
      segments: inputFieldValueAsNumber(form.segments, options.segments),
      partitions: inputFieldValueAsNumber(form.partitions, options.partitions),
    };
    this.props.onChange(newOptions);
  }

  // Controls rendered only while 'lossless' is checked.
  private _losslessSpecificOptions(options: EncodeOptions) {
    return (
      <div key="lossless">
        <div class={style.optionOneCell}>
          <Range
            name="lossless_preset"
            min="0"
            max="9"
            value={determineLosslessQuality(options.quality, options.method)}
            onInput={this.onChange}
          >
            Effort:
          </Range>
        </div>
        <div class={style.optionOneCell}>
          <Range
            name="near_lossless"
            min="0"
            max="100"
            value={'' + (100 - options.near_lossless)}
            onInput={this.onChange}
          >
            Slight loss:
          </Range>
        </div>
        <label class={style.optionInputFirst}>
          {/* Although there are 3 different kinds of image hint, webp only
            seems to do something with the 'graph' type, and I don't really
            understand what it does. */}
          <Checkbox
            name="image_hint"
            checked={options.image_hint === WebPImageHint.WEBP_HINT_GRAPH}
            onChange={this.onChange}
          />
          Discrete tone image
        </label>
      </div>
    );
  }

  // Controls rendered only while 'lossless' is unchecked; the advanced block
  // is hidden behind a local showAdvanced toggle.
  private _lossySpecificOptions(options: EncodeOptions) {
    const { showAdvanced } = this.state;
    return (
      <div key="lossy">
        <div class={style.optionOneCell}>
          <Range
            name="method_input"
            min="0"
            max="6"
            value={options.method}
            onInput={this.onChange}
          >
            Effort:
          </Range>
        </div>
        <div class={style.optionOneCell}>
          <Range
            name="quality"
            min="0"
            max="100"
            step="0.1"
            value={options.quality}
            onInput={this.onChange}
          >
            Quality:
          </Range>
        </div>
        <label class={style.optionInputFirst}>
          <Checkbox
            checked={showAdvanced}
            onChange={linkState(this, 'showAdvanced')}
          />
          Show advanced settings
        </label>
        <Expander>
          {showAdvanced ?
            <div>
              <label class={style.optionInputFirst}>
                <Checkbox
                  name="alpha_compression"
                  checked={!!options.alpha_compression}
                  onChange={this.onChange}
                />
                Compress alpha
              </label>
              <div class={style.optionOneCell}>
                <Range
                  name="alpha_quality"
                  min="0"
                  max="100"
                  value={options.alpha_quality}
                  onInput={this.onChange}
                >
                  Alpha quality:
                </Range>
              </div>
              <div class={style.optionOneCell}>
                <Range
                  name="alpha_filtering"
                  min="0"
                  max="2"
                  value={options.alpha_filtering}
                  onInput={this.onChange}
                >
                  Alpha filter quality:
                </Range>
              </div>
              <label class={style.optionInputFirst}>
                <Checkbox
                  name="autofilter"
                  checked={!!options.autofilter}
                  onChange={this.onChange}
                />
                Auto adjust filter strength
              </label>
              {/* Manual filter strength only applies when autofilter is off. */}
              <Expander>
                {options.autofilter ?
                  null :
                  <div class={style.optionOneCell}>
                    <Range
                      name="filter_strength"
                      min="0"
                      max="100"
                      value={options.filter_strength}
                      onInput={this.onChange}
                    >
                      Filter strength:
                    </Range>
                  </div>
                }
              </Expander>
              <label class={style.optionInputFirst}>
                <Checkbox
                  name="filter_type"
                  checked={!!options.filter_type}
                  onChange={this.onChange}
                />
                Strong filter
              </label>
              {/* Rendered inverted; onChange undoes the inversion. */}
              <div class={style.optionOneCell}>
                <Range
                  name="filter_sharpness"
                  min="0"
                  max="7"
                  value={7 - options.filter_sharpness}
                  onInput={this.onChange}
                >
                  Filter sharpness:
                </Range>
              </div>
              <label class={style.optionInputFirst}>
                <Checkbox
                  name="use_sharp_yuv"
                  checked={!!options.use_sharp_yuv}
                  onChange={this.onChange}
                />
                Sharp RGB→YUV conversion
              </label>
              <div class={style.optionOneCell}>
                <Range
                  name="pass"
                  min="1"
                  max="10"
                  value={options.pass}
                  onInput={this.onChange}
                >
                  Passes:
                </Range>
              </div>
              {/* NOTE(review): 'Spacial' is a typo for 'Spatial' in UI copy;
                left untouched here since it is runtime-rendered text. */}
              <div class={style.optionOneCell}>
                <Range
                  name="sns_strength"
                  min="0"
                  max="100"
                  value={options.sns_strength}
                  onInput={this.onChange}
                >
                  Spacial noise shaping:
                </Range>
              </div>
              <label class={style.optionTextFirst}>
                Preprocess:
                <Select
                  name="preprocessing"
                  value={options.preprocessing}
                  onChange={this.onChange}
                >
                  <option value="0">None</option>
                  <option value="1">Segment smooth</option>
                  <option value="2">Pseudo-random dithering</option>
                </Select>
              </label>
              <div class={style.optionOneCell}>
                <Range
                  name="segments"
                  min="1"
                  max="4"
                  value={options.segments}
                  onInput={this.onChange}
                >
                  Segments:
                </Range>
              </div>
              <div class={style.optionOneCell}>
                <Range
                  name="partitions"
                  min="0"
                  max="3"
                  value={options.partitions}
                  onInput={this.onChange}
                >
                  Partitions:
                </Range>
              </div>
            </div>
            : null
          }
        </Expander>
      </div>
    );
  }

  render({ options }: Props) {
    // I'm rendering both lossy and lossless forms, as it becomes much easier when
    // gathering the data.
    return (
      <form class={style.optionsSection} onSubmit={preventDefault}>
        <label class={style.optionInputFirst}>
          <Checkbox
            name="lossless"
            checked={!!options.lossless}
            onChange={this.onChange}
          />
          Lossless
        </label>
        {options.lossless ?
          this._losslessSpecificOptions(options) :
          this._lossySpecificOptions(options)
        }
        <label class={style.optionInputFirst}>
          <Checkbox
            name="exact"
            checked={!!options.exact}
            onChange={this.onChange}
          />
          Preserve transparent data
        </label>
      </form>
    );
  }
}
the_stack
// Review notes: tsserver unit tests for the `rename` command. They pin down:
//   1. `fileToRename` responses when renaming a module specifier, gated by the
//      `allowRenameOfImportPath` preference (host-wide and per-file),
//   2. emission of `prefixText`/`suffixText` on rename locations, gated by
//      `providePrefixAndSuffixTextForRename` (host-wide and per-file),
//   3. that the preference consulted is that of the file where the rename was
//      *initiated*, not of each file containing a rename location.
// The deepEqual payloads are exact protocol snapshots — do not "tidy" them.
namespace ts.projectSystem {
    describe("unittests:: tsserver:: rename", () => {
        it("works with fileToRename", () => {
            const aTs: File = { path: "/a.ts", content: "export const a = 0;" };
            const bTs: File = { path: "/b.ts", content: 'import { a } from "./a";' };
            const session = createSession(createServerHost([aTs, bTs]));
            openFilesForSession([bTs], session);

            // rename fails with allowRenameOfImportPath disabled
            const response1 = executeSessionRequest<protocol.RenameRequest, protocol.RenameResponse>(session, protocol.CommandTypes.Rename, protocolFileLocationFromSubstring(bTs, 'a";'));
            assert.deepEqual<protocol.RenameResponseBody | undefined>(response1, {
                info: { canRename: false, localizedErrorMessage: "You cannot rename this element." },
                locs: [],
            });

            // rename succeeds with allowRenameOfImportPath enabled in host
            session.getProjectService().setHostConfiguration({ preferences: { allowRenameOfImportPath: true } });
            const response2 = executeSessionRequest<protocol.RenameRequest, protocol.RenameResponse>(session, protocol.CommandTypes.Rename, protocolFileLocationFromSubstring(bTs, 'a";'));
            assert.deepEqual<protocol.RenameResponseBody | undefined>(response2, {
                info: {
                    canRename: true,
                    fileToRename: aTs.path,
                    displayName: aTs.path,
                    fullDisplayName: aTs.path,
                    kind: ScriptElementKind.moduleElement,
                    kindModifiers: "",
                    triggerSpan: protocolTextSpanFromSubstring(bTs.content, "a", { index: 1 }),
                },
                locs: [{ file: bTs.path, locs: [ protocolRenameSpanFromSubstring({ fileText: bTs.content, text: "./a", contextText: bTs.content }) ] }],
            });

            // rename succeeds with allowRenameOfImportPath enabled in file
            // (host-wide preference is reset to false first, so this proves the
            // per-file setting alone is sufficient)
            session.getProjectService().setHostConfiguration({ preferences: { allowRenameOfImportPath: false } });
            session.getProjectService().setHostConfiguration({ file: "/b.ts", formatOptions: {}, preferences: { allowRenameOfImportPath: true } });
            const response3 = executeSessionRequest<protocol.RenameRequest, protocol.RenameResponse>(session, protocol.CommandTypes.Rename, protocolFileLocationFromSubstring(bTs, 'a";'));
            assert.deepEqual<protocol.RenameResponseBody | undefined>(response3, {
                info: {
                    canRename: true,
                    fileToRename: aTs.path,
                    displayName: aTs.path,
                    fullDisplayName: aTs.path,
                    kind: ScriptElementKind.moduleElement,
                    kindModifiers: "",
                    triggerSpan: protocolTextSpanFromSubstring(bTs.content, "a", { index: 1 }),
                },
                locs: [{ file: bTs.path, locs: [ protocolRenameSpanFromSubstring({ fileText: bTs.content, text: "./a", contextText: bTs.content }) ] }],
            });
        });

        it("works with prefixText and suffixText when enabled", () => {
            const aTs: File = { path: "/a.ts", content: "const x = 0; const o = { x };" };
            const host = createServerHost([aTs]);
            const session = createSession(host);
            openFilesForSession([aTs], session);

            // rename with prefixText and suffixText disabled
            const response1 = executeSessionRequest<protocol.RenameRequest, protocol.RenameResponse>(session, protocol.CommandTypes.Rename, protocolFileLocationFromSubstring(aTs, "x"));
            assert.deepEqual<protocol.RenameResponseBody | undefined>(response1, {
                info: {
                    canRename: true,
                    fileToRename: undefined,
                    displayName: "x",
                    fullDisplayName: "x",
                    kind: ScriptElementKind.constElement,
                    kindModifiers: ScriptElementKindModifier.none,
                    triggerSpan: protocolTextSpanFromSubstring(aTs.content, "x"),
                },
                locs: [
                    {
                        file: aTs.path,
                        locs: [
                            protocolRenameSpanFromSubstring({ fileText: aTs.content, text: "x", contextText: "const x = 0;" }),
                            protocolRenameSpanFromSubstring({ fileText: aTs.content, text: "x", options: { index: 1 } }),
                        ],
                    },
                ],
            });

            // rename with prefixText and suffixText enabled in host
            // (the shorthand-property occurrence `{ x }` now gets prefixText "x: ")
            session.getProjectService().setHostConfiguration({ preferences: { providePrefixAndSuffixTextForRename: true } });
            const response2 = executeSessionRequest<protocol.RenameRequest, protocol.RenameResponse>(session, protocol.CommandTypes.Rename, protocolFileLocationFromSubstring(aTs, "x"));
            assert.deepEqual<protocol.RenameResponseBody | undefined>(response2, {
                info: {
                    canRename: true,
                    fileToRename: undefined,
                    displayName: "x",
                    fullDisplayName: "x",
                    kind: ScriptElementKind.constElement,
                    kindModifiers: ScriptElementKindModifier.none,
                    triggerSpan: protocolTextSpanFromSubstring(aTs.content, "x"),
                },
                locs: [
                    {
                        file: aTs.path,
                        locs: [
                            protocolRenameSpanFromSubstring({ fileText: aTs.content, text: "x", contextText: "const x = 0;" }),
                            protocolRenameSpanFromSubstring({ fileText: aTs.content, text: "x", options: { index: 1 }, prefixSuffixText: { prefixText: "x: " } }),
                        ],
                    },
                ],
            });

            // rename with prefixText and suffixText enabled for file
            // (host-wide preference reset to false; per-file setting must win)
            session.getProjectService().setHostConfiguration({ preferences: { providePrefixAndSuffixTextForRename: false } });
            session.getProjectService().setHostConfiguration({ file: "/a.ts", formatOptions: {}, preferences: { providePrefixAndSuffixTextForRename: true } });
            const response3 = executeSessionRequest<protocol.RenameRequest, protocol.RenameResponse>(session, protocol.CommandTypes.Rename, protocolFileLocationFromSubstring(aTs, "x"));
            assert.deepEqual<protocol.RenameResponseBody | undefined>(response3, {
                info: {
                    canRename: true,
                    fileToRename: undefined,
                    displayName: "x",
                    fullDisplayName: "x",
                    kind: ScriptElementKind.constElement,
                    kindModifiers: ScriptElementKindModifier.none,
                    triggerSpan: protocolTextSpanFromSubstring(aTs.content, "x"),
                },
                locs: [
                    {
                        file: aTs.path,
                        locs: [
                            protocolRenameSpanFromSubstring({ fileText: aTs.content, text: "x", contextText: "const x = 0;" }),
                            protocolRenameSpanFromSubstring({ fileText: aTs.content, text: "x", options: { index: 1 }, prefixSuffixText: { prefixText: "x: " } }),
                        ],
                    },
                ],
            });
        });

        it("rename behavior is based on file of rename initiation", () => {
            const aTs: File = { path: "/a.ts", content: "const x = 1; export { x };" };
            const bTs: File = { path: "/b.ts", content: `import { x } from "./a"; const y = x + 1;` };
            const host = createServerHost([aTs, bTs]);
            const session = createSession(host);
            openFilesForSession([aTs, bTs], session);

            // rename from file with prefixText and suffixText enabled
            // (initiated in /a.ts, where the preference is on: the export
            // specifier gets suffixText " as x")
            session.getProjectService().setHostConfiguration({ file: "/a.ts", formatOptions: {}, preferences: { providePrefixAndSuffixTextForRename: true } });
            const response1 = executeSessionRequest<protocol.RenameRequest, protocol.RenameResponse>(session, protocol.CommandTypes.Rename, protocolFileLocationFromSubstring(aTs, "x"));
            assert.deepEqual<protocol.RenameResponseBody | undefined>(response1, {
                info: {
                    canRename: true,
                    fileToRename: undefined,
                    displayName: "x",
                    fullDisplayName: "x",
                    kind: ScriptElementKind.constElement,
                    kindModifiers: ScriptElementKindModifier.none,
                    triggerSpan: protocolTextSpanFromSubstring(aTs.content, "x"),
                },
                locs: [
                    {
                        file: aTs.path,
                        locs: [
                            protocolRenameSpanFromSubstring({ fileText: aTs.content, text: "x", contextText: "const x = 1;" }),
                            protocolRenameSpanFromSubstring({ fileText: aTs.content, text: "x", options: { index: 2 }, contextText: "export { x };", prefixSuffixText: { suffixText: " as x" } }),
                        ],
                    },
                ],
            });

            // rename from file with prefixText and suffixText disabled
            // (initiated in /b.ts, which has no per-file preference: rename
            // crosses into /a.ts and no prefix/suffix text is produced)
            const response2 = executeSessionRequest<protocol.RenameRequest, protocol.RenameResponse>(session, protocol.CommandTypes.Rename, protocolFileLocationFromSubstring(bTs, "x"));
            assert.deepEqual<protocol.RenameResponseBody | undefined>(response2, {
                info: {
                    canRename: true,
                    fileToRename: undefined,
                    displayName: "x",
                    fullDisplayName: "x",
                    kind: ScriptElementKind.alias,
                    kindModifiers: ScriptElementKindModifier.none,
                    triggerSpan: protocolTextSpanFromSubstring(bTs.content, "x"),
                },
                locs: [
                    {
                        file: bTs.path,
                        locs: [
                            protocolRenameSpanFromSubstring({ fileText: bTs.content, text: "x", contextText: `import { x } from "./a";` }),
                            protocolRenameSpanFromSubstring({ fileText: bTs.content, text: "x", options: { index: 1 }, })
                        ]
                    },
                    {
                        file: aTs.path,
                        locs: [
                            protocolRenameSpanFromSubstring({ fileText: aTs.content, text: "x", contextText: "const x = 1;" }),
                            protocolRenameSpanFromSubstring({ fileText: aTs.content, text: "x", options: { index: 2 }, contextText: "export { x };", }),
                        ],
                    },
                ],
            });
        });
    });
}
the_stack
import { d3, c3, initChart } from './c3-helper' describe('c3 chart axis', function() { 'use strict' var chart var args: any = { data: { columns: [ ['data1', 30, 200, 100, 400, 150, 250], ['data2', 50, 20, 10, 40, 15, 25], ['data3', 150, 120, 110, 140, 115, 125] ] }, axis: { y: { tick: { values: null, count: undefined } }, y2: { tick: { values: null, count: undefined } } } } beforeEach(function(done) { chart = initChart(chart, args, done) }) describe('axis.y.tick.count', function() { describe('with only 1 tick on y axis', function() { beforeAll(function() { args.axis.y.tick.count = 1 }) it('should have only 1 tick on y axis', function() { var ticksSize = d3 .select('.c3-axis-y') .selectAll('g.tick') .size() expect(ticksSize).toBe(1) }) }) describe('with 2 ticks on y axis', function() { beforeAll(function() { args.axis.y.tick.count = 2 }) it('should have 2 ticks on y axis', function() { var ticksSize = d3 .select('.c3-axis-y') .selectAll('g.tick') .size() expect(ticksSize).toBe(2) }) }) describe('with 3 ticks on y axis', function() { beforeAll(function() { args.axis.y.tick.count = 3 }) it('should have 3 ticks on y axis', function() { var ticksSize = d3 .select('.c3-axis-y') .selectAll('g.tick') .size() expect(ticksSize).toBe(3) }) }) }) describe('axis.y.tick.values', function() { var values = [100, 500] describe('with only 2 ticks on y axis', function() { beforeAll(function() { args.axis.y.tick.values = values }) it('should have only 2 tick on y axis', function() { var ticksSize = d3 .select('.c3-axis-y') .selectAll('g.tick') .size() expect(ticksSize).toBe(2) }) it('should have specified tick texts', function() { d3.select('.c3-axis-y') .selectAll('g.tick') .each(function(d, i) { var text = d3 .select(this) .select('text') .text() expect(+text).toBe(values[i]) }) }) }) }) describe('axis x timeseries with seconds', function() { beforeAll(function() { args = { data: { type: 'line', columns: [ ['epoch', 1401879600000, 1401883200000, 1401886800000], ['y', 1955, 2419, 
2262] ], xs: { y: 'epoch' } }, axis: { x: { type: 'timeseries', min: new Date(1401879600000), max: new Date(1401969600000), localtime: false } } } }) it('should have 3 ticks on x axis', function() { var ticksSize = d3 .select('.c3-axis-x') .selectAll('g.tick') .size() expect(ticksSize).toBe(3) }) it('should have specified 1 hour intervals', function() { var prevValue d3.select('.c3-axis-x') .selectAll('g.tick') .each(function(d: any, i) { if (i !== 0) { var result = d - prevValue expect(result).toEqual(3600000) // expressed in milliseconds } prevValue = d }) }) describe('changing min x time and columns', function() { beforeAll(function() { args.axis.x.min = new Date(1401876000000) args.axis.x.max = new Date(1401876075000) args.data.columns = [ [ 'epoch', 1401876000000, 1401876015000, 1401876030000, 1401876045000, 1401876060000, 1401876075000 ], ['y', 1968, 1800, 1955, 2419, 2262, 1940] ] }) it('should have 6 ticks on x axis', function() { var ticksSize = d3 .select('.c3-axis-x') .selectAll('g.tick') .size() expect(ticksSize).toBe(6) // the count starts at initial value and increments by the set interval }) it('should have specified 15 seconds intervals', function() { var prevValue d3.select('.c3-axis-x') .selectAll('g.tick') .each(function(d: any, i) { if (i !== 0) { var result = d - prevValue expect(result).toEqual(15000) // expressed in milliseconds } prevValue = d }) }) describe('with axis.x.time.format %Y-%m-%d %H:%M:%S', function() { beforeAll(function() { args.axis.x.tick = { format: '%M:%S' // https://github.com/mbostock/d3/wiki/Time-Formatting#wiki-format } }) var textDates = ['00:00', '00:15', '00:30', '00:45', '01:00', '01:15'] it('should format x ticks as dates with time', function() { var ticks = d3 .select('.c3-axis-x') .selectAll('g.tick') .selectAll('tspan') .each(function(d: any) { expect(d.splitted).toEqual(textDates[d.index]) }) expect(ticks.size()).toBe(6) }) }) }) }) describe('axis x timeseries with iso dates', function() { beforeAll(function() 
{ args = { data: { type: 'line', columns: [ ['epoch', 1527811200000, 1527897600000, 1527984000000], ['y', 1955, 2419, 2262] ], xs: { y: 'epoch' } }, axis: { x: { type: 'timeseries', min: new Date('2018-06-01'), max: new Date('2018-06-03'), localtime: false, tick: { format: '%Y-%m-%dT%H:%M:%S' // https://github.com/mbostock/d3/wiki/Time-Formatting#wiki-format } } } } }) var textDates = [ '2018-06-01T00:00:00', '2018-06-02T00:00:00', '2018-06-03T00:00:00' ] it('should format x ticks as dates', function() { var ticks = d3 .select('.c3-axis-x') .selectAll('g.tick') .selectAll('tspan') .each(function(d: any) { expect(d.splitted).toEqual(textDates[d.index]) }) expect(ticks.size()).toBe(3) }) }) describe('axis y timeseries', function() { beforeAll(function() { args = { data: { columns: [['times', 60000, 120000, 180000, 240000]] }, axis: { y: { type: 'timeseries', tick: { time: {} } } } } }) it('should have 7 ticks on y axis', function() { var ticksSize = d3 .select('.c3-axis-y') .selectAll('g.tick') .size() expect(ticksSize).toBe(7) // the count starts at initial value and increments by the set interval }) it('should have specified 30 second intervals', function() { var prevValue d3.select('.c3-axis-y') .selectAll('g.tick') .each(function(d: any, i) { if (i !== 0) { var result = d - prevValue expect(result).toEqual(30000) // expressed in milliseconds } prevValue = d }) }) describe('with axis.y.time', function() { beforeAll(function() { args.axis.y.tick.time = { type: d3.timeSecond, interval: 60 } }) it('should have 4 ticks on y axis', function() { var ticksSize = d3 .select('.c3-axis-y') .selectAll('g.tick') .size() expect(ticksSize).toBe(4) // the count starts at initial value and increments by the set interval }) it('should have specified 60 second intervals', function() { var prevValue d3.select('.c3-axis-y') .selectAll('g.tick') .each(function(d: any, i) { if (i !== 0) { var result = d - prevValue expect(result).toEqual(60000) // expressed in milliseconds } prevValue 
= d }) }) }) }) describe('axis.y.type', function() { describe('type=log', function() { beforeAll(function() { args = { data: { columns: [ ['linear', 318, 37, 0, 4, 0, 1], ['log', 318, 37, 0, 4, 0, 1] ], type: 'bar', axes: { log: 'y', linear: 'y2' }, labels: true }, axis: { y: { type: 'log' }, y2: { show: true } } } }) it('should have bars from y bigger than y2', function() { expect( (d3.select('.c3-bars-log .c3-bar-5').node() as any).getBBox().height ).toBeGreaterThan( (d3.select('.c3-bars-linear .c3-bar-5').node() as any).getBBox() .height ) }) it('should not have truncated data label', () => { const text = d3.select('.c3-texts-log .c3-text-0').node() as any expect(text).not.toBeUndefined() const bbox = text.getBBox() expect(Math.abs(bbox.y) - bbox.height).toBeGreaterThan(0) }) }) }) describe('axis.x.tick.values', function() { describe('formatted correctly when negative', function() { var xValues = [-3.3, -2.2, -1.1, 1.1, 2.2, 3.3] beforeEach(function() { args.data = { x: 'x', columns: [ ['x'].concat(xValues as any), ['data1', 30, 200, 100, 400, 150, 250] ] } }) it('should not generate whole number for negative values', function() { var tickValues = [] d3.select('.c3-axis-x') .selectAll('g.tick') .selectAll('tspan') .each(function(d: any, i) { expect(tickValues.push(parseFloat(d.splitted)) === xValues[i]) }) }) }) describe('function is provided', function() { var tickGenerator = function() { var values = [] for (var i = 0; i <= 300; i += 50) { values.push(i) } return values } beforeEach(function() { args.axis.x = { tick: { values: tickGenerator } } chart = c3.generate(args) ;(window as any).generatedTicks = tickGenerator() // This should be removed from window }) it('should use function to generate ticks', function() { d3.select('.c3-axis-x') .selectAll('g.tick') .each(function(d, i) { var tick = d3 .select(this) .select('text') .text() expect(+tick).toBe((window as any).generatedTicks[i]) }) }) }) }) describe('axis.x.tick.width', function() { describe('indexed x 
axis and y/y2 axis', function() { describe('not rotated', function() { beforeAll(function() { args = { data: { columns: [ ['data1', 30, 200, 100, 400, 150, 250], ['data2', 50, 20, 10, 40, 15, 25] ], axes: { data2: 'y2' } }, axis: { y2: { show: true } } } }) it('should construct indexed x axis properly', function() { var ticks = chart.internal.main .select('.c3-axis-x') .selectAll('g.tick'), expectedX = '0', expectedDy = '.71em' expect(ticks.size()).toBe(6) ticks.each(function(d, i) { var tspans = d3.select(this).selectAll('tspan') expect(tspans.size()).toBe(1) tspans.each(function() { var tspan = d3.select(this) expect(tspan.text()).toBe(i + '') expect(tspan.attr('x')).toBe(expectedX) expect(tspan.attr('dy')).toBe(expectedDy) }) }) }) describe('should set axis.x.tick.format', function() { beforeAll(function() { args.axis.x = { tick: { format: function() { return 'very long tick text on x axis' } } } }) it('should split x axis tick text to multiple lines', function() { var ticks = chart.internal.main .select('.c3-axis-x') .selectAll('g.tick'), expectedTexts = ['very long tick text', 'on x axis'], expectedX = '0' expect(ticks.size()).toBe(6) ticks.each(function() { var tspans = d3.select(this).selectAll('tspan') expect(tspans.size()).toBe(2) tspans.each(function(d, i) { var tspan = d3.select(this) expect(tspan.text()).toBe(expectedTexts[i]) expect(tspan.attr('x')).toBe(expectedX) if (i === 0) { expect(tspan.attr('dy')).toBe('.71em') } else { expect(tspan.attr('dy')).toBeGreaterThan(8) } }) }) }) it('should construct y axis properly', function() { var ticks = chart.internal.main .select('.c3-axis-y') .selectAll('g.tick'), expectedX = '-9', expectedDy = '3' expect(ticks.size()).toBe(9) ticks.each(function(d) { var tspans = d3.select(this).selectAll('tspan') expect(tspans.size()).toBe(1) tspans.each(function() { var tspan = d3.select(this) expect(tspan.text()).toBe(d + '') expect(tspan.attr('x')).toBe(expectedX) expect(tspan.attr('dy')).toBe(expectedDy) }) }) }) 
it('should construct y2 axis properly', function() { var ticks = chart.internal.main .select('.c3-axis-y2') .selectAll('g.tick'), expectedX = '9', expectedDy = '3' expect(ticks.size()).toBe(9) ticks.each(function(d) { var tspans = d3.select(this).selectAll('tspan') expect(tspans.size()).toBe(1) tspans.each(function() { var tspan = d3.select(this) expect(tspan.text()).toBe(d + '') expect(tspan.attr('x')).toBe(expectedX) expect(tspan.attr('dy')).toBe(expectedDy) }) }) }) }) describe('should set big values in y', function() { beforeAll(function() { args.data.columns = [ ['data1', 3000000000000000, 200, 100, 400, 150, 250], ['data2', 50, 20, 10, 40, 15, 25] ] }) it('should not split y axis tick text to multiple lines', function() { var ticks = chart.internal.main .select('.c3-axis-y2') .selectAll('g.tick') ticks.each(function() { var tspans = d3.select(this).selectAll('tspan') expect(tspans.size()).toBe(1) }) }) }) }) describe('rotated', function() { beforeAll(function() { args.axis.rotated = true }) it('should split x axis tick text to multiple lines', function() { var ticks = chart.internal.main .select('.c3-axis-x') .selectAll('g.tick'), expectedTexts = ['very long tick text on', 'x axis'], expectedX = '-9' expect(ticks.size()).toBe(6) ticks.each(function() { var tspans = d3.select(this).selectAll('tspan') expect(tspans.size()).toBe(2) tspans.each(function(d, i) { var tspan = d3.select(this) expect(tspan.text()).toBe(expectedTexts[i]) expect(tspan.attr('x')).toBe(expectedX) if (i === 0) { expect(tspan.attr('dy')).toBeLessThan(0) } else { expect(tspan.attr('dy')).toBeGreaterThan(9) } }) }) }) it('should not split y axis tick text to multiple lines', function() { var ticks = chart.internal.main .select('.c3-axis-y') .selectAll('g.tick'), expectedTexts = [ '0', '500000000000000', '1000000000000000', '1500000000000000', '2000000000000000', '2500000000000000', '3000000000000000' ], expectedX = '0', expectedDy = '.71em' expect(ticks.size()).toBe(7) ticks.each(function(d, 
i) { var tspans = d3.select(this).selectAll('tspan') expect(tspans.size()).toBe(1) tspans.each(function() { var tspan = d3.select(this) expect(tspan.text()).toBe(expectedTexts[i]) expect(tspan.attr('x')).toBe(expectedX) expect(tspan.attr('dy')).toBe(expectedDy) }) }) }) }) }) describe('category axis', function() { describe('not rotated', function() { beforeAll(function() { args = { data: { x: 'x', columns: [ [ 'x', 'this is a very long tick text on category axis', 'cat1', 'cat2', 'cat3', 'cat4', 'cat5' ], ['data1', 30, 200, 100, 400, 150, 250], ['data2', 50, 20, 10, 40, 15, 25] ] }, axis: { x: { type: 'category' } } } }) it('should locate ticks properly', function() { var ticks = chart.internal.main .select('.c3-axis-x') .selectAll('g.tick') ticks.each(function(d, i) { var tspans = d3.select(this).selectAll('tspan'), expectedX = '0', expectedDy = '.71em' if (i > 0) { // i === 0 should be checked in next test expect(tspans.size()).toBe(1) tspans.each(function() { var tspan = d3.select(this) expect(tspan.attr('x')).toBe(expectedX) expect(tspan.attr('dy')).toBe(expectedDy) }) } }) }) xit('should split tick text properly', function() { var tick = chart.internal.main.select('.c3-axis-x').select('g.tick'), tspans = tick.selectAll('tspan'), expectedTickTexts = [ 'this is a very long', 'tick text on category', 'axis' ], expectedX = '0' expect(tspans.size()).toBe(3) tspans.each(function(d, i) { var tspan = d3.select(this) expect(tspan.text()).toBe(expectedTickTexts[i]) expect(tspan.attr('x')).toBe(expectedX) // unable to define pricise number because it differs depends on environment.. 
if (i === 0) { expect(tspan.attr('dy')).toBe('.71em') } else { expect(tspan.attr('dy')).toBeGreaterThan(8) } }) }) }) describe('rotated', function() { beforeAll(function() { args.axis.rotated = true }) it('should locate ticks on rotated axis properly', function() { var ticks = chart.internal.main .select('.c3-axis-x') .selectAll('g.tick') ticks.each(function(d, i) { var tspans = d3.select(this).selectAll('tspan'), expectedX = '-9', expectedDy = '3' if (i > 0) { // i === 0 should be checked in next test expect(tspans.size()).toBe(1) tspans.each(function() { var tspan = d3.select(this) expect(tspan.attr('x')).toBe(expectedX) expect(tspan.attr('dy')).toBe(expectedDy) }) } }) }) it('should split tick text on rotated axis properly', function() { var tick = chart.internal.main.select('.c3-axis-x').select('g.tick'), tspans = tick.selectAll('tspan'), expectedTickTexts = [ 'this is a very long', 'tick text on category', 'axis' ], expectedX = '-9' expect(tspans.size()).toBe(3) tspans.each(function(d, i) { var tspan = d3.select(this) expect(tspan.text()).toBe(expectedTickTexts[i]) expect(tspan.attr('x')).toBe(expectedX) // unable to define pricise number because it differs depends on environment.. 
if (i === 0) { expect(tspan.attr('dy')).toBeLessThan(0) } else { expect(tspan.attr('dy')).toBeGreaterThan(8) } }) }) }) describe('option used', function() { describe('as null', function() { beforeAll(function() { //'without split ticks', args.axis.x.tick = { multiline: false } }) it('should split x tick', function() { var tick = chart.internal.main .select('.c3-axis-x') .select('g.tick'), tspans = tick.selectAll('tspan') expect(tspans.size()).toBe(1) }) }) describe('as value', function() { beforeAll(function() { // 'without split ticks', args.axis.x.tick = { width: 150 } }) it('should split x tick to 2 lines properly', function() { var tick = chart.internal.main .select('.c3-axis-x') .select('g.tick'), tspans = tick.selectAll('tspan'), expectedTickTexts = [ 'this is a very long tick text on', 'category axis' ], expectedX = '-9' expect(tspans.size()).toBe(2) tspans.each(function(d, i) { var tspan = d3.select(this) expect(tspan.text()).toBe(expectedTickTexts[i]) expect(tspan.attr('x')).toBe(expectedX) // unable to define pricise number because it differs depends on environment.. if (i === 0) { expect(tspan.attr('dy')).toBeLessThan(0) } else { expect(tspan.attr('dy')).toBeGreaterThan(8) } }) }) }) describe('with multilineMax', function() { beforeAll(function() { args.axis.x.tick = { multiline: true, multilineMax: 2 } }) it('should ellipsify x tick properly', function() { var tick = chart.internal.main.select('.c3-axis-x').select('g.tick') var tspans = tick.selectAll('tspan') var expectedTickText = [ 'this is a very long', 'tick text on categ...' 
] expect(tspans.size()).toBe(2) tspans.each(function(d, i) { var tspan = d3.select(this) expect(tspan.text()).toBe(expectedTickText[i]) }) }) }) }) }) describe('with axis.x.tick.format', function() { beforeAll(function() { // 'with axis.x.tick.format', args.axis.x.tick.format = function() { return ['this is a very long tick text', 'on category axis'] } }) it('should have multiline tick text', function() { var tick = chart.internal.main.select('.c3-axis-x').select('g.tick'), tspans = tick.selectAll('tspan'), expectedTickTexts = [ 'this is a very long tick text', 'on category axis' ] expect(tspans.size()).toBe(2) tspans.each(function(d, i) { var tspan = d3.select(this) expect(tspan.text()).toBe(expectedTickTexts[i]) }) }) }) }) describe('axis.x.tick.rotate', function() { describe('not rotated', function() { beforeAll(function() { args = { data: { x: 'x', columns: [ [ 'x', 'category 1', 'category 2', 'category 3', 'category 4', 'category 5', 'category 6' ], ['data1', 30, 200, 100, 400, 150, 250], ['data2', 50, 20, 10, 40, 15, 25] ] }, axis: { x: { type: 'category', tick: { rotate: 60 } } } } }) it('should rotate tick texts', function() { chart.internal.main.selectAll('.c3-axis-x g.tick').each(function() { var tick = d3.select(this), text = tick.select('text'), tspan = text.select('tspan') expect(text.attr('transform')).toBe('rotate(60)') expect(text.attr('y')).toBe('1.5') expect(tspan.attr('dx')).toBe('6.928203230275509') }) }) it('should have automatically calculated x axis height', function() { var box = chart.internal.main .select('.c3-axis-x') .node() .getBoundingClientRect(), height = chart.internal.getHorizontalAxisHeight('x') expect(box.height).toBeGreaterThan(50) expect(height).toBeCloseTo(76, -1.3) // @TODO make this test better }) }) }) describe('axis.y.tick.rotate', function() { describe('not rotated', function() { beforeAll(function() { args = { data: { columns: [ ['data1', 30, 200, 100, 400, 150, 250, 100, 600], ['data2', 50, 20, 10, 40, 15, 25] ] }, 
axis: { rotated: true, y: { tick: { rotate: 45 } } } } }) it('should rotate tick texts', function() { chart.internal.main.selectAll('.c3-axis-y g.tick').each(function() { var tick = d3.select(this), text = tick.select('text'), tspan = text.select('tspan') expect(text.attr('transform')).toBe('rotate(45)') expect(text.attr('y')).toBe('4') expect(tspan.attr('dx')).toBeCloseTo(5.6, 0) }) }) it('should have automatically calculated y axis width', function() { var box = chart.internal.main .select('.c3-axis-y') .node() .getBoundingClientRect() expect(box.width).toBeCloseTo(590, 1) }) }) }) describe('axis.x.tick.fit', function() { describe('axis.x.tick.fit = true', function() { beforeAll(function() { // 'should set args for indexed data', args = { data: { columns: [ ['data1', 30, 200, 100, 400, 150, 250], ['data2', 50, 20, 10, 40, 15, 25], ['data3', 150, 120, 110, 140, 115, 125] ] } } }) it('should show fitted ticks on indexed data', function() { var ticks = chart.internal.main.selectAll('.c3-axis-x g.tick') expect(ticks.size()).toBe(6) }) describe('should set args for x-based data', function() { beforeAll(function() { args = { data: { x: 'x', columns: [ ['x', 10, 20, 100, 110, 200, 1000], ['data1', 30, 200, 100, 400, 150, 250], ['data2', 50, 20, 10, 40, 15, 25], ['data3', 150, 120, 110, 140, 115, 125] ] } } }) it('should show fitted ticks on indexed data', function() { var ticks = chart.internal.main.selectAll('.c3-axis-x g.tick') expect(ticks.size()).toBe(6) }) it('should show fitted ticks after hide and show', function() { chart.hide() chart.show() var ticks = chart.internal.main.selectAll('.c3-axis-x g.tick') expect(ticks.size()).toBe(6) }) }) }) describe('axis.x.tick.fit = false', function() { describe('should set args for indexed data', function() { beforeAll(function() { args = { data: { columns: [ ['data1', 30, 200, 100, 400, 150, 250], ['data2', 50, 20, 10, 40, 15, 25], ['data3', 150, 120, 110, 140, 115, 125] ] }, axis: { x: { tick: { fit: false } } } } }) 
it('should show fitted ticks on indexed data', function() { var ticks = chart.internal.main.selectAll('.c3-axis-x g.tick') expect(ticks.size()).toBe(11) }) }) describe('should set args for x-based data', function() { beforeAll(function() { args.data = { x: 'x', columns: [ ['x', 10, 20, 100, 110, 200, 1000], ['data1', 30, 200, 100, 400, 150, 250], ['data2', 50, 20, 10, 40, 15, 25], ['data3', 150, 120, 110, 140, 115, 125] ] } }) it('should show fitted ticks on indexed data', function() { var ticks = chart.internal.main.selectAll('.c3-axis-x g.tick') expect(ticks.size()).toBe(10) }) it('should show fitted ticks after hide and show', function() { chart.hide() chart.show() var ticks = chart.internal.main.selectAll('.c3-axis-x g.tick') expect(ticks.size()).toBe(10) }) }) }) }) describe('axis.y.inner', function() { beforeAll(function() { args = { data: { columns: [ ['data1', 30, 200, 100, 400, 150, 250], ['data2', 50, 20, 10, 40, 15, 25] ] }, axis: { y: { inner: false } } } }) it('should not have inner y axis', function() { var paddingLeft = chart.internal.getCurrentPaddingLeft(), tickTexts = chart.internal.main.selectAll('.c3-axis-y g.tick text') expect(paddingLeft).toBeGreaterThan(19) tickTexts.each(function() { expect(+d3.select(this).attr('x')).toBeLessThan(0) }) }) describe('with inner y axis', function() { beforeAll(function() { args.axis.y.inner = true }) it('should have inner y axis', function() { var paddingLeft = chart.internal.getCurrentPaddingLeft(), tickTexts = chart.internal.main.selectAll('.c3-axis-y g.tick text') expect(paddingLeft).toBe(1) tickTexts.each(function() { expect(+d3.select(this).attr('x')).toBeGreaterThan(0) }) }) }) }) describe('axis.y2.inner', function() { beforeAll(function() { args = { data: { columns: [ ['data1', 30, 200, 100, 400, 150, 250], ['data2', 50, 20, 10, 40, 15, 25] ] }, axis: { y2: { show: true, inner: false } } } }) it('should not have inner y axis', function() { var paddingRight = chart.internal.getCurrentPaddingRight(), 
tickTexts = chart.internal.main.selectAll('.c3-axis-2y g.tick text') expect(paddingRight).toBeGreaterThan(19) tickTexts.each(function() { expect(+d3.select(this).attr('x')).toBeGreaterThan(0) }) }) describe('with inner y axis', function() { beforeAll(function() { args.axis.y2.inner = true }) it('should have inner y axis', function() { var paddingRight = chart.internal.getCurrentPaddingRight(), tickTexts = chart.internal.main.selectAll('.c3-axis-2y g.tick text') expect(paddingRight).toBe(2) tickTexts.each(function() { expect(+d3.select(this).attr('x')).toBeLessThan(0) }) }) }) }) describe('axis.x.label', function() { beforeAll(function() { args = { data: { columns: [ ['somewhat long 1', 30, 200, 100, 400, 150, 250], ['somewhat long 2', 50, 20, 10, 40, 15, 25] ] }, axis: { x: { show: true, label: { text: 'Label of X axis' } } } } }) it('renders label text properly', () => { expect(d3.select('.c3-axis-x-label').text()).toEqual('Label of X axis') }) describe('outer label position', function() { beforeAll(function() { args.axis.x.label.position = 'outer-center' }) it('renders position properly', () => { const label = d3.select('.c3-axis-x-label') expect(label.attr('dy')).toEqual('30') }) describe('with rotated tick', function() { beforeAll(function() { args.axis.x.tick = { rotate: 90 } }) it('renders position properly', () => { const label = d3.select('.c3-axis-x-label') expect(label.attr('dy')).toBeGreaterThan(30) }) }) }) describe('inner label position', function() { beforeAll(function() { args.axis.x.label.position = 'inner-center' }) it('renders position properly', () => { const label = d3.select('.c3-axis-x-label') expect(label.attr('dy')).toEqual('-0.5em') }) describe('with rotated tick', function() { beforeAll(function() { args.axis.x.tick = { rotate: 90 } }) it('renders position properly', () => { const label = d3.select('.c3-axis-x-label') expect(label.attr('dy')).toEqual('-0.5em') }) }) }) }) })
the_stack
/**
 * Copyright 2020 Bonitasoft S.A.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
import ShapeBpmnElement, {
  ShapeBpmnActivity,
  ShapeBpmnBoundaryEvent,
  ShapeBpmnCallActivity,
  ShapeBpmnEvent,
  ShapeBpmnEventBasedGateway,
  ShapeBpmnStartEvent,
  ShapeBpmnSubProcess,
} from '../../../../model/bpmn/internal/shape/ShapeBpmnElement';
import {
  BpmnEventKind,
  ShapeBpmnCallActivityKind,
  ShapeBpmnElementKind,
  ShapeBpmnEventBasedGatewayKind,
  ShapeBpmnEventDefinitionKind,
  ShapeBpmnMarkerKind,
  ShapeBpmnSubProcessKind,
  ShapeUtil,
} from '../../../../model/bpmn/internal';
import { eventDefinitionKinds } from '../../../../model/bpmn/internal/shape/utils';
import { AssociationFlow, SequenceFlow } from '../../../../model/bpmn/internal/edge/flows';
import { AssociationDirectionKind, FlowKind, SequenceFlowKind } from '../../../../model/bpmn/internal/edge/kinds';
import { TProcess } from '../../../../model/bpmn/json/baseElement/rootElement/rootElement';
import { TBoundaryEvent, TCatchEvent, TThrowEvent } from '../../../../model/bpmn/json/baseElement/flowNode/event';
import { TActivity, TCallActivity, TSubProcess } from '../../../../model/bpmn/json/baseElement/flowNode/activity/activity';
import { TLane, TLaneSet } from '../../../../model/bpmn/json/baseElement/baseElement';
import { TFlowNode, TSequenceFlow } from '../../../../model/bpmn/json/baseElement/flowElement';
import { TAssociation, TGroup, TTextAnnotation } from '../../../../model/bpmn/json/baseElement/artifact';
import { ConvertedElements } from './utils';
import { TEventBasedGateway } from '../../../../model/bpmn/json/baseElement/flowNode/gateway';
import { TReceiveTask } from '../../../../model/bpmn/json/baseElement/flowNode/activity/task';
import { ensureIsArray } from '../../../helpers/array-utils';
import { ParsingMessageCollector } from '../../parsing-messages';
import { BoundaryEventNotAttachedToActivityWarning, LaneUnknownFlowNodeRefWarning } from '../warnings';

/** Pairs an event-definition kind with how many times it occurs on a single BPMN event element. */
interface EventDefinition {
  kind: ShapeBpmnEventDefinitionKind;
  counter: number;
}

/** Union of the JSON-model element types that can appear as a flow node inside a process. */
type FlowNode = TFlowNode | TActivity | TReceiveTask | TEventBasedGateway | TTextAnnotation;

/**
 * Converts BPMN `process` elements from the parsed JSON model into the internal shape model,
 * registering every produced element (pools, lanes, flow nodes, sequence/association flows)
 * into the shared {@link ConvertedElements} registry. Parsing anomalies are reported through
 * the {@link ParsingMessageCollector} rather than thrown.
 *
 * @internal
 */
export default class ProcessConverter {
  // ids of sequence flows referenced by a flow node's 'default' attribute; consumed later in getSequenceFlowKind
  private defaultSequenceFlowIds: string[] = [];

  constructor(private convertedElements: ConvertedElements, private parsingMessageCollector: ParsingMessageCollector) {}

  /**
   * Entry point: converts one process or an array of processes.
   * NOTE(review): the declared input union includes `string`, but parseProcess reads `process.id`/`process.name`
   * — presumably the string form is handled upstream or benignly yields undefined fields; confirm with callers.
   */
  deserialize(processes: string | TProcess | (string | TProcess)[]): void {
    ensureIsArray(processes).forEach(process => this.parseProcess(process));
  }

  /** Registers the process itself as a POOL shape, then converts everything it contains. */
  private parseProcess(process: TProcess): void {
    this.convertedElements.registerProcess(new ShapeBpmnElement(process.id, process.name, ShapeBpmnElementKind.POOL));
    this.buildProcessInnerElements(process);
  }

  /**
   * Converts the inner elements of a process or sub-process: flow nodes first, then lane
   * containers, then flows. Also used for sub-processes (see buildSubProcessInnerElements),
   * so `process.id` may be a sub-process id.
   */
  private buildProcessInnerElements(process: TProcess | TSubProcess): void {
    const processId = process.id;

    // flow nodes
    ShapeUtil.flowNodeKinds()
      .filter(kind => kind != ShapeBpmnElementKind.EVENT_BOUNDARY)
      .forEach(kind => this.buildFlowNodeBpmnElements(processId, process[kind], kind));
    // process boundary events afterwards as we need its parent activity to be available when building it
    this.buildFlowNodeBpmnElements(processId, process.boundaryEvent, ShapeBpmnElementKind.EVENT_BOUNDARY);

    // containers
    this.buildLaneBpmnElements(processId, process[ShapeBpmnElementKind.LANE]);
    this.buildLaneSetBpmnElements(processId, process['laneSet']);

    // flows
    this.buildSequenceFlows(process[FlowKind.SEQUENCE_FLOW]);
    this.buildAssociationFlows(process[FlowKind.ASSOCIATION_FLOW]);
  }

  /**
   * Converts each element of the given kind into its internal shape and registers it.
   * Dispatches on the kind: events, activities, event-based gateways and groups each get a
   * dedicated construction path; everything else becomes a plain ShapeBpmnElement.
   * Elements whose construction yields undefined (e.g. unsupported events) are silently not registered.
   */
  private buildFlowNodeBpmnElements(processId: string, bpmnElements: Array<FlowNode> | FlowNode, kind: ShapeBpmnElementKind): void {
    ensureIsArray(bpmnElements).forEach(bpmnElement => {
      let shapeBpmnElement;

      if (ShapeUtil.isEvent(kind)) {
        shapeBpmnElement = this.buildShapeBpmnEvent(bpmnElement, kind as BpmnEventKind, processId);
      } else if (ShapeUtil.isActivity(kind)) {
        shapeBpmnElement = this.buildShapeBpmnActivity(bpmnElement, kind, processId);
      } else if (kind == ShapeBpmnElementKind.GATEWAY_EVENT_BASED) {
        const eventBasedGatewayBpmnElement = bpmnElement as TEventBasedGateway;
        shapeBpmnElement = new ShapeBpmnEventBasedGateway(
          bpmnElement.id,
          eventBasedGatewayBpmnElement.name,
          processId,
          eventBasedGatewayBpmnElement.instantiate,
          // maps the XML attribute value (e.g. 'Exclusive'/'Parallel') onto the internal enum by name
          ShapeBpmnEventBasedGatewayKind[eventBasedGatewayBpmnElement.eventGatewayType],
        );
      } else if (kind == ShapeBpmnElementKind.GROUP) {
        shapeBpmnElement = this.convertedElements.buildShapeBpmnGroup(bpmnElement as TGroup, processId);
      } else {
        // text annotations carry their label in 'text'; all other kinds use 'name'
        // @ts-ignore We know that the text & name fields are not on all types, but it's already tested
        const name = kind === ShapeBpmnElementKind.TEXT_ANNOTATION ? bpmnElement.text : bpmnElement.name;
        // @ts-ignore We know that the instantiate field is not on all types, but it's already tested
        shapeBpmnElement = new ShapeBpmnElement(bpmnElement.id, name, kind, processId, bpmnElement.instantiate);
      }

      // remember the element's default sequence flow id (if any) for later flow-kind resolution
      // @ts-ignore We know that the default field is not on all types, but it's already tested
      const defaultFlow = bpmnElement.default;
      if (ShapeUtil.isWithDefaultSequenceFlow(kind) && defaultFlow) {
        this.defaultSequenceFlowIds.push(defaultFlow);
      }

      if (shapeBpmnElement) {
        this.convertedElements.registerFlowNode(shapeBpmnElement);
      }
    });
  }

  /**
   * Builds the internal shape for an activity: sub-processes and call activities get dedicated
   * construction paths; all other activities become a plain ShapeBpmnActivity with its markers.
   */
  private buildShapeBpmnActivity(bpmnElement: TActivity, kind: ShapeBpmnElementKind, processId: string): ShapeBpmnActivity {
    const markers = ProcessConverter.buildMarkers(bpmnElement);

    if (ShapeUtil.isSubProcess(kind)) {
      return this.buildShapeBpmnSubProcess(bpmnElement, processId, markers);
    }

    if (!ShapeUtil.isCallActivity(kind)) {
      // @ts-ignore
      return new ShapeBpmnActivity(bpmnElement.id, bpmnElement.name, kind, processId, bpmnElement.instantiate, markers);
    }
    return this.buildShapeBpmnCallActivity(bpmnElement, processId, markers);
  }

  /**
   * Builds a call activity. If its 'calledElement' resolves to a known global task, the call
   * activity is marked as calling that global task; otherwise it is treated as calling a process.
   */
  private buildShapeBpmnCallActivity(bpmnElement: TActivity, processId: string, markers: ShapeBpmnMarkerKind[]): ShapeBpmnCallActivity {
    const globalTaskKind = this.convertedElements.findGlobalTask((bpmnElement as TCallActivity).calledElement);
    if (!globalTaskKind) {
      return new ShapeBpmnCallActivity(bpmnElement.id, bpmnElement.name, ShapeBpmnCallActivityKind.CALLING_PROCESS, processId, markers);
    }
    return new ShapeBpmnCallActivity(bpmnElement.id, bpmnElement.name, ShapeBpmnCallActivityKind.CALLING_GLOBAL_TASK, processId, markers, globalTaskKind);
  }

  /**
   * Derives loop/multi-instance markers from an activity's loop characteristics.
   * A self-closing XML element is parsed as the empty string, hence the explicit `=== ''` checks
   * alongside the truthiness checks. At most one marker is produced (else-if chain).
   */
  private static buildMarkers(bpmnElement: TActivity): ShapeBpmnMarkerKind[] {
    const markers: ShapeBpmnMarkerKind[] = [];
    // @ts-ignore We know that the standardLoopCharacteristics field is not on all types, but it's already tested
    const standardLoopCharacteristics = bpmnElement.standardLoopCharacteristics;
    // @ts-ignore We know that the multiInstanceLoopCharacteristics field is not on all types, but it's already tested
    const multiInstanceLoopCharacteristics = ensureIsArray(bpmnElement.multiInstanceLoopCharacteristics, true)[0];
    if (standardLoopCharacteristics || standardLoopCharacteristics === '') {
      markers.push(ShapeBpmnMarkerKind.LOOP);
    } else if (multiInstanceLoopCharacteristics && multiInstanceLoopCharacteristics.isSequential) {
      markers.push(ShapeBpmnMarkerKind.MULTI_INSTANCE_SEQUENTIAL);
    } else if ((multiInstanceLoopCharacteristics && !multiInstanceLoopCharacteristics.isSequential) || multiInstanceLoopCharacteristics === '') {
      markers.push(ShapeBpmnMarkerKind.MULTI_INSTANCE_PARALLEL);
    }
    return markers;
  }

  /**
   * Builds the internal shape for an event, based on how many event definitions it carries:
   * zero definitions -> a NONE event (when the kind allows it); exactly one -> a typed event
   * (boundary/start events get dedicated subclasses).
   * NOTE(review): with more than one definition (or zero on a kind that cannot be a None event)
   * this falls through and implicitly returns undefined — the caller skips registration in that
   * case, so this appears to be the deliberate "unsupported event" signal; confirm before changing.
   */
  private buildShapeBpmnEvent(bpmnElement: TCatchEvent | TThrowEvent, elementKind: BpmnEventKind, processId: string): ShapeBpmnEvent {
    const eventDefinitions = this.getEventDefinitions(bpmnElement);
    const numberOfEventDefinitions = eventDefinitions.map(eventDefinition => eventDefinition.counter).reduce((counter, it) => counter + it, 0);

    // do we have a None Event?
    if (numberOfEventDefinitions == 0 && ShapeUtil.canHaveNoneEvent(elementKind)) {
      return new ShapeBpmnEvent(bpmnElement.id, bpmnElement.name, elementKind, ShapeBpmnEventDefinitionKind.NONE, processId);
    }

    if (numberOfEventDefinitions == 1) {
      const eventDefinitionKind = eventDefinitions[0].kind;
      if (ShapeUtil.isBoundaryEvent(elementKind)) {
        return this.buildShapeBpmnBoundaryEvent(bpmnElement as TBoundaryEvent, eventDefinitionKind);
      }
      if (ShapeUtil.isStartEvent(elementKind)) {
        return new ShapeBpmnStartEvent(bpmnElement.id, bpmnElement.name, eventDefinitionKind, processId, bpmnElement.isInterrupting);
      }
      return new ShapeBpmnEvent(bpmnElement.id, bpmnElement.name, elementKind, eventDefinitionKind, processId);
    }
  }

  /**
   * Builds a boundary event only when its 'attachedToRef' resolves to an activity; otherwise a
   * warning is collected and the method implicitly returns undefined (the caller then skips
   * registering the element).
   */
  private buildShapeBpmnBoundaryEvent(bpmnElement: TBoundaryEvent, eventDefinitionKind: ShapeBpmnEventDefinitionKind): ShapeBpmnBoundaryEvent {
    const parent = this.convertedElements.findFlowNode(bpmnElement.attachedToRef);

    if (ShapeUtil.isActivity(parent?.kind)) {
      return new ShapeBpmnBoundaryEvent(bpmnElement.id, bpmnElement.name, eventDefinitionKind, bpmnElement.attachedToRef, bpmnElement.cancelActivity);
    } else {
      this.parsingMessageCollector.warning(new BoundaryEventNotAttachedToActivityWarning(bpmnElement.id, bpmnElement.attachedToRef, parent?.kind));
    }
  }

  /**
   * Get the list of eventDefinitions held by the Event bpmnElement, counting both the nested
   * `<kind>EventDefinition` children and the definitions referenced through `eventDefinitionRef`.
   * Kinds with a zero count are filtered out of the result.
   *
   * NOTE(review): if findEventDefinitionOfDefinition returns a kind absent from the map (e.g. an
   * unknown ref), `get(kind) + 1` yields NaN and the entry is dropped by the `> 0` filter — the
   * unknown ref is thus silently ignored; verify this is intended.
   *
   * @param bpmnElement The BPMN element from the XML data which represents a BPMN Event
   */
  private getEventDefinitions(bpmnElement: TCatchEvent | TThrowEvent): EventDefinition[] {
    const eventDefinitions = new Map<ShapeBpmnEventDefinitionKind, number>();

    eventDefinitionKinds.forEach(eventDefinitionKind => {
      // sometimes eventDefinition is simple and therefore it is parsed as empty string "", in that case eventDefinition will be converted to an empty object
      const eventDefinition = bpmnElement[eventDefinitionKind + 'EventDefinition'];
      const counter = ensureIsArray(eventDefinition, true).length;
      eventDefinitions.set(eventDefinitionKind, counter);
    });

    ensureIsArray<string>(bpmnElement.eventDefinitionRef).forEach(eventDefinitionRef => {
      const kind = this.convertedElements.findEventDefinitionOfDefinition(eventDefinitionRef);
      eventDefinitions.set(kind, eventDefinitions.get(kind) + 1);
    });

    return Array.from(eventDefinitions.keys())
      .map(kind => ({ kind, counter: eventDefinitions.get(kind) }))
      .filter(eventDefinition => eventDefinition.counter > 0);
  }

  /**
   * Builds a sub-process shape — EVENT kind when 'triggeredByEvent' is set, EMBEDDED otherwise —
   * after recursively converting its inner elements.
   */
  private buildShapeBpmnSubProcess(bpmnElement: TSubProcess, processId: string, markers: ShapeBpmnMarkerKind[]): ShapeBpmnSubProcess {
    this.buildSubProcessInnerElements(bpmnElement);
    if (!bpmnElement.triggeredByEvent) {
      return new ShapeBpmnSubProcess(bpmnElement.id, bpmnElement.name, ShapeBpmnSubProcessKind.EMBEDDED, processId, markers);
    }
    return new ShapeBpmnSubProcess(bpmnElement.id, bpmnElement.name, ShapeBpmnSubProcessKind.EVENT, processId, markers);
  }

  /** A sub-process shares the generic inner-element conversion; the recursion happens here. */
  private buildSubProcessInnerElements(subProcess: TSubProcess): void {
    this.buildProcessInnerElements(subProcess);
  }

  /** Flattens each laneSet into its lanes and converts them. */
  private buildLaneSetBpmnElements(processId: string, laneSets: Array<TLaneSet> | TLaneSet): void {
    ensureIsArray(laneSets).forEach(laneSet => this.buildLaneBpmnElements(processId, laneSet.lane));
  }

  /**
   * Registers each lane, re-parents its referenced flow nodes, and recurses into any child lane
   * set (nested lanes are parented to the enclosing lane's id, not the process id).
   */
  private buildLaneBpmnElements(processId: string, lanes: Array<TLane> | TLane): void {
    ensureIsArray(lanes).forEach(lane => {
      this.convertedElements.registerLane(new ShapeBpmnElement(lane.id, lane.name, ShapeBpmnElementKind.LANE, processId));
      this.assignParentOfLaneFlowNodes(lane);
      if (lane.childLaneSet?.lane) {
        this.buildLaneBpmnElements(lane.id, lane.childLaneSet.lane);
      }
    });
  }

  /**
   * Re-parents every flow node referenced by the lane onto the lane itself. Boundary events are
   * excluded (they stay attached to their activity); unknown refs produce a collected warning.
   */
  private assignParentOfLaneFlowNodes(lane: TLane): void {
    ensureIsArray<string>(lane.flowNodeRef).forEach(flowNodeRef => {
      const shapeBpmnElement = this.convertedElements.findFlowNode(flowNodeRef);
      const laneId = lane.id;
      if (shapeBpmnElement) {
        if (!ShapeUtil.isBoundaryEvent(shapeBpmnElement.kind)) {
          shapeBpmnElement.parentId = laneId;
        }
      } else {
        this.parsingMessageCollector.warning(new LaneUnknownFlowNodeRefWarning(laneId, flowNodeRef));
      }
    });
  }

  /** Converts and registers sequence flows, resolving each one's kind (default/conditional/normal). */
  private buildSequenceFlows(bpmnElements: Array<TSequenceFlow> | TSequenceFlow): void {
    ensureIsArray(bpmnElements).forEach(sequenceFlow => {
      const kind = this.getSequenceFlowKind(sequenceFlow);
      this.convertedElements.registerSequenceFlow(new SequenceFlow(sequenceFlow.id, sequenceFlow.name, sequenceFlow.sourceRef, sequenceFlow.targetRef, kind));
    });
  }

  /** Converts and registers association flows (associations have no name in the internal model). */
  private buildAssociationFlows(bpmnElements: Array<TAssociation> | TAssociation): void {
    ensureIsArray(bpmnElements).forEach(association => {
      // TODO Remove associationDirection conversion type when we merge/simplify internal model with BPMN json model
      const direction = association.associationDirection as unknown as AssociationDirectionKind;
      this.convertedElements.registerAssociationFlow(new AssociationFlow(association.id, undefined, association.sourceRef, association.targetRef, direction));
    });
  }

  /**
   * Resolves a sequence flow's kind: DEFAULT when a source element declared it as its default
   * flow (collected earlier in buildFlowNodeBpmnElements), CONDITIONAL when it carries a
   * condition expression and leaves a default-flow-capable source, NORMAL otherwise.
   */
  private getSequenceFlowKind(sequenceFlow: TSequenceFlow): SequenceFlowKind {
    if (this.defaultSequenceFlowIds.includes(sequenceFlow.id)) {
      return SequenceFlowKind.DEFAULT;
    } else {
      const sourceShapeBpmnElement = this.convertedElements.findFlowNode(sequenceFlow.sourceRef);
      if (sourceShapeBpmnElement && ShapeUtil.isWithDefaultSequenceFlow(sourceShapeBpmnElement.kind) && sequenceFlow.conditionExpression) {
        if (ShapeUtil.isActivity(sourceShapeBpmnElement.kind)) {
          return SequenceFlowKind.CONDITIONAL_FROM_ACTIVITY;
        } else {
          return SequenceFlowKind.CONDITIONAL_FROM_GATEWAY;
        }
      }
    }
    return SequenceFlowKind.NORMAL;
  }
}
the_stack
/// <reference types="activex-interop" /> declare namespace WIA { /** String versions of globally unique identifiers (GUIDs) that identify common Device and Item commands. */ const enum CommandID { wiaCommandChangeDocument = '{04E725B0-ACAE-11D2-A093-00C04F72DC3C}', wiaCommandDeleteAllItems = '{E208C170-ACAD-11D2-A093-00C04F72DC3C}', wiaCommandSynchronize = '{9B26B7B2-ACAD-11D2-A093-00C04F72DC3C}', wiaCommandTakePicture = '{AF933CAC-ACAD-11D2-A093-00C04F72DC3C}', wiaCommandUnloadDocument = '{1F3B3D8E-ACAE-11D2-A093-00C04F72DC3C}', } /** String versions of globally unique identifiers (GUIDs) that identify DeviceManager events. */ const enum EventID { wiaEventDeviceConnected = '{A28BBADE-64B6-11D2-A231-00C04FA31809}', wiaEventDeviceDisconnected = '{143E4E83-6497-11D2-A231-00C04FA31809}', wiaEventItemCreated = '{4C8F4EF5-E14F-11D2-B326-00C04F68CE61}', wiaEventItemDeleted = '{1D22A559-E14F-11D2-B326-00C04F68CE61}', wiaEventScanEmailImage = '{C686DCEE-54F2-419E-9A27-2FC7F2E98F9E}', wiaEventScanFaxImage = '{C00EB793-8C6E-11D2-977A-0000F87A926F}', wiaEventScanFilmImage = '{9B2B662C-6185-438C-B68B-E39EE25E71CB}', wiaEventScanImage = '{A6C5A715-8C6E-11D2-977A-0000F87A926F}', wiaEventScanImage2 = '{FC4767C1-C8B3-48A2-9CFA-2E90CB3D3590}', wiaEventScanImage3 = '{154E27BE-B617-4653-ACC5-0FD7BD4C65CE}', wiaEventScanImage4 = '{A65B704A-7F3C-4447-A75D-8A26DFCA1FDF}', wiaEventScanOCRImage = '{9D095B89-37D6-4877-AFED-62A297DC6DBE}', wiaEventScanPrintImage = '{B441F425-8C6E-11D2-977A-0000F87A926F}', } /** String versions of globally unique identifiers (GUIDs) that indicate the file format of an image. 
*/ const enum FormatID { wiaFormatBMP = '{B96B3CAB-0728-11D3-9D7B-0000F81EF32E}', wiaFormatGIF = '{B96B3CB0-0728-11D3-9D7B-0000F81EF32E}', wiaFormatJPEG = '{B96B3CAE-0728-11D3-9D7B-0000F81EF32E}', wiaFormatPNG = '{B96B3CAF-0728-11D3-9D7B-0000F81EF32E}', wiaFormatTIFF = '{B96B3CB1-0728-11D3-9D7B-0000F81EF32E}', } /** Miscellaneous string constants */ const enum Miscellaneous { wiaAnyDeviceID = '*', wiaIDUnknown = '{00000000-0000-0000-0000-000000000000}', } /** * The WiaDeviceType enumeration specifies the type of device attached to a user's computer. Use the Type property on the DeviceInfo object or the Device * object to obtain these values from the device. */ const enum WiaDeviceType { CameraDeviceType = 2, ScannerDeviceType = 1, UnspecifiedDeviceType = 0, VideoDeviceType = 3, } /** * A DeviceEvent's type is composed of bits from the WiaEventFlags enumeration. You can test a DeviceEvent's type by using the AND operation with * DeviceEvent.Type and a member from the WiaEventFlags enumeration. */ const enum WiaEventFlag { ActionEvent = 2, NotificationEvent = 1, } /** The WiaImageBias enumeration helps specify what type of data the image is intended to represent. */ const enum WiaImageBias { MaximizeQuality = 131072, MinimizeSize = 65536, } /** The WiaImageIntent enumeration helps specify what type of data the image is intended to represent. */ const enum WiaImageIntent { ColorIntent = 1, GrayscaleIntent = 2, TextIntent = 4, UnspecifiedIntent = 0, } /** * The WiaImagePropertyType enumeration specifies the type of the value of an image property. Image properties can be found in the Properties collection * of an ImageFile object. 
*/ const enum WiaImagePropertyType { ByteImagePropertyType = 1001, LongImagePropertyType = 1004, RationalImagePropertyType = 1006, StringImagePropertyType = 1002, UndefinedImagePropertyType = 1000, UnsignedIntegerImagePropertyType = 1003, UnsignedLongImagePropertyType = 1005, UnsignedRationalImagePropertyType = 1007, VectorOfBytesImagePropertyType = 1101, VectorOfLongsImagePropertyType = 1103, VectorOfRationalsImagePropertyType = 1105, VectorOfUndefinedImagePropertyType = 1100, VectorOfUnsignedIntegersImagePropertyType = 1102, VectorOfUnsignedLongsImagePropertyType = 1104, VectorOfUnsignedRationalsImagePropertyType = 1106, } /** * An Item's type is composed of bits from the WiaItemFlags enumeration. You can test an Item's type by using the AND operation with * Item.Properties("Item Flags") and a member from the WiaItemFlags enumeration. */ const enum WiaItemFlag { AnalyzeItemFlag = 16, AudioItemFlag = 32, BurstItemFlag = 2048, DeletedItemFlag = 128, DeviceItemFlag = 64, DisconnectedItemFlag = 256, FileItemFlag = 2, FolderItemFlag = 4, FreeItemFlag = 0, GeneratedItemFlag = 16384, HasAttachmentsItemFlag = 32768, HPanoramaItemFlag = 512, ImageItemFlag = 1, RemovedItemFlag = -2147483648, RootItemFlag = 8, StorageItemFlag = 4096, TransferItemFlag = 8192, VideoItemFlag = 65536, VPanoramaItemFlag = 1024, } /** * The WiaPropertyType enumeration specifies the type of the value of an item property. Item properties can be found in the Properties collection of a * Device or Item object. 
*/ const enum WiaPropertyType { BooleanPropertyType = 1, BytePropertyType = 2, ClassIDPropertyType = 15, CurrencyPropertyType = 12, DatePropertyType = 13, DoublePropertyType = 11, ErrorCodePropertyType = 7, FileTimePropertyType = 14, HandlePropertyType = 18, IntegerPropertyType = 3, LargeIntegerPropertyType = 8, LongPropertyType = 5, ObjectPropertyType = 17, SinglePropertyType = 10, StringPropertyType = 16, UnsignedIntegerPropertyType = 4, UnsignedLargeIntegerPropertyType = 9, UnsignedLongPropertyType = 6, UnsupportedPropertyType = 0, VariantPropertyType = 19, VectorOfBooleansPropertyType = 101, VectorOfBytesPropertyType = 102, VectorOfClassIDsPropertyType = 115, VectorOfCurrenciesPropertyType = 112, VectorOfDatesPropertyType = 113, VectorOfDoublesPropertyType = 111, VectorOfErrorCodesPropertyType = 107, VectorOfFileTimesPropertyType = 114, VectorOfIntegersPropertyType = 103, VectorOfLargeIntegersPropertyType = 108, VectorOfLongsPropertyType = 105, VectorOfSinglesPropertyType = 110, VectorOfStringsPropertyType = 116, VectorOfUnsignedIntegersPropertyType = 104, VectorOfUnsignedLargeIntegersPropertyType = 109, VectorOfUnsignedLongsPropertyType = 106, VectorOfVariantsPropertyType = 119, } /** * The WiaSubType enumeration specifies more detail about the property value. Use the SubType property on the Property object to obtain these values for * the property. */ const enum WiaSubType { FlagSubType = 3, ListSubType = 2, RangeSubType = 1, UnspecifiedSubType = 0, } /** * The CommonDialog control is an invisible-at-runtime control that contains all the methods that display a User Interface. A CommonDialog control can be * created using "WIA.CommonDialog" in a call to CreateObject or by dropping a CommonDialog on a form. 
*/ class CommonDialog { private constructor(); private 'WIA.CommonDialog_typekey': CommonDialog; /** * Displays one or more dialog boxes that enable the user to acquire an image from a hardware device for image acquisition and returns an ImageFile * object on success, otherwise Nothing * @param WIA.WiaDeviceType [DeviceType=0] * @param WIA.WiaImageIntent [Intent=0] * @param WIA.WiaImageBias [Bias=131072] * @param string [FormatID='{00000000-0000-0000-0000-000000000000}'] * @param boolean [AlwaysSelectDevice=false] * @param boolean [UseCommonUI=true] * @param boolean [CancelError=false] */ ShowAcquireImage(DeviceType?: WiaDeviceType, Intent?: WiaImageIntent, Bias?: WiaImageBias, FormatID?: string, AlwaysSelectDevice?: boolean, UseCommonUI?: boolean, CancelError?: boolean): ImageFile | null; /** Launches the Windows Scanner and Camera Wizard and returns Nothing. Future versions may return a collection of ImageFile objects. */ ShowAcquisitionWizard(Device: Device): null; /** * Displays the properties dialog box for the specified Device * @param boolean [CancelError=false] */ ShowDeviceProperties(Device: Device, CancelError?: boolean): void; /** * Displays the properties dialog box for the specified Item * @param boolean [CancelError=false] */ ShowItemProperties(Item: Item, CancelError?: boolean): void; /** Launches the Photo Printing Wizard with the absolute path of a specific file or Vector of absolute paths to files */ ShowPhotoPrintingWizard(Files: string | Vector<string>): void; /** * Displays a dialog box that enables the user to select a hardware device for image acquisition. 
Returns the selected Device object on success, * otherwise Nothing * @param WIA.WiaDeviceType [DeviceType=0] * @param boolean [AlwaysSelectDevice=false] * @param boolean [CancelError=false] */ ShowSelectDevice(DeviceType?: WiaDeviceType, AlwaysSelectDevice?: boolean, CancelError?: boolean): Device | null; /** * Displays a dialog box that enables the user to select an item for transfer from a hardware device for image acquisition. Returns the selection as an * Items collection on success, otherwise Nothing * @param WIA.WiaImageIntent [Intent=0] * @param WIA.WiaImageBias [Bias=131072] * @param boolean [SingleSelect=true] * @param boolean [UseCommonUI=true] * @param boolean [CancelError=false] */ ShowSelectItems(Device: Device, Intent?: WiaImageIntent, Bias?: WiaImageBias, SingleSelect?: boolean, UseCommonUI?: boolean, CancelError?: boolean): Items | null; /** * Displays a progress dialog box while transferring the specified Item to the local machine. See Item.Transfer for additional information. * @param string [FormatID='{00000000-0000-0000-0000-000000000000}'] * @param boolean [CancelError=false] */ ShowTransfer(Item: Item, FormatID?: string, CancelError?: boolean): ImageFile; } /** The Device object represents an active connection to an imaging device. */ class Device { private constructor(); private 'WIA.Device_typekey': Device; /** A collection of all commands for this imaging device */ readonly Commands: DeviceCommands; /** Returns the DeviceID for this Device */ readonly DeviceID: string; /** A collection of all events for this imaging device */ readonly Events: DeviceEvents; /** * Issues the command specified by CommandID to the imaging device. CommandIDs are device dependent. Valid CommandIDs for this Device are contained in * the Commands collection. 
*/ ExecuteCommand(CommandID: string): Item; /** Returns the Item object specified by ItemID if it exists */ GetItem(ItemID: string): Item; /** A collection of all items for this imaging device */ readonly Items: Items; /** A collection of all properties for this imaging device */ readonly Properties: Properties; /** Returns the Type of Device */ readonly Type: WiaDeviceType; } /** The DeviceCommand object describes a CommandID that can be used when calling ExecuteCommand on a Device or Item object. */ class DeviceCommand { private constructor(); private 'WIA.DeviceCommand_typekey': DeviceCommand; /** Returns the commandID for this Command */ readonly CommandID: string; /** Returns the command Description */ readonly Description: string; /** Returns the command Name */ readonly Name: string; } /** * The DeviceCommands object is a collection of all the supported DeviceCommands for an imaging device. See the Commands property of a Device or Item * object for more details on determining the collection of supported device commands. */ interface DeviceCommands { /** Returns the number of members in the collection */ readonly Count: number; /** Returns the specified item in the collection by position */ Item(Index: number): DeviceCommand; /** Returns the specified item in the collection by position */ (Index: number): DeviceCommand; } /** The DeviceEvent object describes an EventID that can be used when calling RegisterEvent or RegisterPersistentEvent on a DeviceManager object. */ class DeviceEvent { private constructor(); private 'WIA.DeviceEvent_typekey': DeviceEvent; /** Returns the event Description */ readonly Description: string; /** Returns the EventID for this Event */ readonly EventID: string; /** Returns the event Name */ readonly Name: string; /** Returns the Type of this Event */ readonly Type: WiaEventFlag; } /** * The DeviceEvents object is a collection of all the supported DeviceEvent for an imaging device. 
See the Events property of a Device object for more * details on determining the collection of supported device events. */ interface DeviceEvents { /** Returns the number of members in the collection */ readonly Count: number; /** Returns the specified item in the collection by position */ Item(Index: number): DeviceEvent; /** Returns the specified item in the collection by position */ (Index: number): DeviceEvent; } /** * The DeviceInfo object is a container that describes the unchanging (static) properties of an imaging device that is currently connected to the * computer. */ class DeviceInfo { private constructor(); private 'WIA.DeviceInfo_typekey': DeviceInfo; /** Establish a connection with this device and return a Device object */ Connect(): Device; /** Returns the DeviceID for this Device */ readonly DeviceID: string; /** A collection of all properties for this imaging device that are applicable when the device is not connected */ readonly Properties: Properties; /** Returns the Type of Device */ readonly Type: WiaDeviceType; } /** * The DeviceInfos object is a collection of all the imaging devices currently connected to the computer. See the DeviceInfos property on the * DeviceManager object for detail on accessing the DeviceInfos object. */ interface DeviceInfos { /** Returns the number of members in the collection */ readonly Count: number; /** Returns the specified item in the collection either by position or Device ID */ Item(Index: number | string): DeviceInfo; /** Returns the specified item in the collection either by position or Device ID */ (Index: number | string): DeviceInfo; } /** * The DeviceManager control is an invisible-at-runtime control that manages the imaging devices connected to the computer. A DeviceManager control can * be created using "WIA.DeviceManager" in a call to CreateObject or by dropping a DeviceManager on a form. 
*/ class DeviceManager { private constructor(); private 'WIA.DeviceManager_typekey': DeviceManager; /** A collection of all imaging devices connected to this computer */ readonly DeviceInfos: DeviceInfos; /** * Registers the specified EventID for the specified DeviceID. If DeviceID is "*" then OnEvent will be called whenever the event specified occurs for any * device. Otherwise, OnEvent will only be called if the event specified occurs on the device specified. * @param string [DeviceID='*'] */ RegisterEvent(EventID: string, DeviceID?: string): void; /** * Registers the specified Command to launch when the specified EventID for the specified DeviceID occurs. Command can be either a ClassID or the full * path name and the appropriate command-line arguments needed to invoke the application. * @param string [DeviceID='*'] */ RegisterPersistentEvent(Command: string, Name: string, Description: string, Icon: string, EventID: string, DeviceID?: string): void; /** * Unregisters the specified EventID for the specified DeviceID. UnregisterEvent should only be called for EventID and DeviceID for which you called * RegisterEvent. * @param string [DeviceID='*'] */ UnregisterEvent(EventID: string, DeviceID?: string): void; /** * Unregisters the specified Command for the specified EventID for the specified DeviceID. UnregisterPersistentEvent should only be called for the * Command, Name, Description, Icon, EventID and DeviceID for which you called RegisterPersistentEvent. * @param string [DeviceID='*'] */ UnregisterPersistentEvent(Command: string, Name: string, Description: string, Icon: string, EventID: string, DeviceID?: string): void; } /** * The Filter object represents a unit of modification on an ImageFile. To use a Filter, add it to the Filters collection, then set the filter's * properties and finally use the Apply method of the ImageProcess object to filter an ImageFile. 
*/ class Filter { private constructor(); private 'WIA.Filter_typekey': Filter; /** Returns a Description of what the filter does */ readonly Description: string; /** Returns the FilterID for this Filter */ readonly FilterID: string; /** Returns the Filter Name */ readonly Name: string; /** A collection of all properties for this filter */ readonly Properties: Properties; } /** * The FilterInfo object is a container that describes a Filter object without requiring a Filter to be Added to the process chain. See the FilterInfos * property on the ImageProcess object for details on accessing FilterInfo objects. */ class FilterInfo { private constructor(); private 'WIA.FilterInfo_typekey': FilterInfo; /** Returns a technical Description of what the filter does and how to use it in a filter chain */ readonly Description: string; /** Returns the FilterID for this filter */ readonly FilterID: string; /** Returns the FilterInfo Name */ readonly Name: string; } /** * The FilterInfos object is a collection of all the available FilterInfo objects. See the FilterInfos property on the ImageProcess object for detail on * accessing the FilterInfos object. */ interface FilterInfos { /** Returns the number of members in the collection */ readonly Count: number; /** Returns the specified item in the collection either by position or name */ Item(Index: number | string): FilterInfo; /** Returns the specified item in the collection either by position or name */ (Index: number | string): FilterInfo; } /** The Filters object is a collection of the Filters that will be applied to an ImageFile when you call the Apply method on the ImageProcess object. 
*/ interface Filters { /** * Appends/Inserts a new Filter of the specified FilterID into a Filter collection * @param number [Index=0] */ Add(FilterID: string, Index?: number): void; /** Returns the number of members in the collection */ readonly Count: number; /** Returns the specified item in the collection by position or FilterID */ Item(Index: number): Filter; /** Removes the designated filter */ Remove(Index: number): void; /** Returns the specified item in the collection by position or FilterID */ (Index: number): Filter; } /** * The Formats object is a collection of supported FormatIDs that you can use when calling Transfer on an Item object or ShowTransfer on a CommonDialog * object for this Item. */ interface Formats { /** Returns the number of members in the collection */ readonly Count: number; /** Returns the specified item in the collection by position */ Item(Index: number): string; /** Returns the specified item in the collection by position */ (Index: number): string; } /** * The ImageFile object is a container for images transferred to your computer when you call Transfer or ShowTransfer. It also supports image files * through LoadFile. An ImageFile object can be created using "WIA.ImageFile" in a call to CreateObject. 
*/ class ImageFile { private constructor(); private 'WIA.ImageFile_typekey': ImageFile; /** Returns/Sets the current frame in the image */ ActiveFrame: number; /** Returns the raw image bits as a Vector of Long values */ readonly ARGBData: Vector; /** Returns the raw image file as a Vector of Bytes */ readonly FileData: Vector; /** Returns the file extension for this image file type */ readonly FileExtension: string; /** Returns the FormatID for this file type */ readonly FormatID: string; /** Returns the number of frames in the image */ readonly FrameCount: number; /** Returns the Height of the image in pixels */ readonly Height: number; /** Returns the Horizontal pixels per inch of the image */ readonly HorizontalResolution: number; /** Indicates if the pixel format has an alpha component */ readonly IsAlphaPixelFormat: boolean; /** Indicates whether the image is animated */ readonly IsAnimated: boolean; /** Indicates if the pixel format is extended (16 bits/channel) */ readonly IsExtendedPixelFormat: boolean; /** Indicates if the pixel data is an index into a palette or the actual color data */ readonly IsIndexedPixelFormat: boolean; /** Loads the ImageFile object with the specified File */ LoadFile(Filename: string): void; /** Returns the depth of the pixels of the image in bits per pixel */ readonly PixelDepth: number; /** A collection of all properties for this image */ readonly Properties: Properties; /** Save the ImageFile object to the specified File */ SaveFile(Filename: string): void; /** Returns the Vertical pixels per inch of the image */ readonly VerticalResolution: number; /** Returns the Width of the image in pixels */ readonly Width: number; } /** The ImageProcess object manages the filter chain. An ImageProcess object can be created using "WIA.ImageProcess" in a call to CreateObject. 
 */
class ImageProcess {
    private constructor();
    private 'WIA.ImageProcess_typekey': ImageProcess;

    /** Takes the specified ImageFile and returns the new ImageFile with all the filters applied on success */
    Apply(Source: ImageFile): ImageFile;

    /** A collection of all available filters */
    readonly FilterInfos: FilterInfos;

    /** A collection of the filters to be applied in this process */
    readonly Filters: Filters;
}

/**
 * The Item object is a container for an item on an imaging device object. See the Items property on the Device or Item object for details on accessing
 * Item objects.
 */
class Item {
    private constructor();
    private 'WIA.Item_typekey': Item;

    /** A collection of all commands for this item */
    readonly Commands: DeviceCommands;

    /** Issues the command specified by CommandID. CommandIDs are device dependent. Valid CommandIDs for this Item are contained in the Commands collection. */
    ExecuteCommand(CommandID: string): Item;

    /** A collection of all supported format types for this item */
    readonly Formats: Formats;

    /** Returns the ItemID for this Item */
    readonly ItemID: string;

    /** A collection of all child items for this item */
    readonly Items: Items;

    /** A collection of all properties for this item */
    readonly Properties: Properties;

    /**
     * Returns an ImageFile object, in this version, in the format specified in FormatID if supported, otherwise using the preferred format for this imaging
     * device. Future versions may return a collection of ImageFile objects.
     * @param string [FormatID='{00000000-0000-0000-0000-000000000000}']
     */
    Transfer(FormatID?: string): ImageFile;
}

/** The Items object contains a collection of Item objects. See the Items property on the Device or Item object for details on accessing the Items object. */
// tslint:disable-next-line interface-name
interface Items {
    /**
     * Adds a new Item with the specified Name and Flags. The Flags value is created by using the OR operation with members of the WiaItemFlags enumeration.
     */
    Add(Name: string, Flags: number): void;

    /** Returns the number of members in the collection */
    readonly Count: number;

    /** Returns the specified item in the collection by position */
    Item(Index: number): Item;

    /** Removes the designated Item */
    Remove(Index: number): void;

    /** Returns the specified item in the collection by position */
    (Index: number): Item;
}

/**
 * The Properties object is a collection of all the Property objects associated with a given Device, DeviceInfo, Filter, ImageFile or Item object. See
 * the Properties property on any of these objects for detail on accessing the Properties object.
 */
interface Properties {
    /** Returns the number of members in the collection */
    readonly Count: number;

    /** Indicates whether the specified Property exists in the collection */
    Exists(Index: number | string): boolean;

    /** Returns the specified item in the collection either by position or name. */
    Item(Index: number | string): Property;

    /** Returns the specified item in the collection either by position or name. */
    (Index: number | string): Property;
}

/**
 * The Property object is a container for a property associated with a Device, DeviceInfo, Filter, ImageFile or Item object. See the Properties property
 * on any of these objects for details on accessing Property objects.
 */
class Property {
    private constructor();
    private 'WIA.Property_typekey': Property;

    /** Indicates whether the Property Value is read only */
    readonly IsReadOnly: boolean;

    /** Indicates whether the Property Value is a vector */
    readonly IsVector: boolean;

    /** Returns the Property Name */
    readonly Name: string;

    /** Returns the PropertyID of this Property */
    readonly PropertyID: number;

    /** Returns the SubType of the Property, if any */
    readonly SubType: WiaSubType;

    /** Returns the default Property Value if the SubType is not UnspecifiedSubType */
    readonly SubTypeDefault: any;

    /** Returns the maximum valid Property Value if the SubType is RangeSubType */
    readonly SubTypeMax: number;

    /** Returns the minimum valid Property Value if the SubType is RangeSubType */
    readonly SubTypeMin: number;

    /** Returns the step increment of Property Values if the SubType is RangeSubType */
    readonly SubTypeStep: number;

    /** Returns a Vector of valid Property Values if the SubType is ListSubType or valid flag Values that can be ored together if the SubType is FlagSubType */
    readonly SubTypeValues: Vector;

    /** Returns either a WiaPropertyType or a WiaImagePropertyType */
    readonly Type: number;

    /** Returns/Sets the Property Value */
    Value: any;
}

/**
 * The Rational object is a container for the rational values found in Exif tags. It is a supported element type of the Vector object and may be created
 * using "WIA.Rational" in a call to CreateObject.
 */
class Rational {
    private constructor();
    private 'WIA.Rational_typekey': Rational;

    /** Returns/Sets the Rational Value Denominator */
    Denominator: number;

    /** Returns/Sets the Rational Value Numerator */
    Numerator: number;

    /** Returns the Rational Value as a Double */
    readonly Value: number;
}

/**
 * The Vector object is a collection of values of the same type. It is used throughout the library in many different ways. The Vector object may be
 * created using "WIA.Vector" in a call to CreateObject.
 */
interface Vector<TItem = any> {
    /**
     * If Index is not zero, Inserts a new element into the Vector collection before the specified Index. If Index is zero, Appends a new element to the
     * Vector collection.
     * @param number [Index=0]
     */
    Add(Value: TItem, Index?: number): void;

    /** Returns/Sets the Vector of Bytes as an array of bytes */
    BinaryData: SafeArray;

    /** Removes all elements. */
    Clear(): void;

    /** Returns the number of members in the vector */
    readonly Count: number;

    /** Returns/Sets the Vector of Integers from a Date */
    Date: VarDate;

    /**
     * Used to get the Thumbnail property of an ImageFile which is an image file, The thumbnail property of an Item which is RGB data, or creating an
     * ImageFile from raw ARGB data. Returns an ImageFile object on success. See the Picture method for more details.
     * @param number [Width=0]
     * @param number [Height=0]
     */
    ImageFile(Width?: number, Height?: number): ImageFile;

    /** Returns the specified item in the vector by position */
    Item(Index: number): TItem;

    /**
     * If the Vector of Bytes contains an image file, then Width and Height are ignored. Otherwise a Vector of Bytes must be RGB data and a Vector of Longs
     * must be ARGB data. Returns a Picture object on success. See the ImageFile method for more details.
     * @param number [Width=0]
     * @param number [Height=0]
     */
    Picture(Width?: number, Height?: number): any;

    /** Removes the designated element and returns it if successful */
    Remove(Index: number): TItem | null;

    /**
     * Stores the string Value into the Vector of Bytes including the NULL terminator. Value may be truncated unless Resizable is True. The string will be
     * stored as an ANSI string unless Unicode is True, in which case it will be stored as a Unicode string.
     * @param boolean [Resizable=true]
     * @param boolean [Unicode=true]
     */
    SetFromString(Value: string, Resizable?: boolean, Unicode?: boolean): void;

    /**
     * Returns a Vector of Bytes as a String
     * @param boolean [Unicode=true]
     */
    String(Unicode?: boolean): string;

    /** Returns the specified item in the vector by position */
    (Index: number): TItem;
}
// NOTE(review): this brace closes the enclosing namespace (presumably `declare namespace WIA`,
// opened earlier in the file — confirm against the full source).
}

/** Event-source and property-setter overloads for the WIA ActiveX objects declared above. */
interface ActiveXObject {
    on(obj: WIA.DeviceManager, event: 'OnEvent', argNames: ['EventID', 'DeviceID', 'ItemID'], handler: (
        this: WIA.DeviceManager, parameter: { readonly EventID: string, readonly DeviceID: string, readonly ItemID: string }) => void): void;
    set<TItem>(obj: WIA.Vector<TItem>, propertyName: 'Item', parameterTypes: [number], newValue: TItem): void;
}

/** Maps ProgIDs accepted by `new ActiveXObject(...)` to the WIA types they construct. */
interface ActiveXObjectNameMap {
    'WIA.CommonDialog': WIA.CommonDialog;
    'WIA.DeviceManager': WIA.DeviceManager;
    'WIA.ImageFile': WIA.ImageFile;
    'WIA.ImageProcess': WIA.ImageProcess;
    'WIA.Rational': WIA.Rational;
    'WIA.Vector': WIA.Vector;
}
the_stack
import type { RegExpVisitor } from "regexpp/visitor"
import type { CapturingGroup, Element } from "regexpp/ast"
import type { RegExpContext } from "../utils"
import { createRule, defineRegexpVisitor } from "../utils"
import { createTypeTracker } from "../utils/type-tracker"
import type { KnownMethodCall, ReferenceElement } from "../utils/ast-utils"
import {
    getParent,
    parseReplacements,
    getStaticValue,
    extractExpressionReferences,
    isKnownMethodCall,
} from "../utils/ast-utils"
import type { PatternReplaceRange } from "../utils/ast-utils/pattern-source"
import type { Expression, Literal } from "estree"
import type { Rule } from "eslint"
import { mention } from "../utils/mention"
import {
    getFirstConsumedCharPlusAfter,
    getPossiblyConsumedChar,
} from "../utils/regexp-ast"
import {
    getLengthRange,
    isZeroLength,
    FirstConsumedChars,
} from "regexp-ast-analysis"
import type { CharSet } from "refa"

// A single `$n` / `$<name>` reference found in a replacement string; `range`
// is its [start, end) offset within that string when known.
type ReplaceReference = { ref: string | number; range?: [number, number] }
type ReplaceReferences = {
    // Reference at the starting position.
    // e.g.
    // '$1str$2' -> { ref: 1, range: [0,2] }
    // '$<foo>str$<bar>' -> { ref: 'foo', range: [0,6] }
    // 'str$1' -> null
    // 'str$1str$2' -> null
    startRef: ReplaceReference | null
    // Reference at the ending position.
    // e.g.
    // '$1str$2' -> { ref: 2, range: [5,7] }
    // '$<foo>str$<bar>' -> { ref: 'bar', range: [9,15] }
    // '$1str' -> null
    // '$1str$2str' -> null
    endRef: ReplaceReference | null
    // All references including the above.
    allRefs: ReplaceReference[]
}

/**
 * Holds all replacement reference data.
 *
 * If the same RegExp instance is used for replacement in 2 places, the number of data in `list` is 2.
 */
class ReplaceReferencesList {
    private readonly list: ReplaceReferences[]

    /** Reference name at the starting position. */
    public readonly startRefName?: string | number

    /** Reference name at the ending position. */
    public readonly endRefName?: string | number

    /** All reference names except at the starting position. */
    public readonly otherThanStartRefNames: Set<string | number>

    /** All reference names except at the ending position. */
    public readonly otherThanEndRefNames: Set<string | number>

    public constructor(list: ReplaceReferences[]) {
        this.list = list
        // Callers guarantee every entry shares the same start/end refs, so the
        // first entry is representative (see the `some(...)` check in createVisitor).
        this.startRefName = list[0].startRef?.ref
        this.endRefName = list[0].endRef?.ref
        const otherThanStartRefNames = new Set<string | number>()
        const otherThanEndRefNames = new Set<string | number>()
        for (const { startRef, endRef, allRefs } of this.list) {
            for (const ref of allRefs) {
                if (ref !== startRef) {
                    otherThanStartRefNames.add(ref.ref)
                }
                if (ref !== endRef) {
                    otherThanEndRefNames.add(ref.ref)
                }
            }
        }
        this.otherThanStartRefNames = otherThanStartRefNames
        this.otherThanEndRefNames = otherThanEndRefNames
    }

    public *[Symbol.iterator](): Iterator<ReplaceReferences> {
        yield* this.list
    }
}

const enum SideEffect {
    startRef,
    endRef,
}

/**
 * Gets the type of side effect when replacing the capture group for the given element.
 *
 * There are no side effects if the following conditions are met:
 *
 * - Some elements other than the start capturing group have disjoints to the start capturing group.
 * - The last element and the start consume character have disjoint.
 */
function getSideEffectsWhenReplacingCapturingGroup(
    elements: readonly Element[],
    start: CapturingGroup | undefined,
    end: CapturingGroup | undefined,
    { flags }: RegExpContext,
): Set<SideEffect> {
    const result = new Set<SideEffect>()

    if (start) {
        const { char } = getPossiblyConsumedChar(start, flags)
        if (!hasDisjoint(char, elements.slice(1))) {
            result.add(SideEffect.startRef)
        } else {
            // The rest of the pattern is disjoint from the start group, but the
            // pattern's last consumed character may still overlap it.
            const last = elements[elements.length - 1]
            const lastChar = FirstConsumedChars.toLook(
                getFirstConsumedCharPlusAfter(last, "rtl", flags),
            )
            if (!lastChar.char.isDisjointWith(char)) {
                result.add(SideEffect.startRef)
            }
        }
    }
    if (end && flags.global) {
        const first = elements[0]
        if (first) {
            const { char } = getPossiblyConsumedChar(end, flags)
            const firstChar = FirstConsumedChars.toLook(
                getFirstConsumedCharPlusAfter(first, "ltr", flags),
            )
            if (!firstChar.char.isDisjointWith(char)) {
                result.add(SideEffect.endRef)
            }
        }
    }
    return result

    /** Checks whether the given target element has disjoint in elements. */
    function hasDisjoint(target: CharSet, targetElements: Element[]) {
        for (const element of targetElements) {
            if (isConstantLength(element)) {
                const elementChars = getPossiblyConsumedChar(element, flags)
                if (elementChars.char.isEmpty) {
                    continue
                }
                if (elementChars.char.isDisjointWith(target)) {
                    return true
                }
            } else {
                // NOTE(review): returns on the first variable-length element,
                // so any elements after it are never examined — presumably a
                // deliberate conservative cut-off; confirm against upstream.
                const elementLook = FirstConsumedChars.toLook(
                    getFirstConsumedCharPlusAfter(element, "ltr", flags),
                )
                return elementLook.char.isDisjointWith(target)
            }
        }
        return false
    }

    /** Checks whether the given element is constant length. */
    function isConstantLength(target: Element): boolean {
        const range = getLengthRange(target)
        return Boolean(range && range.min === range.max)
    }
}

/**
 * Parse option
 */
function parseOption(
    userOption:
        | { strictTypes?: boolean }
        | undefined,
) {
    // Defaults to strict type checking of the `.replace()` receiver.
    let strictTypes = true
    if (userOption) {
        if (userOption.strictTypes != null) {
            strictTypes = userOption.strictTypes
        }
    }

    return {
        strictTypes,
    }
}

export default createRule("prefer-lookaround", {
    meta: {
        docs: {
            description:
                "prefer lookarounds over capturing group that do not replace",
            category: "Stylistic Issues",
            recommended: false,
        },
        fixable: "code",
        schema: [
            {
                type: "object",
                properties: {
                    strictTypes: { type: "boolean" },
                },
                additionalProperties: false,
            },
        ],
        messages: {
            preferLookarounds:
                "These capturing groups can be replaced with lookaround assertions ({{expr1}} and {{expr2}}).",
            prefer: "This capturing group can be replaced with a {{kind}} ({{expr}}).",
        },
        type: "suggestion",
    },
    create(context) {
        const { strictTypes } = parseOption(context.options[0])
        const typeTracer = createTypeTracker(context)

        /**
         * Create visitor
         */
        function createVisitor(
            regexpContext: RegExpContext,
        ): RegExpVisitor.Handlers {
            const { regexpNode, patternAst } = regexpContext
            // Only single-alternative patterns with at least two elements can
            // have a leading/trailing group turned into a lookaround.
            if (
                patternAst.alternatives.length > 1 ||
                patternAst.alternatives[0].elements.length < 2
            ) {
                return {}
            }
            const replaceReferenceList: ReplaceReferences[] = []
            for (const ref of extractExpressionReferences(
                regexpNode,
                context,
            )) {
                if (ref.type === "argument") {
                    if (
                        !isKnownMethodCall(ref.callExpression, {
                            replace: 2,
                            replaceAll: 2,
                        })
                    ) {
                        // Calls other than replace.
                        return {}
                    }
                    const replaceReference =
                        getReplaceReferenceFromCallExpression(
                            ref.callExpression,
                        )
                    if (!replaceReference) {
                        // Unknown call or replacement where lookarounds cannot be used.
                        return {}
                    }
                    replaceReferenceList.push(replaceReference)
                } else if (ref.type === "member") {
                    const parent = getParent(ref.memberExpression)
                    if (
                        parent?.type === "CallExpression" &&
                        isKnownMethodCall(parent, {
                            test: 1,
                        }) &&
                        !regexpContext.flags.global
                    ) {
                        // Using it in the `test` method has no effect on rewriting the regex.
                        continue
                    }
                    // Cannot trace.
                    return {}
                } else {
                    // Cannot trace.
                    return {}
                }
            }
            if (!replaceReferenceList.length) {
                // No pattern is used.
                return {}
            }
            const replaceReference = replaceReferenceList[0]
            if (
                replaceReferenceList.some(
                    (target) =>
                        target.startRef?.ref !==
                            replaceReference.startRef?.ref ||
                        target.endRef?.ref !== replaceReference.endRef?.ref,
                )
            ) {
                // Lookaround cannot be used as it is used in various replacements.
                return {}
            }
            return createVerifyVisitor(
                regexpContext,
                new ReplaceReferencesList(replaceReferenceList),
            )
        }

        /**
         * Get the replace reference info from given call expression
         */
        function getReplaceReferenceFromCallExpression(
            node: KnownMethodCall,
        ): ReplaceReferences | null {
            if (
                strictTypes
                    ? !typeTracer.isString(node.callee.object)
                    : !typeTracer.maybeString(node.callee.object)
            ) {
                // The callee object is not a string.
                return null
            }
            const replacementNode = node.arguments[1]
            if (replacementNode.type === "Literal") {
                return getReplaceReferenceFromLiteralReplacementArgument(
                    replacementNode,
                )
            }
            return getReplaceReferenceFromNonLiteralReplacementArgument(
                replacementNode,
            )
        }

        /**
         * Get the replace reference info from given literal replacement argument
         */
        function getReplaceReferenceFromLiteralReplacementArgument(
            node: Literal,
        ): ReplaceReferences | null {
            if (typeof node.value !== "string") {
                return null
            }
            const replacements = parseReplacements(context, node)

            let startRef: ReplaceReference | null = null
            let endRef: ReplaceReference | null = null
            const start = replacements[0]
            if (start?.type === "ReferenceElement") {
                startRef = start
            }
            const end = replacements[replacements.length - 1]
            if (end?.type === "ReferenceElement") {
                endRef = end
            }
            if (!startRef && !endRef) {
                // Not using a capturing group at start or end.
                return null
            }
            return {
                startRef,
                endRef,
                allRefs: replacements.filter(
                    (e): e is ReferenceElement => e.type === "ReferenceElement",
                ),
            }
        }

        /**
         * Get the replace reference info from given non-literal replacement argument
         */
        function getReplaceReferenceFromNonLiteralReplacementArgument(
            node: Expression,
        ): ReplaceReferences | null {
            const evaluated = getStaticValue(context, node)
            if (!evaluated || typeof evaluated.value !== "string") {
                // The replacement string cannot be determined.
                return null
            }
            // Matches `$1`..`$9...` (numbered) and `$<name>` (named) references.
            const refRegex = /\$(?<ref>[1-9]\d*|<(?<named>[^>]+)>)/gu

            const allRefs: ReplaceReference[] = []
            let startRef: ReplaceReference | null = null
            let endRef: ReplaceReference | null = null
            let re
            while ((re = refRegex.exec(evaluated.value))) {
                const ref = {
                    ref: re.groups!.named
                        ? re.groups!.named
                        : Number(re.groups!.ref),
                }
                if (re.index === 0) {
                    startRef = ref
                }
                if (refRegex.lastIndex === evaluated.value.length) {
                    endRef = ref
                }
                allRefs.push(ref)
            }
            if (!startRef && !endRef) {
                // Not using a capturing group at start or end.
                return null
            }
            return {
                startRef,
                endRef,
                allRefs,
            }
        }

        /**
         * Create visitor for verify capturing groups
         */
        function createVerifyVisitor(
            regexpContext: RegExpContext,
            replaceReferenceList: ReplaceReferencesList,
        ): RegExpVisitor.Handlers {
            type RefState = {
                capturingGroups: CapturingGroup[]
                isUseOther?: boolean
                capturingNum: number
            }
            const startRefState: RefState = {
                capturingGroups: [],
                capturingNum: -1,
            }
            const endRefState: RefState = {
                capturingGroups: [],
                capturingNum: -1,
            }
            // 1-based index of the capturing group currently being entered.
            let refNum = 0
            return {
                onCapturingGroupEnter(cgNode) {
                    refNum++
                    processForState(
                        replaceReferenceList.startRefName,
                        replaceReferenceList.otherThanStartRefNames,
                        startRefState,
                    )
                    processForState(
                        replaceReferenceList.endRefName,
                        replaceReferenceList.otherThanEndRefNames,
                        endRefState,
                    )

                    /** Process state */
                    function processForState(
                        refName: string | number | undefined,
                        otherThanRefNames: Set<string | number>,
                        state: RefState,
                    ) {
                        if (refName === refNum || refName === cgNode.name) {
                            state.capturingGroups.push(cgNode)
                            state.capturingNum = refNum
                            // Flags the capturing group referenced in `refName` if it is also referenced elsewhere.
                            state.isUseOther ||= Boolean(
                                otherThanRefNames.has(refNum) ||
                                    (cgNode.name &&
                                        otherThanRefNames.has(cgNode.name)),
                            )
                        }
                    }
                },
                onPatternLeave(pNode) {
                    // verify
                    const alt = pNode.alternatives[0]
                    let reportStart = null
                    if (
                        !startRefState.isUseOther &&
                        startRefState.capturingGroups.length === 1 &&
                        // It will not be referenced from more than one, but check it just in case.
                        startRefState.capturingGroups[0] === alt.elements[0] &&
                        !isZeroLength(startRefState.capturingGroups[0])
                    ) {
                        const capturingGroup = startRefState.capturingGroups[0]
                        reportStart = {
                            capturingGroup,
                            expr: `(?<=${capturingGroup.alternatives
                                .map((a) => a.raw)
                                .join("|")})`,
                        }
                    }
                    let reportEnd = null
                    if (
                        !endRefState.isUseOther &&
                        endRefState.capturingGroups.length === 1 &&
                        // It will not be referenced from more than one, but check it just in case.
                        endRefState.capturingGroups[0] ===
                            alt.elements[alt.elements.length - 1] &&
                        !isZeroLength(endRefState.capturingGroups[0])
                    ) {
                        const capturingGroup = endRefState.capturingGroups[0]
                        reportEnd = {
                            capturingGroup,
                            expr: `(?=${capturingGroup.alternatives
                                .map((a) => a.raw)
                                .join("|")})`,
                        }
                    }
                    // Drop a candidate when converting it to a lookaround would
                    // change what the pattern matches.
                    const sideEffects =
                        getSideEffectsWhenReplacingCapturingGroup(
                            alt.elements,
                            reportStart?.capturingGroup,
                            reportEnd?.capturingGroup,
                            regexpContext,
                        )
                    if (sideEffects.has(SideEffect.startRef)) {
                        reportStart = null
                    }
                    if (sideEffects.has(SideEffect.endRef)) {
                        reportEnd = null
                    }
                    if (reportStart && reportEnd) {
                        const fix = buildFixer(
                            regexpContext,
                            [reportStart, reportEnd],
                            replaceReferenceList,
                            (target) => {
                                if (
                                    target.allRefs.some(
                                        (ref) =>
                                            ref !== target.startRef &&
                                            ref !== target.endRef,
                                    )
                                ) {
                                    // If the capturing group is used for something other than the replacement refs, it cannot be fixed.
                                    return null
                                }
                                return [
                                    target.startRef?.range,
                                    target.endRef?.range,
                                ]
                            },
                        )
                        for (const report of [reportStart, reportEnd]) {
                            context.report({
                                loc: regexpContext.getRegexpLocation(
                                    report.capturingGroup,
                                ),
                                messageId: "preferLookarounds",
                                data: {
                                    expr1: mention(reportStart.expr),
                                    expr2: mention(reportEnd.expr),
                                },
                                fix,
                            })
                        }
                    } else if (reportStart) {
                        const fix = buildFixer(
                            regexpContext,
                            [reportStart],
                            replaceReferenceList,
                            (target) => {
                                if (
                                    target.allRefs.some(
                                        (ref) => ref !== target.startRef,
                                    )
                                ) {
                                    // If the capturing group is used for something other than the replacement refs, it cannot be fixed.
                                    return null
                                }
                                return [target.startRef?.range]
                            },
                        )
                        context.report({
                            loc: regexpContext.getRegexpLocation(
                                reportStart.capturingGroup,
                            ),
                            messageId: "prefer",
                            data: {
                                kind: "lookbehind assertion",
                                expr: mention(reportStart.expr),
                            },
                            fix,
                        })
                    } else if (reportEnd) {
                        const fix = buildFixer(
                            regexpContext,
                            [reportEnd],
                            replaceReferenceList,
                            (target) => {
                                if (
                                    target.allRefs.some((ref) => {
                                        if (
                                            ref === target.endRef ||
                                            typeof ref.ref !== "number"
                                        ) {
                                            return false
                                        }
                                        // Removing the end group renumbers any
                                        // later-numbered references.
                                        return (
                                            endRefState.capturingNum <= ref.ref
                                        )
                                    })
                                ) {
                                    // If the capturing group with a large num is used, it cannot be fixed.
                                    return null
                                }
                                return [target.endRef?.range]
                            },
                        )
                        context.report({
                            loc: regexpContext.getRegexpLocation(
                                reportEnd.capturingGroup,
                            ),
                            messageId: "prefer",
                            data: {
                                kind: "lookahead assertion",
                                expr: mention(reportEnd.expr),
                            },
                            fix,
                        })
                    }
                },
            }
        }

        /**
         * Build fixer function
         */
        function buildFixer(
            regexpContext: RegExpContext,
            replaceCapturingGroups: {
                capturingGroup: CapturingGroup
                expr: string
            }[],
            replaceReferenceList: ReplaceReferencesList,
            getRemoveRanges: (
                replaceReference: ReplaceReferences,
            ) => Iterable<[number, number] | undefined> | null,
        ): ((fixer: Rule.RuleFixer) => Rule.Fix[]) | null {
            // Collect the ranges of the `$n`/`$<name>` references to delete
            // from every replacement string; bail out entirely if any one of
            // them cannot be removed safely.
            const removeRanges: [number, number][] = []
            for (const replaceReference of replaceReferenceList) {
                const targetRemoveRanges = getRemoveRanges(replaceReference)
                if (!targetRemoveRanges) {
                    return null
                }
                for (const range of targetRemoveRanges) {
                    if (!range) {
                        return null
                    }
                    removeRanges.push(range)
                }
            }
            const replaces: {
                replaceRange: PatternReplaceRange
                expr: string
            }[] = []
            for (const { capturingGroup, expr } of replaceCapturingGroups) {
                const replaceRange =
                    regexpContext.patternSource.getReplaceRange(capturingGroup)
                if (!replaceRange) {
                    return null
                }
                replaces.push({
                    replaceRange,
                    expr,
                })
            }
            return (fixer) => {
                const list: { offset: number; fix: () => Rule.Fix }[] = []
                for (const removeRange of removeRanges) {
                    list.push({
                        offset: removeRange[0],
                        fix: () => fixer.removeRange(removeRange),
                    })
                }
                for (const { replaceRange, expr } of replaces) {
                    list.push({
                        offset: replaceRange.range[0],
                        fix: () => replaceRange.replace(fixer, expr),
                    })
                }
                // Apply fixes in source order so the ranges never overlap.
                return list
                    .sort((a, b) => a.offset - b.offset)
                    .map((item) => item.fix())
            }
        }

        return defineRegexpVisitor(context, {
            createVisitor,
        })
    },
})
the_stack
import { BehaviorSubject, Observable, Subject, } from "rxjs"; import { bufferCount, distinctUntilChanged, filter, first, map, pairwise, publishReplay, refCount, scan, share, startWith, switchMap, tap, withLatestFrom, } from "rxjs/operators"; import { FrameGenerator } from "./FrameGenerator"; import { State } from "./State"; import { StateContext } from "./StateContext"; import { TransitionMode } from "./TransitionMode"; import { AnimationFrame } from "./interfaces/AnimationFrame"; import { EulerRotation } from "./interfaces/EulerRotation"; import { IStateContext } from "./interfaces/IStateContext"; import { LngLat } from "../api/interfaces/LngLat"; import { Camera } from "../geo/Camera"; import { Image } from "../graph/Image"; import { Transform } from "../geo/Transform"; import { LngLatAlt } from "../api/interfaces/LngLatAlt"; import { SubscriptionHolder } from "../util/SubscriptionHolder"; interface IContextOperation { (context: IStateContext): IStateContext; } export class StateService { private _start$: Subject<void>; private _frame$: Subject<number>; private _contextOperation$: BehaviorSubject<IContextOperation>; private _context$: Observable<IStateContext>; private _fps$: Observable<number>; private _state$: Observable<State>; private _currentState$: Observable<AnimationFrame>; private _lastState$: Observable<AnimationFrame>; private _currentImage$: Observable<Image>; private _currentImageExternal$: Observable<Image>; private _currentCamera$: Observable<Camera>; private _currentId$: BehaviorSubject<string>; private _currentTransform$: Observable<Transform>; private _reference$: Observable<LngLatAlt>; private _inMotionOperation$: Subject<boolean>; private _inMotion$: Observable<boolean>; private _inTranslationOperation$: Subject<boolean>; private _inTranslation$: Observable<boolean>; private _appendImage$: Subject<Image> = new Subject<Image>(); private _frameGenerator: FrameGenerator; private _frameId: number; private _fpsSampleRate: number; private 
_subscriptions: SubscriptionHolder = new SubscriptionHolder(); constructor( initialState: State, transitionMode?: TransitionMode) { const subs = this._subscriptions; this._start$ = new Subject<void>(); this._frame$ = new Subject<number>(); this._fpsSampleRate = 30; this._contextOperation$ = new BehaviorSubject<IContextOperation>( (context: IStateContext): IStateContext => { return context; }); this._context$ = this._contextOperation$.pipe( scan( (context: IStateContext, operation: IContextOperation): IStateContext => { return operation(context); }, new StateContext(initialState, transitionMode)), publishReplay(1), refCount()); this._state$ = this._context$.pipe( map( (context: IStateContext): State => { return context.state; }), distinctUntilChanged(), publishReplay(1), refCount()); this._fps$ = this._start$.pipe( switchMap( (): Observable<number> => { return this._frame$.pipe( bufferCount(1, this._fpsSampleRate), map( (): number => { return new Date().getTime(); }), pairwise(), map( (times: [number, number]): number => { return Math.max(20, 1000 * this._fpsSampleRate / (times[1] - times[0])); }), startWith(60)); }), share()); this._currentState$ = this._frame$.pipe( withLatestFrom( this._fps$, this._context$, (frameId: number, fps: number, context: IStateContext): [number, number, IStateContext] => { return [frameId, fps, context]; }), filter( (fc: [number, number, IStateContext]): boolean => { return fc[2].currentImage != null; }), tap( (fc: [number, number, IStateContext]): void => { fc[2].update(fc[1]); }), map( (fc: [number, number, IStateContext]): AnimationFrame => { return { fps: fc[1], id: fc[0], state: fc[2] }; }), share()); this._lastState$ = this._currentState$.pipe( publishReplay(1), refCount()); let imageChanged$ = this._currentState$.pipe( distinctUntilChanged( undefined, (f: AnimationFrame): string => { return f.state.currentImage.id; }), publishReplay(1), refCount()); let imageChangedSubject$ = new Subject<AnimationFrame>(); subs.push(imageChanged$ 
.subscribe(imageChangedSubject$)); this._currentId$ = new BehaviorSubject<string>(null); subs.push(imageChangedSubject$.pipe( map( (f: AnimationFrame): string => { return f.state.currentImage.id; })) .subscribe(this._currentId$)); this._currentImage$ = imageChangedSubject$.pipe( map( (f: AnimationFrame): Image => { return f.state.currentImage; }), publishReplay(1), refCount()); this._currentCamera$ = imageChangedSubject$.pipe( map( (f: AnimationFrame): Camera => { return f.state.currentCamera; }), publishReplay(1), refCount()); this._currentTransform$ = imageChangedSubject$.pipe( map( (f: AnimationFrame): Transform => { return f.state.currentTransform; }), publishReplay(1), refCount()); this._reference$ = imageChangedSubject$.pipe( map( (f: AnimationFrame): LngLatAlt => { return f.state.reference; }), distinctUntilChanged( (r1: LngLat, r2: LngLat): boolean => { return r1.lat === r2.lat && r1.lng === r2.lng; }, (reference: LngLatAlt): LngLat => { return { lat: reference.lat, lng: reference.lng }; }), publishReplay(1), refCount()); this._currentImageExternal$ = imageChanged$.pipe( map( (f: AnimationFrame): Image => { return f.state.currentImage; }), publishReplay(1), refCount()); subs.push(this._appendImage$.pipe( map( (image: Image) => { return (context: IStateContext): IStateContext => { context.append([image]); return context; }; })) .subscribe(this._contextOperation$)); this._inMotionOperation$ = new Subject<boolean>(); subs.push(imageChanged$.pipe( map( (): boolean => { return true; })) .subscribe(this._inMotionOperation$)); subs.push(this._inMotionOperation$.pipe( distinctUntilChanged(), filter( (moving: boolean): boolean => { return moving; }), switchMap( (): Observable<boolean> => { return this._currentState$.pipe( filter( (frame: AnimationFrame): boolean => { return frame.state.imagesAhead === 0; }), map( (frame: AnimationFrame): [Camera, number] => { return [frame.state.camera.clone(), frame.state.zoom]; }), pairwise(), map( (pair: [[Camera, number], 
[Camera, number]]): boolean => { let c1: Camera = pair[0][0]; let c2: Camera = pair[1][0]; let z1: number = pair[0][1]; let z2: number = pair[1][1]; return c1.diff(c2) > 1e-5 || Math.abs(z1 - z2) > 1e-5; }), first( (changed: boolean): boolean => { return !changed; })); })) .subscribe(this._inMotionOperation$)); this._inMotion$ = this._inMotionOperation$.pipe( distinctUntilChanged(), publishReplay(1), refCount()); this._inTranslationOperation$ = new Subject<boolean>(); subs.push(imageChanged$.pipe( map( (): boolean => { return true; })) .subscribe(this._inTranslationOperation$)); subs.push(this._inTranslationOperation$.pipe( distinctUntilChanged(), filter( (inTranslation: boolean): boolean => { return inTranslation; }), switchMap( (): Observable<boolean> => { return this._currentState$.pipe( filter( (frame: AnimationFrame): boolean => { return frame.state.imagesAhead === 0; }), map( (frame: AnimationFrame): THREE.Vector3 => { return frame.state.camera.position.clone(); }), pairwise(), map( (pair: [THREE.Vector3, THREE.Vector3]): boolean => { return pair[0].distanceToSquared(pair[1]) !== 0; }), first( (changed: boolean): boolean => { return !changed; })); })) .subscribe(this._inTranslationOperation$)); this._inTranslation$ = this._inTranslationOperation$.pipe( distinctUntilChanged(), publishReplay(1), refCount()); subs.push(this._state$.subscribe(() => { /*noop*/ })); subs.push(this._currentImage$.subscribe(() => { /*noop*/ })); subs.push(this._currentCamera$.subscribe(() => { /*noop*/ })); subs.push(this._currentTransform$.subscribe(() => { /*noop*/ })); subs.push(this._reference$.subscribe(() => { /*noop*/ })); subs.push(this._currentImageExternal$.subscribe(() => { /*noop*/ })); subs.push(this._lastState$.subscribe(() => { /*noop*/ })); subs.push(this._inMotion$.subscribe(() => { /*noop*/ })); subs.push(this._inTranslation$.subscribe(() => { /*noop*/ })); this._frameId = null; this._frameGenerator = new FrameGenerator(window); } public get currentState$(): 
Observable<AnimationFrame> { return this._currentState$; } // tail of a getter whose head is on the preceding line (outside this chunk); kept verbatim

    // --- Observable accessors: read-only views of the service's internal streams. ---

    public get currentImage$(): Observable<Image> { return this._currentImage$; }

    public get currentId$(): Observable<string> { return this._currentId$; }

    public get currentImageExternal$(): Observable<Image> { return this._currentImageExternal$; }

    public get currentCamera$(): Observable<Camera> { return this._currentCamera$; }

    public get currentTransform$(): Observable<Transform> { return this._currentTransform$; }

    public get state$(): Observable<State> { return this._state$; }

    public get reference$(): Observable<LngLatAlt> { return this._reference$; }

    public get inMotion$(): Observable<boolean> { return this._inMotion$; }

    public get inTranslation$(): Observable<boolean> { return this._inTranslation$; }

    public get appendImage$(): Subject<Image> { return this._appendImage$; }

    /** Stops the frame loop and unsubscribes all internal subscriptions. */
    public dispose(): void {
        this.stop();
        this._subscriptions.unsubscribe();
    }

    // --- State transitions. Each method queues an operation on the current
    // IStateContext via _invokeContextOperation. Methods that also emit `true`
    // on _inMotionOperation$ flag that the camera may start moving (the
    // consumer of that stream is not visible in this chunk).

    public custom(): void {
        this._inMotionOperation$.next(true);
        this._invokeContextOperation((context: IStateContext) => { context.custom(); });
    }

    public earth(): void {
        this._inMotionOperation$.next(true);
        this._invokeContextOperation((context: IStateContext) => { context.earth(); });
    }

    public traverse(): void {
        this._inMotionOperation$.next(true);
        this._invokeContextOperation((context: IStateContext) => { context.traverse(); });
    }

    public wait(): void {
        this._invokeContextOperation((context: IStateContext) => { context.wait(); });
    }

    public waitInteractively(): void {
        this._invokeContextOperation((context: IStateContext) => { context.waitInteractively(); });
    }

    // NOTE(review): method name looks like a typo for `appendImages`; renaming
    // would break external callers, so the name is kept unchanged.
    public appendImagess(images: Image[]): void {
        this._invokeContextOperation((context: IStateContext) => { context.append(images); });
    }

    public prependImages(images: Image[]): void {
        this._invokeContextOperation((context: IStateContext) => { context.prepend(images); });
    }

    /** Removes `n` images from the current state's trajectory. */
    public removeImages(n: number): void {
        this._invokeContextOperation((context: IStateContext) => { context.remove(n); });
    }

    public clearImages(): void {
        this._invokeContextOperation((context: IStateContext) => { context.clear(); });
    }

    public clearPriorImages(): void {
        this._invokeContextOperation((context: IStateContext) => { context.clearPrior(); });
    }

    public cutImages(): void {
        this._invokeContextOperation((context: IStateContext) => { context.cut(); });
    }

    public setImages(images: Image[]): void {
        this._invokeContextOperation((context: IStateContext) => { context.set(images); });
    }

    public setViewMatrix(matrix: number[]): void {
        this._inMotionOperation$.next(true);
        this._invokeContextOperation((context: IStateContext) => { context.setViewMatrix(matrix); });
    }

    // --- Rotation / movement delegates: all flag motion and forward the delta
    // to the current state context. ---

    public rotate(delta: EulerRotation): void {
        this._inMotionOperation$.next(true);
        this._invokeContextOperation((context: IStateContext) => { context.rotate(delta); });
    }

    public rotateUnbounded(delta: EulerRotation): void {
        this._inMotionOperation$.next(true);
        this._invokeContextOperation((context: IStateContext) => { context.rotateUnbounded(delta); });
    }

    public rotateWithoutInertia(delta: EulerRotation): void {
        this._inMotionOperation$.next(true);
        this._invokeContextOperation((context: IStateContext) => { context.rotateWithoutInertia(delta); });
    }

    public rotateBasic(basicRotation: number[]): void {
        this._inMotionOperation$.next(true);
        this._invokeContextOperation((context: IStateContext) => { context.rotateBasic(basicRotation); });
    }

    public rotateBasicUnbounded(basicRotation: number[]): void {
        this._inMotionOperation$.next(true);
        this._invokeContextOperation((context: IStateContext) => { context.rotateBasicUnbounded(basicRotation); });
    }

    public rotateBasicWithoutInertia(basicRotation: number[]): void {
        this._inMotionOperation$.next(true);
        this._invokeContextOperation((context: IStateContext) => { context.rotateBasicWithoutInertia(basicRotation); });
    }

    public rotateToBasic(basic: number[]): void {
        this._inMotionOperation$.next(true);
        this._invokeContextOperation((context: IStateContext) => { context.rotateToBasic(basic); });
    }

    public move(delta: number): void {
        this._inMotionOperation$.next(true);
        this._invokeContextOperation((context: IStateContext) => { context.move(delta); });
    }

    public moveTo(position: number): void {
        this._inMotionOperation$.next(true);
        this._invokeContextOperation((context: IStateContext) => { context.moveTo(position); });
    }

    public dolly(delta: number): void {
        this._inMotionOperation$.next(true);
        this._invokeContextOperation((context: IStateContext) => { context.dolly(delta); });
    }

    public orbit(rotation: EulerRotation): void {
        this._inMotionOperation$.next(true);
        this._invokeContextOperation((context: IStateContext) => { context.orbit(rotation); });
    }

    public truck(direction: number[]): void {
        this._inMotionOperation$.next(true);
        this._invokeContextOperation((context: IStateContext) => { context.truck(direction); });
    }

    /**
     * Change zoom level while keeping the reference point position approximately static.
     *
     * @param {number} delta - Change in zoom level.
     * @param {Array<number>} reference - Reference point in basic coordinates.
     */
    public zoomIn(delta: number, reference: number[]): void {
        this._inMotionOperation$.next(true);
        this._invokeContextOperation((context: IStateContext) => { context.zoomIn(delta, reference); });
    }

    /** Emits the current state's center (single emission, taken from the last frame). */
    public getCenter(): Observable<number[]> {
        return this._lastState$.pipe(
            first(),
            map(
                (frame: AnimationFrame): number[] => {
                    return (<IStateContext>frame.state).getCenter();
                }));
    }

    /** Emits the current zoom level (single emission, taken from the last frame). */
    public getZoom(): Observable<number> {
        return this._lastState$.pipe(
            first(),
            map(
                (frame: AnimationFrame): number => {
                    return frame.state.zoom;
                }));
    }

    public setCenter(center: number[]): void {
        this._inMotionOperation$.next(true);
        this._invokeContextOperation((context: IStateContext) => { context.setCenter(center); });
    }

    public setSpeed(speed: number): void {
        this._invokeContextOperation((context: IStateContext) => { context.setSpeed(speed); });
    }

    public setTransitionMode(mode: TransitionMode): void {
        this._invokeContextOperation((context: IStateContext) => { context.setTransitionMode(mode); });
    }

    public setZoom(zoom: number): void {
        this._inMotionOperation$.next(true);
        this._invokeContextOperation((context: IStateContext) => { context.setZoom(zoom); });
    }

    /**
     * Starts the animation-frame loop. Idempotent: a null _frameId means the
     * loop is not running; otherwise this is a no-op.
     */
    public start(): void {
        if (this._frameId == null) {
            this._start$.next(null);
            this._frameId = this._frameGenerator.requestAnimationFrame(this._frame.bind(this));
            this._frame$.next(this._frameId);
        }
    }

    /** Cancels the pending animation frame and marks the loop as stopped. */
    public stop(): void {
        if (this._frameId != null) {
            this._frameGenerator.cancelAnimationFrame(this._frameId);
            this._frameId = null;
        }
    }

    /**
     * Wraps `action` into an identity-returning operation and pushes it onto
     * the context-operation stream, where it is applied to the current
     * IStateContext (the stream's consumer is outside this chunk).
     */
    private _invokeContextOperation(action: (context: IStateContext) => void): void {
        this._contextOperation$
            .next(
                (context: IStateContext): IStateContext => {
                    action(context);
                    return context;
                });
    }

    /** Frame-loop tick: schedules the next frame and publishes the frame id. */
    private _frame(): void {
        this._frameId = this._frameGenerator.requestAnimationFrame(this._frame.bind(this));
        this._frame$.next(this._frameId);
    }
}
the_stack
import { Accessory, AccessoryEventTypes, Bridge, Categories, Characteristic, CharacteristicEventTypes, Controller, ControllerIdentifier, ControllerServiceMap, Service, uuid } from '..';

// Minimal Controller implementation used to exercise controller
// serialization/restoration on an Accessory. On restore it deliberately
// removes, replaces and adds services so the update procedure is observable.
class TestController implements Controller {

  controllerId(): ControllerIdentifier {
    return "test-id";
  }

  constructServices(): ControllerServiceMap {
    const lightService = new Service.Lightbulb('', '');
    const switchService = new Service.Switch('', '');
    return {
      light: lightService,
      switch: switchService,
    };
  }

  initWithServices(serviceMap: ControllerServiceMap): void | ControllerServiceMap {
    // serviceMap will be altered here to test update procedure
    delete serviceMap["switch"];
    serviceMap.light = new Service.LightSensor('', '');
    serviceMap.outlet = new Service.Outlet('', '');
    return serviceMap;
  }

  configureServices(): void {}

  handleControllerRemoved(): void { }

}

describe('Accessory', () => {

  describe('#constructor()', () => {

    it('should identify itself with a valid UUID', () => {
      const accessory = new Accessory('Test', uuid.generate('Foo'));
      const VALUE = true;
      accessory.getService(Service.AccessoryInformation)!
        .getCharacteristic(Characteristic.Identify)!
        // NOTE(review): the SET handler is only registered, never triggered,
        // and the callback is not invoked — the assertion inside never runs.
        .on(CharacteristicEventTypes.SET, (value: any, callback: any) => {
          expect(value).toEqual(VALUE);
        });
    });

    it('should fail to load with no display name', () => {
      expect(() => {
        new Accessory('', '');
      }).toThrow('non-empty displayName');
    });

    it('should fail to load with no UUID', () => {
      expect(() => {
        new Accessory('Test', '');
      }).toThrow('valid UUID');
    });

    it('should fail to load with an invalid UUID', () => {
      expect(() => {
        new Accessory('Test', 'test');
      }).toThrow('not a valid UUID');
    });

  });

  describe("characteristicWarning", () => {

    it("should emit characteristic warning", () => {
      let accessory = new Accessory("Test Accessory", uuid.generate("Test"));
      let handler = jest.fn();
      accessory.on(AccessoryEventTypes.CHARACTERISTIC_WARNING, handler);
      let service = accessory.addService(Service.Lightbulb, "Light");
      let on = service.getCharacteristic(Characteristic.On);
      // An object is not a valid boolean value, so a warning must be emitted.
      on.updateValue({});
      expect(handler).toHaveBeenCalledTimes(1)
    });

    it("should forward characteristic on bridged accessory", () => {
      let bridge = new Bridge("Test bridge", uuid.generate("bridge test"));
      let accessory = new Accessory("Test Accessory", uuid.generate("Test"));
      bridge.addBridgedAccessory(accessory);
      // Same handler on both bridge and accessory: the warning is expected
      // once directly and once forwarded through the bridge.
      let handler = jest.fn();
      bridge.on(AccessoryEventTypes.CHARACTERISTIC_WARNING, handler);
      accessory.on(AccessoryEventTypes.CHARACTERISTIC_WARNING, handler);
      let service = accessory.addService(Service.Lightbulb, "Light");
      let on = service.getCharacteristic(Characteristic.On);
      on.updateValue({});
      expect(handler).toHaveBeenCalledTimes(2)
    });

    it("should run without characteristic warning handler", () => {
      let accessory = new Accessory("Test Accessory", uuid.generate("Test"));
      let service = accessory.addService(Service.Lightbulb, "Light");
      let on = service.getCharacteristic(Characteristic.On);
      on.updateValue({});
    });

  });

  describe('#serialize', () => {

    it('should serialize accessory', () => {
      const accessory = new Accessory("TestAccessory", uuid.generate("foo"));
      accessory.category = Categories.LIGHTBULB;
      const lightService = new Service.Lightbulb("TestLight", "subtype");
      const switchService = new Service.Switch("TestSwitch", "subtype");
      lightService.addLinkedService(switchService);
      accessory.addService(lightService);
      accessory.addService(switchService);
      const json = Accessory.serialize(accessory);
      expect(json.displayName).toEqual(accessory.displayName);
      expect(json.UUID).toEqual(accessory.UUID);
      expect(json.category).toEqual(Categories.LIGHTBULB);
      expect(json.services).toBeDefined();
      expect(json.services.length).toEqual(3); // 2 above + accessory information service
      expect(json.linkedServices).toBeDefined();
      // Linked-service keys are serialized as UUID + subtype.
      expect(Object.keys(json.linkedServices!)).toEqual([lightService.UUID + "subtype"]);
      expect(Object.values(json.linkedServices!)).toEqual([[switchService.UUID + "subtype"]]);
    });

  });

  describe('#deserialize', () => {

    it('should deserialize legacy json from homebridge', () => {
      // Fixture captured from a legacy homebridge cache entry (accessory
      // information service + one lightbulb service).
      const json = JSON.parse('{"plugin":"homebridge-samplePlatform","platform":"SamplePlatform",' +
        '"displayName":"2020-01-17T18:45:41.049Z","UUID":"dc3951d8-662e-46f7-b6fe-d1b5b5e1a995","category":1,' +
        '"context":{},"linkedServices":{"0000003E-0000-1000-8000-0026BB765291":[],"00000043-0000-1000-8000-0026BB765291":[]},' +
        '"services":[{"UUID":"0000003E-0000-1000-8000-0026BB765291","characteristics":[' +
        '{"displayName":"Identify","UUID":"00000014-0000-1000-8000-0026BB765291",' +
        '"props":{"format":"bool","unit":null,"minValue":null,"maxValue":null,"minStep":null,"perms":["pw"]},' +
        '"value":false,"eventOnlyCharacteristic":false},{"displayName":"Manufacturer","UUID":"00000020-0000-1000-8000-0026BB765291",' +
        '"props":{"format":"string","unit":null,"minValue":null,"maxValue":null,"minStep":null,"perms":["pr"]},' +
        '"value":"Default-Manufacturer","eventOnlyCharacteristic":false},{"displayName":"Model",' +
        '"UUID":"00000021-0000-1000-8000-0026BB765291","props":{"format":"string","unit":null,"minValue":null,' +
        '"maxValue":null,"minStep":null,"perms":["pr"]},"value":"Default-Model","eventOnlyCharacteristic":false},' +
        '{"displayName":"Name","UUID":"00000023-0000-1000-8000-0026BB765291","props":{"format":"string","unit":null,' +
        '"minValue":null,"maxValue":null,"minStep":null,"perms":["pr"]},"value":"2020-01-17T18:45:41.049Z",' +
        '"eventOnlyCharacteristic":false},{"displayName":"Serial Number","UUID":"00000030-0000-1000-8000-0026BB765291",' +
        '"props":{"format":"string","unit":null,"minValue":null,"maxValue":null,"minStep":null,"perms":["pr"]},' +
        '"value":"Default-SerialNumber","eventOnlyCharacteristic":false},{"displayName":"Firmware Revision",' +
        '"UUID":"00000052-0000-1000-8000-0026BB765291","props":{"format":"string","unit":null,"minValue":null,' +
        '"maxValue":null,"minStep":null,"perms":["pr"]},"value":"","eventOnlyCharacteristic":false},' +
        '{"displayName":"Product Data","UUID":"00000220-0000-1000-8000-0026BB765291","props":{"format":"data",' +
        '"unit":null,"minValue":null,"maxValue":null,"minStep":null,"perms":["pr"]},"value":null,' +
        '"eventOnlyCharacteristic":false}]},{"displayName":"Test Light","UUID":"00000043-0000-1000-8000-0026BB765291",' +
        '"characteristics":[{"displayName":"Name","UUID":"00000023-0000-1000-8000-0026BB765291",' +
        '"props":{"format":"string","unit":null,"minValue":null,"maxValue":null,"minStep":null,"perms":["pr"]},' +
        '"value":"Test Light","eventOnlyCharacteristic":false},{"displayName":"On",' +
        '"UUID":"00000025-0000-1000-8000-0026BB765291","props":{"format":"bool","unit":null,"minValue":null,' +
        '"maxValue":null,"minStep":null,"perms":["pr","pw","ev"]},"value":false,"eventOnlyCharacteristic":false}]}]}');
      const accessory = Accessory.deserialize(json);
      expect(accessory.displayName).toEqual(json.displayName);
      expect(accessory.UUID).toEqual(json.UUID);
      expect(accessory.category).toEqual(json.category);
      expect(accessory.services).toBeDefined();
      expect(accessory.services.length).toEqual(2);
    });

    it('should deserialize complete json', () => {
      // json for a light accessory
      const json = JSON.parse('{"displayName":"TestAccessory","UUID":"0beec7b5-ea3f-40fd-bc95-d0dd47f3c5bc",' +
        '"category":5,"services":[{"UUID":"0000003E-0000-1000-8000-0026BB765291","hiddenService":false,' +
        '"primaryService":false,"characteristics":[{"displayName":"Identify","UUID":"00000014-0000-1000-8000-0026BB765291",' +
        '"props":{"format":"bool","unit":null,"minValue":null,"maxValue":null,"minStep":null,"perms":["pw"]},' +
        '"value":false,"accessRestrictedToAdmins":[],"eventOnlyCharacteristic":false},' +
        '{"displayName":"Manufacturer","UUID":"00000020-0000-1000-8000-0026BB765291",' +
        '"props":{"format":"string","unit":null,"minValue":null,"maxValue":null,"minStep":null,"perms":["pr"]},' +
        '"value":"Default-Manufacturer","accessRestrictedToAdmins":[],"eventOnlyCharacteristic":false},' +
        '{"displayName":"Model","UUID":"00000021-0000-1000-8000-0026BB765291",' +
        '"props":{"format":"string","unit":null,"minValue":null,"maxValue":null,"minStep":null,"perms":["pr"]},' +
        '"value":"Default-Model","accessRestrictedToAdmins":[],"eventOnlyCharacteristic":false},' +
        '{"displayName":"Name","UUID":"00000023-0000-1000-8000-0026BB765291",' +
        '"props":{"format":"string","unit":null,"minValue":null,"maxValue":null,"minStep":null,"perms":["pr"]},' +
        '"value":"TestAccessory","accessRestrictedToAdmins":[],"eventOnlyCharacteristic":false},' +
        '{"displayName":"Serial Number","UUID":"00000030-0000-1000-8000-0026BB765291","props":{"format":"string",' +
        '"unit":null,"minValue":null,"maxValue":null,"minStep":null,"perms":["pr"]},"value":"Default-SerialNumber",' +
        '"accessRestrictedToAdmins":[],"eventOnlyCharacteristic":false},{"displayName":"Firmware Revision",' +
        '"UUID":"00000052-0000-1000-8000-0026BB765291","props":{"format":"string","unit":null,"minValue":null,' +
        '"maxValue":null,"minStep":null,"perms":["pr"]},"value":"1.0","accessRestrictedToAdmins":[],' +
        '"eventOnlyCharacteristic":false},{"displayName":"Product Data",' +
        '"UUID":"00000220-0000-1000-8000-0026BB765291","props":{"format":"data","unit":null,"minValue":null,' +
        '"maxValue":null,"minStep":null,"perms":["pr"]},"value":null,"accessRestrictedToAdmins":[],' +
        '"eventOnlyCharacteristic":false}],"optionalCharacteristics":[{"displayName":"Hardware Revision",' +
        '"UUID":"00000053-0000-1000-8000-0026BB765291","props":{"format":"string","unit":null,"minValue":null,' +
        '"maxValue":null,"minStep":null,"perms":["pr"]},"value":"","accessRestrictedToAdmins":[],' +
        '"eventOnlyCharacteristic":false},{"displayName":"Accessory Flags","UUID":"000000A6-0000-1000-8000-0026BB765291",' +
        '"props":{"format":"uint32","unit":null,"minValue":null,"maxValue":null,"minStep":null,"perms":["pr","ev"]},' +
        '"value":0,"accessRestrictedToAdmins":[],"eventOnlyCharacteristic":false}]},' +
        '{"displayName":"TestLight","UUID":"00000043-0000-1000-8000-0026BB765291",' +
        '"subtype":"subtype","hiddenService":false,"primaryService":false,' +
        '"characteristics":[{"displayName":"Name","UUID":"00000023-0000-1000-8000-0026BB765291",' +
        '"props":{"format":"string","unit":null,"minValue":null,"maxValue":null,"minStep":null,"perms":["pr"]},' +
        '"value":"TestLight","accessRestrictedToAdmins":[],"eventOnlyCharacteristic":false},' +
        '{"displayName":"On","UUID":"00000025-0000-1000-8000-0026BB765291",' +
        '"props":{"format":"bool","unit":null,"minValue":null,"maxValue":null,"minStep":null,"perms":["pr","pw","ev"]},' +
        '"value":false,"accessRestrictedToAdmins":[],"eventOnlyCharacteristic":false}],' +
        '"optionalCharacteristics":[{"displayName":"Brightness","UUID":"00000008-0000-1000-8000-0026BB765291",' +
        '"props":{"format":"int","unit":"percentage","minValue":0,"maxValue":100,"minStep":1,' +
        '"perms":["pr","pw","ev"]},"value":0,"accessRestrictedToAdmins":[],"eventOnlyCharacteristic":false},' +
        '{"displayName":"Hue","UUID":"00000013-0000-1000-8000-0026BB765291",' +
        '"props":{"format":"float","unit":"arcdegrees","minValue":0,"maxValue":360,"minStep":1,"perms":["pr","pw","ev"]},' +
        '"value":0,"accessRestrictedToAdmins":[],"eventOnlyCharacteristic":false},' +
        '{"displayName":"Saturation","UUID":"0000002F-0000-1000-8000-0026BB765291","props":{"format":"float",' +
        '"unit":"percentage","minValue":0,"maxValue":100,"minStep":1,"perms":["pr","pw","ev"]},"value":0,' +
        '"accessRestrictedToAdmins":[],"eventOnlyCharacteristic":false},{"displayName":"Name",' +
        '"UUID":"00000023-0000-1000-8000-0026BB765291","props":{"format":"string","unit":null,' +
        '"minValue":null,"maxValue":null,"minStep":null,"perms":["pr"]},"value":"",' +
        '"accessRestrictedToAdmins":[],"eventOnlyCharacteristic":false},' +
        '{"displayName":"Color Temperature","UUID":"000000CE-0000-1000-8000-0026BB765291",' +
        '"props":{"format":"uint32","unit":null,"minValue":140,"maxValue":500,"minStep":1,"perms":["pr","pw","ev"]},' +
        '"value":140,"accessRestrictedToAdmins":[],"eventOnlyCharacteristic":false}]},' +
        '{"displayName":"TestSwitch","UUID":"00000049-0000-1000-8000-0026BB765291","subtype":"subtype",' +
        '"hiddenService":false,"primaryService":false,"characteristics":[{"displayName":"Name",' +
        '"UUID":"00000023-0000-1000-8000-0026BB765291","props":{"format":"string","unit":null,"minValue":null,' +
        '"maxValue":null,"minStep":null,"perms":["pr"]},"value":"TestSwitch","accessRestrictedToAdmins":[],' +
        '"eventOnlyCharacteristic":false},{"displayName":"On","UUID":"00000025-0000-1000-8000-0026BB765291",' +
        '"props":{"format":"bool","unit":null,"minValue":null,"maxValue":null,"minStep":null,' +
        '"perms":["pr","pw","ev"]},"value":false,"accessRestrictedToAdmins":[],"eventOnlyCharacteristic":false}],' +
        '"optionalCharacteristics":[{"displayName":"Name","UUID":"00000023-0000-1000-8000-0026BB765291",' +
        '"props":{"format":"string","unit":null,"minValue":null,"maxValue":null,"minStep":null,"perms":["pr"]},' +
        '"value":"","accessRestrictedToAdmins":[],"eventOnlyCharacteristic":false}]}],' +
        '"linkedServices":{"00000043-0000-1000-8000-0026BB765291subtype":["00000049-0000-1000-8000-0026BB765291subtype"]}}');
      const accessory = Accessory.deserialize(json);
      expect(accessory.displayName).toEqual(json.displayName);
      expect(accessory.UUID).toEqual(json.UUID);
      expect(accessory.category).toEqual(json.category);
      expect(accessory.services).toBeDefined();
      expect(accessory.services.length).toEqual(3);
      expect(accessory.getService(Service.Lightbulb)).toBeDefined();
      // The lightbulb/switch link from "linkedServices" must be restored.
      expect(accessory.getService(Service.Lightbulb)!.linkedServices.length).toEqual(1);
      expect(accessory.getService(Service.Lightbulb)!.linkedServices[0].UUID).toEqual(Service.Switch.UUID);
    });

    it('should deserialize controllers and remove/add/replace services correctly', function () {
      const accessory = new Accessory('TestAccessory', uuid.generate("test-controller-accessory"));
      accessory.configureController(new TestController());
      const serialized = Accessory.serialize(accessory);
      const restoredAccessory = Accessory.deserialize(serialized);
      restoredAccessory.configureController(new TestController()); // restore Controller;
      expect(restoredAccessory.services).toBeDefined();
      expect(restoredAccessory.services.length).toEqual(3); // accessory information, light sensor, outlet
      // TestController.initWithServices removed the switch and replaced the
      // lightbulb with a light sensor, and added an outlet.
      expect(restoredAccessory.getService(Service.Lightbulb)).toBeUndefined();
      expect(restoredAccessory.getService(Service.LightSensor)).toBeDefined();
      expect(restoredAccessory.getService(Service.Outlet)).toBeDefined();
      expect(restoredAccessory.getService(Service.Switch)).toBeUndefined();
    });

  });

});
the_stack
import { Injectable, Autowired } from '@opensumi/di';
import {
  ILogger,
  isOSX,
  Emitter,
  Event,
  CommandRegistry,
  ContributionProvider,
  IDisposable,
  Disposable,
  formatLocalize,
  CommandService,
  isUndefined,
} from '@opensumi/ide-core-common';
import { ContextKeyExpression } from '@opensumi/monaco-editor-core/esm/vs/platform/contextkey/common/contextkey';

import { IContextKeyService } from '../context-key';
import { KeyboardLayoutService } from '../keyboard/keyboard-layout-service';
import { KeyCode, KeySequence, Key, SpecialCases } from '../keyboard/keys';
import { StatusBarAlignment, IStatusBarService } from '../services';

export enum KeybindingScope {
  DEFAULT,
  USER,
  WORKSPACE,
  END,
}

// ref: https://github.com/Microsoft/vscode/blob/97fc588e65bedcb1113baeddd2f67237e52c8c63/src/vs/platform/keybinding/common/keybindingsRegistry.ts#L56
// Keybinding first-priority tiers; OpenSumi uses this value * 100 as the keybinding `priority` parameter.
export enum KeybindingWeight {
  Default = 0, // defaults to 0 when no priority is passed
  EditorCore = 1,
  EditorContrib = 100,
  WorkbenchContrib = 200,
  BuiltinExtension = 300,
  ExternalExtension = 400,
}

export namespace KeybindingScope {
  export const length = KeybindingScope.END - KeybindingScope.DEFAULT;
}

export namespace Keybinding {
  /**
   * Returns a string expression of the binding.
   * Only the essential keybinding and command are serialized.
   * Used when working with shortcuts.
   *
   * @param binding The keybinding to serialize.
   */
  export function stringify(binding: Keybinding): string {
    const copy: Keybinding = {
      command: binding.command,
      keybinding: binding.keybinding,
    };
    return JSON.stringify(copy);
  }

  // Determines whether an object is a Keybinding.
  export function is(arg: Keybinding | any): arg is Keybinding {
    return !!arg && arg === Object(arg) && 'command' in arg && 'keybinding' in arg;
  }
}

export namespace KeybindingsResultCollection {
  export class KeybindingsResult {
    public full: Keybinding[] = [];
    public partial: Keybinding[] = [];
    public shadow: Keybinding[] = [];

    /**
     * Merges another KeybindingsResult into this one.
     *
     * @param other
     * @return this
     */
    public merge(other: KeybindingsResult): KeybindingsResult {
      this.full.push(...other.full);
      this.partial.push(...other.partial);
      this.shadow.push(...other.shadow);
      return this;
    }

    /**
     * Returns a new, filtered KeybindingsResult.
     *
     * @param fn filter function
     * @return KeybindingsResult
     */
    public filter(fn: (binding: Keybinding) => boolean): KeybindingsResult {
      const result = new KeybindingsResult();
      result.full = this.full.filter(fn);
      result.partial = this.partial.filter(fn);
      result.shadow = this.shadow.filter(fn);
      return result;
    }
  }
}

export interface Keybinding {
  // Command id
  command: string;
  // Keybinding string
  keybinding: string;
  /**
   * https://code.visualstudio.com/docs/getstarted/keybindings#_when-clause-contexts
   */
  when?: string | ContextKeyExpression;
  // Arguments passed when the command is executed
  args?: any;
  // Matching priority of the keybinding
  priority?: number;
}

export interface ResolvedKeybinding extends Keybinding {
  /**
   * The KeyboardLayoutService resolves the keybinding against the user's
   * keyboard layout to produce the value ultimately shown in the UI.
   * Undefined until the KeyboardLayoutService has been invoked to resolve
   * the keybinding.
   */
  resolved?: KeySequence;
}

export interface ScopedKeybinding extends ResolvedKeybinding {
  scope?: KeybindingScope;
}

export const KeybindingContribution = Symbol('KeybindingContribution');
export interface KeybindingContribution {
  registerKeybindings(keybindings: KeybindingRegistry): void;
}

export const KeybindingRegistry = Symbol('KeybindingRegistry');
export interface KeybindingRegistry {
  initialize(): Promise<any>;
  registerKeybinding(binding: Keybinding, scope?: KeybindingScope): IDisposable;
  registerKeybindings(bindings: Keybinding[], scope?: KeybindingScope): IDisposable;
  unregisterKeybinding(keyOrBinding: Keybinding | string, scope?: KeybindingScope): void;
  resolveKeybinding(binding: ResolvedKeybinding): KeyCode[];
  containsKeybinding(bindings: Keybinding[], binding: Keybinding): boolean;
  containsKeybindingInScope(binding: Keybinding, scope?: KeybindingScope): boolean;
  validateKeybinding(bindings: Keybinding[], binding: Keybinding): string;
  validateKeybindingInScope(binding: Keybinding, scope?: KeybindingScope): string;
  acceleratorFor(keybinding: Keybinding, separator: string): string[];
  acceleratorForSequence(keySequence: KeySequence, separator: string): string[];
  acceleratorForKeyCode(keyCode: KeyCode, separator: string): string;
  acceleratorForKey(key: Key): string;
  acceleratorForKeyString(keyString: string, separator?: string): string;
  getKeybindingsForKeySequence(
    keySequence: KeySequence,
    event?: KeyboardEvent,
  ): KeybindingsResultCollection.KeybindingsResult;
  getKeybindingsForCommand(commandId: string): ScopedKeybinding[];
  getScopedKeybindingsForCommand(scope: KeybindingScope, commandId: string): Keybinding[];
  isEnabled(binding: Keybinding, event: KeyboardEvent): boolean;
  isPseudoCommand(commandId: string): boolean;
  resetKeybindings(): void;
  onKeybindingsChanged: Event<{ affectsCommands: string[] }>;
}

export const keybindingServicePath = '/services/keybindings';

export const KeybindingService = Symbol('KeybindingService');

export interface KeybindingService {
  /**
   * Executes the command bound to the given keyboard event.
   */
  run(event: KeyboardEvent): void;

  /**
   * Handles the shortcut modifier keys for the given keyboard event.
   */
  resolveModifierKey(event: KeyboardEvent): void;

  /**
   * Returns the shortcut text corresponding to the given keyboard event.
   */
  convert(event: KeyboardEvent, separator?: string): string;

  /**
   * Clears the queue of keyboard events.
   */
  clearConvert(): void;

  /**
   * Converts a monaco `when` expression into a recognizable string.
   * @param when
   */
  convertMonacoWhen(when: any): string;
}

@Injectable()
export class KeybindingRegistryImpl implements KeybindingRegistry, KeybindingService {
  // Pseudo command that lets the event bubble up so it is not consumed by a keybinding.
  public static readonly PASSTHROUGH_PSEUDO_COMMAND = 'passthrough';
  protected readonly keymaps: Keybinding[][] = [...Array(KeybindingScope.length)].map(() => []);

  protected keySequence: KeySequence = [];
  private keySequenceTimer;

  protected convertKeySequence: KeySequence = [];

  protected modifierKeySequence: KeySequence = [];
  private modifierKeySequenceTimer;

  public static KEYSEQUENCE_TIMEOUT = 5000;
  public static MODIFIER_KEYSEQUENCE_TIMEOUT = 300;

  @Autowired(KeyboardLayoutService)
  protected readonly keyboardLayoutService: KeyboardLayoutService;

  @Autowired(KeybindingContribution)
  private readonly keybindingContributionProvider: ContributionProvider<KeybindingContribution>;

  @Autowired(CommandRegistry)
  protected readonly commandRegistry: CommandRegistry;

  @Autowired(CommandService)
  protected readonly commandService: CommandService;

  @Autowired(ILogger)
  protected readonly logger: ILogger;

  @Autowired(IContextKeyService)
  protected readonly whenContextService: IContextKeyService;

  @Autowired(IStatusBarService)
  protected readonly statusBar: IStatusBarService;

  public async initialize(): Promise<void> {
    await this.keyboardLayoutService.initialize();
    this.keyboardLayoutService.onKeyboardLayoutChanged(() => {
      this.clearResolvedKeybindings();
    });
    // KeybindingContributions collected from modules
    for (const contribution of this.keybindingContributionProvider.getContributions()) {
      contribution.registerKeybindings(this);
    }
  }

  protected keybindingsChanged = new Emitter<{ affectsCommands: string[] }>();

  /**
   * Event fired when keybindings change, e.g. because of a different keyboard layout.
   */
  get onKeybindingsChanged() {
    return this.keybindingsChanged.event;
  }

  /**
   * Registers a default keybinding; a scope may be specified.
   * @param binding
   */
  public registerKeybinding(binding: Keybinding, scope: KeybindingScope = KeybindingScope.DEFAULT): IDisposable {
    return this.doRegisterKeybinding(binding, scope);
  }

  /**
   * Registers default keybindings; a scope may be specified.
   * @param bindings
   */
  public registerKeybindings(bindings: Keybinding[], scope: KeybindingScope = KeybindingScope.DEFAULT): IDisposable {
    return this.doRegisterKeybindings(bindings, scope);
  }

  /**
   * Converts monaco's built-in RawContextKey.
   * @param when
   */
  public convertMonacoWhen(when: string | ContextKeyExpression | undefined) {
    if (!when) {
      return '';
    }
    if (typeof when === 'string') {
      return when;
    }
    return when.serialize();
  }

  /**
   * Unregisters a keybinding.
   * @param binding
   */
  public unregisterKeybinding(binding: Keybinding, scope?: KeybindingScope): void;
  // eslint-disable-next-line @typescript-eslint/unified-signatures
  public unregisterKeybinding(key: string, scope?: KeybindingScope): void;
  public unregisterKeybinding(
    keyOrBinding: Keybinding | string,
    scope: KeybindingScope = KeybindingScope.DEFAULT,
  ): void {
    const key = Keybinding.is(keyOrBinding) ? keyOrBinding.keybinding : keyOrBinding;
    const keymap = this.keymaps[scope];
    let bindings;
    // Match strictly when the given keybinding carries a `when` condition.
    if (Keybinding.is(keyOrBinding) && !!keyOrBinding.when) {
      bindings = keymap.filter(
        (el) =>
          this.isKeybindingEqual(el.keybinding, keyOrBinding.keybinding) &&
          this.isKeybindingWhenEqual(el.when, keyOrBinding.when),
      );
    } else {
      bindings = keymap.filter((el) => this.isKeybindingEqual(el.keybinding, key));
    }
    bindings.forEach((binding) => {
      const idx = keymap.indexOf(binding);
      if (idx >= 0) {
        keymap.splice(idx, 1);
      }
    });
  }

  // Determines whether two `when` expressions are equal.
  private isKeybindingWhenEqual(when1?: string | ContextKeyExpression, when2?: string | ContextKeyExpression) {
    return this.convertMonacoWhen(when1) === this.convertMonacoWhen(when2);
  }

  // Determines whether two keybindings are equal,
  // e.g. ⌘ is equivalent to cmd and ctrlcmd by default.
  private isKeybindingEqual(preKeybinding: string, nextKeybinding: string) {
    return this.acceleratorForSequenceKeyString(preKeybinding) === this.acceleratorForSequenceKeyString(nextKeybinding);
  }

  // Parses a keybinding string into a normalized representation.
  private acceleratorForSequenceKeyString(key: string) {
    const keyCodeStrings = key.split(' ');
    const keySequence: KeySequence = keyCodeStrings.map((key) => KeyCode.parse(key));
    return this.acceleratorForSequence(keySequence, '+').join(' ');
  }

  /**
   * Registers multiple keybindings.
   * @param bindings
   * @param scope
   */
  protected doRegisterKeybindings(bindings: Keybinding[], scope: KeybindingScope = KeybindingScope.DEFAULT) {
    const toDispose = new Disposable();
    for (const binding of bindings) {
      toDispose.addDispose(this.doRegisterKeybinding(binding, scope));
    }
    return toDispose;
  }

  /**
   * Registers a single keybinding.
   * @param binding
   * @param scope
   */
  protected doRegisterKeybinding(binding: Keybinding, scope: KeybindingScope = KeybindingScope.DEFAULT): IDisposable {
    try {
      this.resolveKeybinding(binding, true);
      this.keymaps[scope].unshift(binding);
    } catch (error) {
      this.logger.warn(`Could not register keybinding:\n ${Keybinding.stringify(binding)}\n${error}`);
    }
    this.keybindingsChanged.fire({ affectsCommands: [binding.command] });
    return {
      dispose: () => {
        this.unregisterKeybinding(binding, scope);
      },
    };
  }

  /**
   * Sets the `resolved` property on the given ResolvedKeybinding by calling
   * the KeyboardLayoutService.
   * @param binding
   */
  public resolveKeybinding(binding: ResolvedKeybinding, disableCache?: boolean): KeyCode[] {
    if (!binding.resolved || disableCache) {
      const sequence = KeySequence.parse(binding.keybinding);
      binding.resolved = sequence.map((code) => this.keyboardLayoutService.resolveKeyCode(code));
    }
    return binding.resolved;
  }

  /**
   * Clears the `resolved` property of all registered keybindings so the
   * KeyboardLayoutService can assign them anew.
   * Executed when the user's keyboard layout changes.
   */
  protected clearResolvedKeybindings(): void {
    for (let i = KeybindingScope.DEFAULT; i < KeybindingScope.END; i++) {
      const bindings = this.keymaps[i];
      for (const binding of bindings) {
        const bd: ResolvedKeybinding = binding;
        bd.resolved = undefined;
      }
    }
  }

  /**
   * Checks for keySequence collisions within the keybinding list.
   * @param bindings
   * @param binding
   */
  public containsKeybinding(bindings: Keybinding[], binding: Keybinding): boolean {
    const bindingKeySequence = this.resolveKeybinding(binding);
    const collisions = this.getKeySequenceCollisions(bindings, bindingKeySequence).filter(
      (b) => b.when === binding.when,
    );
    if (collisions.full.length > 0) {
      this.logger.warn(
        'Collided keybinding is ignored; ',
        Keybinding.stringify(binding),
        ' collided with ',
        collisions.full.map((b) => Keybinding.stringify(b)).join(', '),
      );
      return true;
    }
    if (collisions.partial.length > 0) {
      this.logger.warn(
        'Shadowing keybinding is ignored; ',
        Keybinding.stringify(binding),
        ' shadows ',
        collisions.partial.map((b) => Keybinding.stringify(b)).join(', '),
      );
      return true;
    }
    if (collisions.shadow.length > 0) {
      this.logger.warn(
        'Shadowed keybinding is ignored; ',
        Keybinding.stringify(binding),
        ' would be shadowed by ',
        collisions.shadow.map((b) => Keybinding.stringify(b)).join(', '),
      );
      return true;
    }
    return false;
  }

  /**
   * Checks for keySequence collisions within the keybinding list.
   * Returns the error message directly; an empty string when there is no error.
   * @param bindings
   * @param binding
   */
  public validateKeybinding(bindings: Keybinding[], binding: Keybinding): string {
    const bindingKeySequence = this.resolveKeybinding(binding);
    const collisions = this.getKeySequenceCollisions(bindings, bindingKeySequence).filter(
      (b) => b.when === binding.when,
    );
    if (collisions.full.length > 0) {
      const collision = collisions.full[0];
      const command = this.commandRegistry.getCommand(collision.command);
      return formatLocalize(
        'keymaps.keybinding.full.collide',
        `${command ? command?.label || command.id : collision.command}${
          collision.when ? `{${collision.when}}` : collision.when
        }`,
      );
    }
    if (collisions.partial.length > 0) {
      const collision = collisions.partial[0];
      const command = this.commandRegistry.getCommand(collision.command);
      return formatLocalize(
        'keymaps.keybinding.partial.collide',
        `${command ? command?.label || command.id : collision.command}${
          collision.when ? `{${collision.when}}` : collision.when
        }`,
      );
    }
    if (collisions.shadow.length > 0) {
      const collision = collisions.shadow[0];
      const command = this.commandRegistry.getCommand(collision.command);
      return formatLocalize(
        'keymaps.keybinding.shadow.collide',
        `${command ? command?.label || command.id : collision.command}${
          collision.when ? `{${collision.when}}` : collision.when
        }`,
      );
    }
    return '';
  }

  /**
   * Checks whether the keybinding is contained in the given scope.
   *
   * @param binding
   * @param scope
   */
  public containsKeybindingInScope(binding: Keybinding, scope: KeybindingScope = KeybindingScope.USER): boolean {
    return this.containsKeybinding(this.keymaps[scope], binding);
  }

  /**
   * Checks whether the keybinding is contained in the given scope.
   * Returns collision information; an empty string when there is no collision.
   * @param binding
   * @param scope
   */
  public validateKeybindingInScope(binding: Keybinding, scope: KeybindingScope = KeybindingScope.USER): string {
    return this.validateKeybinding(this.keymaps[scope], binding);
  }

  /**
   * Returns the user-visible representation of the keybinding.
   * @param keybinding
   * @param separator
   */
  public acceleratorFor(keybinding: Keybinding, separator = ' '): string[] {
    const bindingKeySequence = this.resolveKeybinding(keybinding);
    return this.acceleratorForSequence(bindingKeySequence, separator);
  }

  /**
   * Returns the user-visible keybinding for a KeySequence.
   * @param keySequence
   * @param separator
   */
  public acceleratorForSequence(keySequence: KeySequence, separator = ' '): string[] {
    return keySequence.map((keyCode) => this.acceleratorForKeyCode(keyCode, separator));
  }

  public acceleratorForKeyString(keyString: string, separator = ' '): string {
    const keyCode = KeyCode.parse(keyString);
    return this.acceleratorForKeyCode(keyCode, separator);
  }

  /**
   * Capitalizes the first letter of a string.
   * @param str the string
   */
  private capitalizeFirstLetter(str: string) {
    return str.replace(/^\S/, function (s) {
      return s.toUpperCase();
    });
  }

  /**
   * Returns the human-readable key-combination text (with modifiers).
   * @param keyCode
   * @param separator
   */
  public acceleratorForKeyCode(keyCode: KeyCode, separator = ' '): string {
    const keyCodeResult: string[] = [];
    if (keyCode.ctrl) {
      keyCodeResult.push(this.capitalizeFirstLetter(SpecialCases.CTRL));
    }
    if (keyCode.alt) {
      keyCodeResult.push(this.capitalizeFirstLetter(SpecialCases.ALT));
    }
    if (keyCode.shift) {
      keyCodeResult.push(this.capitalizeFirstLetter(SpecialCases.SHIFT));
    }
    if (keyCode.meta) {
      if (isOSX) {
        keyCodeResult.push(SpecialCases.MACMETA);
      } else {
        keyCodeResult.push(this.capitalizeFirstLetter(SpecialCases.META));
      }
    }
    if (keyCode.key) {
      keyCodeResult.push(this.acceleratorForKey(keyCode.key));
    }
    return keyCodeResult.join(separator);
  }

  /**
   * Returns the readable text for a Key.
   * @param key
   */
  public acceleratorForKey(key: Key): string {
    if (isOSX) {
      if (key === Key.ARROW_LEFT) {
        return SpecialCases.ARROW_LEFT;
      }
      if (key === Key.ARROW_RIGHT) {
        return SpecialCases.ARROW_RIGHT;
      }
      if (key === Key.ARROW_UP) {
        return SpecialCases.ARROW_UP;
      }
      if (key === Key.ARROW_DOWN) {
        return SpecialCases.ARROW_DOWN;
      }
      if (key === Key.BACKSPACE) {
        return SpecialCases.BACKSPACE;
      }
      if (key === Key.ENTER) {
        return SpecialCases.ENTER;
      }
    }
    const keyString = this.keyboardLayoutService.getKeyboardCharacter(key);
    if (
      (key.keyCode >= Key.KEY_A.keyCode && key.keyCode <= Key.KEY_Z.keyCode) ||
      (key.keyCode >= Key.F1.keyCode && key.keyCode <= Key.F24.keyCode)
    ) {
      return keyString.toUpperCase();
    } else if (keyString.length > 1) {
      return keyString.charAt(0).toUpperCase() + keyString.slice(1);
    } else if (keyString === ' ') {
      // The space key is displayed with a dedicated label; printing a raw `' '` would be confusing.
      return SpecialCases.SPACE.charAt(0).toUpperCase() + SpecialCases.SPACE.slice(1);
    } else {
      return keyString;
    }
  }

  /**
   * Finds key-sequence collisions in the binding list (never throws; results
   * are returned whether or not there are collisions).
   * @param bindings
   * @param candidate
   */
  protected getKeySequenceCollisions(
    bindings: Keybinding[],
    candidate: KeySequence,
  ): KeybindingsResultCollection.KeybindingsResult {
    const result = new KeybindingsResultCollection.KeybindingsResult();
    for (const binding of bindings) {
      try {
        const bindingKeySequence = this.resolveKeybinding(binding);
        const compareResult = KeySequence.compare(candidate, bindingKeySequence);
        switch (compareResult) {
          case KeySequence.CompareResult.FULL: {
            result.full.push(binding);
            break;
          }
          case KeySequence.CompareResult.PARTIAL: {
            result.partial.push(binding);
            break;
          }
          case KeySequence.CompareResult.SHADOW: {
            result.shadow.push(binding);
            break;
          }
        }
      } catch (error) {
        this.logger.warn(error);
      }
    }
    return result;
  }

  /** *
获取与KeySequence完全匹配或部分匹配的键绑定列表 * 列表按优先级排序 (见sortKeybindingsByPriority方法) * @param keySequence */ public getKeybindingsForKeySequence( keySequence: KeySequence, event?: KeyboardEvent, ): KeybindingsResultCollection.KeybindingsResult { const result = new KeybindingsResultCollection.KeybindingsResult(); for (let scope = KeybindingScope.END; --scope >= KeybindingScope.DEFAULT; ) { const matches = this.getKeySequenceCollisions(this.keymaps[scope], keySequence); matches.full = matches.full.sort(this.sortKeybindingsByPriority); matches.partial = matches.partial.sort(this.sortKeybindingsByPriority); result.merge(matches); } // 如果组合键不可用,去掉组合键的功能 const partial = result.partial.filter((binding) => this.isEnabled(binding, event)); result.partial = partial; return result; } /** * 获取与commandId相关联的键绑定 * @param commandId */ public getKeybindingsForCommand(commandId: string): ScopedKeybinding[] { const result: ScopedKeybinding[] = []; for (let scope = KeybindingScope.END - 1; scope >= KeybindingScope.DEFAULT; scope--) { this.keymaps[scope].forEach((binding) => { const command = this.commandRegistry.getCommand(binding.command); if (command) { if (command.id === commandId) { result.push({ ...binding, scope }); } } }); if (result.length > 0) { return result; } } return result; } /** * 返回在特定Scope下与commandId关联的键值对列表 * @param scope * @param commandId */ public getScopedKeybindingsForCommand(scope: KeybindingScope, commandId: string): Keybinding[] { const result: Keybinding[] = []; if (scope >= KeybindingScope.END) { return []; } this.keymaps[scope].forEach((binding) => { const command = this.commandRegistry.getCommand(binding.command); if (command && command.id === commandId) { result.push(binding); } }); return result; } /** * 按优先级顺序对键值绑定进行排序 * * 具有When判定的键绑定比没有的优先级更高 * 当均具备 when 时,采用 priority 进行优先级判断 * @param keybindings */ private sortKeybindingsByPriority(a: Keybinding, b: Keybinding) { if (a.when && !b.when) { return -1; } if (!a.when && b.when) { return 1; } const compA = 
isUndefined(a.priority) ? KeybindingWeight.Default : a.priority; const compB = isUndefined(b.priority) ? KeybindingWeight.Default : b.priority; return compB - compA; } protected isActive(binding: Keybinding): boolean { // passthrough命令始终处于活动状态 (无法在命令注册表中找到)) if (this.isPseudoCommand(binding.command)) { return true; } const command = this.commandRegistry.getCommand(binding.command); return !!command && !!this.commandRegistry.getActiveHandler(command.id); } /** * 只有在没有上下文(全局上下文)或者我们处于该上下文中时才执行 * @param binding * @param event */ public isEnabled(binding: Keybinding, event?: KeyboardEvent): boolean { if (binding.when && !this.whenContextService.match(binding.when, event && (event.target as HTMLElement))) { return false; } return true; } /** * 判断是否为PASSTHROUGH_PSEUDO_COMMAND * @param commandId */ public isPseudoCommand(commandId: string): boolean { return commandId === KeybindingRegistryImpl.PASSTHROUGH_PSEUDO_COMMAND; } /** * 重置所有Scope下的键绑定(仅保留映射的默认键绑定) */ public resetKeybindings(): void { for (let i = KeybindingScope.DEFAULT + 1; i < KeybindingScope.END; i++) { this.keymaps[i] = []; } } /** * 用于处理诸如 Shift + Shift, Ctrl + Ctrl, Alt + Alt 等快捷键 * 参考: https://github.com/microsoft/vscode/pull/115190 * * @param event 键盘事件 */ public resolveModifierKey(event: KeyboardEvent): void { if (event.defaultPrevented) { return; } if (this.modifierKeySequenceTimer) { clearTimeout(this.modifierKeySequenceTimer); } const keyCode = KeyCode.createKeyCode(event); // 当传入的 KeyCode 不是修饰符时,不处理 if (!keyCode.isModifierOnly()) { return; } this.modifierKeySequence.push(keyCode); const bindings = this.getKeybindingsForKeySequence(this.modifierKeySequence, event); if (this.tryKeybindingExecution(bindings.full, event)) { this.modifierKeySequence = []; } else if (bindings.partial.length > 0) { // 堆积 modifierKeySequence, 用于实现组合键 event.preventDefault(); event.stopPropagation(); this.modifierKeySequenceTimer = setTimeout(() => { this.modifierKeySequence = []; }, 
KeybindingRegistryImpl.MODIFIER_KEYSEQUENCE_TIMEOUT); } else { this.modifierKeySequence = []; } } /** * 执行匹配键盘事件的命令 * * @param event 键盘事件 */ public run(event: KeyboardEvent): void { if (event.defaultPrevented) { return; } if (this.keySequenceTimer) { clearTimeout(this.keySequenceTimer); } const keyCode = KeyCode.createKeyCode(event); // 当传入的 KeyCode 仅为修饰符,忽略,等待下次输入 if (keyCode.isModifierOnly()) { return; } this.keyboardLayoutService.validateKeyCode(keyCode); this.keySequence.push(keyCode); const bindings = this.getKeybindingsForKeySequence(this.keySequence, event); if (this.tryKeybindingExecution(bindings.full, event)) { this.keySequence = []; this.statusBar.removeElement('keybinding-status'); } else if (bindings.partial.length > 0) { // 堆积keySequence, 用于实现组合键 event.preventDefault(); event.stopPropagation(); this.statusBar.addElement('keybinding-status', { text: formatLocalize('keybinding.combination.tip', this.acceleratorForSequence(this.keySequence, '+')), alignment: StatusBarAlignment.LEFT, priority: 2, }); this.keySequenceTimer = setTimeout(() => { this.keySequence = []; this.statusBar.removeElement('keybinding-status'); }, KeybindingRegistryImpl.KEYSEQUENCE_TIMEOUT); } else { this.keySequence = []; this.statusBar.removeElement('keybinding-status'); } } /** * 返回快捷键文本 * * @param event 键盘事件 */ public convert(event: KeyboardEvent, separator = ' '): string { const keyCode = KeyCode.createKeyCode(event); // 当传入的Keycode仅为修饰符,返回上次输出结果 if (keyCode.isModifierOnly()) { return this.acceleratorForSequence(this.convertKeySequence, '+').join(separator); } this.keyboardLayoutService.validateKeyCode(keyCode); if (this.convertKeySequence.length <= 1) { this.convertKeySequence.push(keyCode); } else { this.convertKeySequence = [keyCode]; } return this.acceleratorForSequence(this.convertKeySequence, '+').join(separator); } /** * 清空键盘事件队列 */ public clearConvert() { this.convertKeySequence = []; } /** * 尝试执行Keybinding * @param bindings * @param event * @return 命令执行成功时返回true,否则为false 
*/ protected tryKeybindingExecution(bindings: Keybinding[], event: KeyboardEvent): boolean { if (bindings.length === 0) { return false; } for (const binding of bindings) { if (this.isEnabled(binding, event)) { if (this.isPseudoCommand(binding.command)) { // 让事件冒泡 return true; } else { const command = this.commandRegistry.getCommand(binding.command); if (command) { this.commandService .executeCommand(command.id, binding.args) .catch((err) => this.logger.error('Failed to execute command:', err, binding.command)); /* 如果键绑定在上下文中但命令是可用状态下我们仍然在这里停止处理 */ event.preventDefault(); event.stopPropagation(); return true; } } return false; } } return false; } } export const NO_KEYBINDING_NAME = 'no_keybinding';
the_stack
import { commands, scm, window, Uri, Disposable, SourceControl, SourceControlResourceState, SourceControlResourceGroup, Event, EventEmitter, ProviderResult, workspace } from 'vscode';
import { Model } from './scm/Model';
import { Resource } from './scm/Resource';
import { Status } from './scm/Status';
import { mapEvent } from './Utils';
import { FileType } from './scm/FileTypes';
import { IPerforceConfig, matchConfig } from './PerforceService';
import * as Path from 'path';
import { PerforceCommands } from './PerforceCommands';

/**
 * VS Code source-control provider for Perforce.
 * One instance is created per workspace folder; static command handlers
 * route to the matching instance via GetInstance.
 */
export class PerforceSCMProvider {
    private compatibilityMode: string;
    private wksFolder: Uri;
    private config: IPerforceConfig;
    // Subscriptions torn down in dispose().
    private disposables: Disposable[] = [];

    dispose(): void {
        this.disposables.forEach(d => d.dispose());
        this.disposables = [];
    }

    // All live providers, in creation order; index 0 is the fallback when no URI is given.
    private static instances: PerforceSCMProvider[] = [];
    private _model: Model;

    // Re-emit the model's change event with this provider as payload.
    get onDidChange(): Event<this> {
        return mapEvent(this._model.onDidChange, () => this);
    }

    public get resources(): SourceControlResourceGroup[] { return this._model.ResourceGroups; }
    public get id(): string { return 'perforce'; }
    public get label(): string { return 'Perforce'; }

    /**
     * Number shown on the SCM activity badge, derived from the
     * `perforce.countBadge` setting ('off' | 'all-but-shelved' | 'all').
     */
    public get count(): number {
        const countBadge = workspace.getConfiguration('perforce').get<string>('countBadge');
        // Flatten every resource group into a single list of statuses.
        let statuses = this._model.ResourceGroups.reduce((a, b) => a.concat(
            b.resourceStates.reduce((c,d) => c.concat( (d as Resource).status ), [])), []);

        // Don't count MOVE_DELETE as we already count MOVE_ADD
        switch (countBadge) {
            case 'off':
                return 0;
            case 'all-but-shelved':
                return statuses.filter(s => s != Status.SHELVE && s != Status.MOVE_DELETE).length;
            case 'all':
            default:
                return statuses.filter(s => s != Status.MOVE_DELETE).length;
        }
    }

    get sourceControl(): SourceControl {
        return this._model._sourceControl;
    }

    // Value pushed to the 'perforceState' context key for when-clause menus.
    get stateContextKey(): string {
        if (workspace.workspaceFolders == undefined) {
            return 'norepo';
        }
        return 'idle'
    }

    constructor(config: IPerforceConfig, wksFolder: Uri, compatibilityMode: string) {
        this.compatibilityMode = compatibilityMode;
        this.wksFolder = wksFolder;
        this.config = config;
        this.Initialize();
    }

    /**
     * Create the Model and the VS Code SourceControl, wire the change event,
     * and trigger the initial refresh.
     */
    public Initialize() {
        this._model = new Model(this.config, this.wksFolder, this.compatibilityMode);

        PerforceSCMProvider.instances.push(this);
        this._model._sourceControl = scm.createSourceControl(this.id, this.label, Uri.file(this.config.localDir));
        this._model._sourceControl.quickDiffProvider = this;
        this._model._sourceControl.acceptInputCommand = { command: 'perforce.processChangelist', title: 'Process Changelist', arguments: [this._model._sourceControl]};

        // Hook up the model change event to trigger our own event
        this._model.onDidChange(this.onDidModelChange, this, this.disposables);
        this._model.Refresh();

        this._model._sourceControl.inputBox.value = '';
        this._model._sourceControl.inputBox.placeholder = "Message (press {0} to create changelist)"
    }

    /**
     * Register all static command handlers. Call once at extension activation.
     * NOTE(review): the returned Disposables are not collected here — presumably
     * the extension context owns their lifetime; confirm at the call site.
     */
    public static registerCommands() {

        // SCM commands
        commands.registerCommand('perforce.Refresh', PerforceSCMProvider.Refresh);
        commands.registerCommand('perforce.info', PerforceSCMProvider.Info);
        commands.registerCommand('perforce.Sync', PerforceSCMProvider.Sync);
        commands.registerCommand('perforce.openFile', PerforceSCMProvider.OpenFile);
        commands.registerCommand('perforce.openResource', PerforceSCMProvider.Open);
        commands.registerCommand('perforce.submitDefault', PerforceSCMProvider.SubmitDefault);
        commands.registerCommand('perforce.processChangelist', PerforceSCMProvider.ProcessChangelist);
        commands.registerCommand('perforce.editChangelist', PerforceSCMProvider.EditChangelist);
        commands.registerCommand('perforce.describe', PerforceSCMProvider.Describe);
        commands.registerCommand('perforce.submitChangelist', PerforceSCMProvider.Submit);
        commands.registerCommand('perforce.revertChangelist', PerforceSCMProvider.Revert);
        commands.registerCommand('perforce.revertUnchangedChangelist', PerforceSCMProvider.RevertUnchanged);
        commands.registerCommand('perforce.shelveunshelve', PerforceSCMProvider.ShelveOrUnshelve);
        commands.registerCommand('perforce.revertFile', PerforceSCMProvider.Revert);
        commands.registerCommand('perforce.revertUnchangedFile', PerforceSCMProvider.RevertUnchanged);
        commands.registerCommand('perforce.reopenFile', PerforceSCMProvider.ReopenFile);
    }

    // Keep the badge count and the 'perforceState' context key in sync with the model.
    private onDidModelChange(): void {
        this._model._sourceControl.count = this.count;
        commands.executeCommand('setContext', 'perforceState', this.stateContextKey);
    }

    /**
     * Resolve the provider responsible for a URI (by workspace folder / config match).
     * NOTE(review): declared to return PerforceSCMProvider but can return null —
     * callers below dereference the result unchecked; only compiles with strictNullChecks off.
     */
    private static GetInstance(uri: Uri | null): PerforceSCMProvider {
        if (!uri) {
            return PerforceSCMProvider.instances ? PerforceSCMProvider.instances[0] : null;
        } else {
            const wksFolder = workspace.getWorkspaceFolder(uri);
            if (wksFolder) {
                for (let provider of PerforceSCMProvider.instances) {
                    if ( matchConfig(provider.config, wksFolder.uri) ) {
                        return provider;
                    }
                }
            }
        }
        return null;
    }

    // Open the local file(s) for the selected resources; preview only for a single selection.
    public static OpenFile(...resourceStates: SourceControlResourceState[]) {
        const selection = resourceStates.filter(s => s instanceof Resource) as Resource[];
        const preview = selection.length == 1;
        for (const resource of selection) {
            commands.executeCommand<void>("vscode.open", resource.uri, {preview});
        }
    };

    // Default click action in the SCM viewlet: open a diff or the relevant file per status.
    public static Open(...resourceStates: SourceControlResourceState[]) {
        const selection = resourceStates.filter(s => s instanceof Resource) as Resource[];
        for (const resource of selection) {
            PerforceSCMProvider.open(resource);
        }
    };

    public static Sync(sourceControl: SourceControl) {
        const perforceProvider = PerforceSCMProvider.GetInstance(sourceControl ? sourceControl.rootUri : null);
        perforceProvider._model.Sync();
    };

    public static Refresh(sourceControl: SourceControl) {
        const perforceProvider = PerforceSCMProvider.GetInstance(sourceControl ? sourceControl.rootUri : null);
        perforceProvider._model.Refresh();
    };

    public static RefreshAll() {
        for (let provider of PerforceSCMProvider.instances) {
            provider._model.Refresh();
        }
    };

    public static Info(sourceControl: SourceControl) {
        let provider = PerforceSCMProvider.GetInstance(sourceControl ? sourceControl.rootUri : null);
        provider._model.Info();
    };

    public static ProcessChangelist(sourceControl: SourceControl) {
        let provider = PerforceSCMProvider.GetInstance(sourceControl ? sourceControl.rootUri : null);
        provider._model.ProcessChangelist();
    };

    // The owning Model is smuggled through the resource group's 'model' property.
    public static async EditChangelist(input: SourceControlResourceGroup) {
        let model: Model = input['model'];
        if (model) {
            model.EditChangelist(input);
        }
    };

    public static async Describe(input: SourceControlResourceGroup) {
        let model: Model = input['model'];
        if (model) {
            model.Describe(input);
        }
    };

    public static async SubmitDefault(sourceControl: SourceControl) {
        let provider = PerforceSCMProvider.GetInstance(sourceControl ? sourceControl.rootUri : null);
        provider._model.SubmitDefault();
    };

    public static async Submit(input: SourceControlResourceGroup) {
        let model: Model = input['model'];
        if (model) {
            model.Submit(input);
        }
    };

    // Revert either a set of individual resources or a whole changelist group.
    public static async Revert(arg: Resource | SourceControlResourceGroup, ...resourceStates: SourceControlResourceState[]) {
        if (arg instanceof Resource) {
            let resources = [...resourceStates as Resource[], arg as Resource];
            for (const resource of resources) {
                resource.model.Revert(resource);
            }
        } else {
            let group = arg as SourceControlResourceGroup;
            let model: Model = group['model'];
            model.Revert(group);
        }
    };

    // Same as Revert, but only reverts files with no actual changes.
    public static async RevertUnchanged(arg: Resource | SourceControlResourceGroup, ...resourceStates: SourceControlResourceState[]) {
        if (arg instanceof Resource) {
            let resources = [...resourceStates as Resource[], arg as Resource];
            for (const resource of resources) {
                resource.model.Revert(resource, true);
            }
        } else {
            let group = arg as SourceControlResourceGroup;
            let model: Model = group['model'];
            model.Revert(group, true);
        }
    };

    public static async ShelveOrUnshelve(...resourceStates: SourceControlResourceState[]): Promise<void> {
        const selection = resourceStates.filter(s => s instanceof Resource) as Resource[];
        for (const resource of selection) {
            resource.model.ShelveOrUnshelve(resource);
        }
    };

    // Reopen files in another changelist. The Uri code path is not implemented yet.
    public static async ReopenFile(arg?: Resource | Uri, ...resourceStates: SourceControlResourceState[]): Promise<void> {
        let resources: Resource[] | undefined = undefined;

        if (arg instanceof Uri) {
            // const resource = this.getSCMResource(arg);
            // if (resource !== undefined) {
            //     resources = [resource];
            // }
            console.log('ReopenFile: ' + arg.toString());
            return;
        } else {
            let resource: Resource | undefined = undefined;

            if (arg instanceof Resource) {
                resource = arg;
            } else {
                //resource = this.getSCMResource();
                console.log('ReopenFile: should never happen');
                return;
            }

            if (resource) {
                resources = [...resourceStates as Resource[], resource];
            }
        }

        if (!resources || resources.length == 0) {
            return;
        }

        await resources[0].model.ReopenFile(resources);
    };

    // Quick-diff provider: map a local file to its depot print URI.
    provideOriginalResource(uri: Uri): ProviderResult<Uri> {
        if (uri.scheme !== 'file') {
            return;
        }

        return uri.with({ scheme: 'perforce', authority: 'print', query: '-q' });
    }


    /**
     * This is the default action when an resource is clicked in the viewlet.
     * For ADD, AND UNDELETE just show the local file.
     * For DELETE just show the server file.
     * For EDIT AND RENAME show the diff window (server on left, local on right).
     */
    private static open(resource: Resource): void {
        if(resource.FileType.base === FileType.BINARY) {
            // Binary files cannot be diffed; show fstat output instead.
            const uri = resource.uri.with({ scheme: 'perforce', authority: 'fstat' });
            workspace.openTextDocument(uri)
                .then(doc => window.showTextDocument(doc));
            return;
        }

        const left: Uri = PerforceSCMProvider.getLeftResource(resource);
        const right: Uri = PerforceSCMProvider.getRightResource(resource);
        const title: string = PerforceSCMProvider.getTitle(resource);

        if (!left) {
            if (!right) {
                // TODO
                console.error("Status not supported: "+ resource.status.toString() );
                return;
            }
            commands.executeCommand<void>("vscode.open", right);
            return;
        }
        commands.executeCommand<void>("vscode.diff", left, right, title);
        return;

    }

    // Gets the uri for the previous version of the file.
    private static getLeftResource(resource: Resource): Uri | undefined {
        switch (resource.status) {
            case Status.EDIT:
                return resource.uri.with({ scheme: 'perforce', authority: 'print', query: '-q' });
        }
    }

    // Gets the uri for the current version of the file (except for deleted files).
    private static getRightResource(resource: Resource): Uri | undefined {
        switch (resource.status) {
            case Status.ADD:
            case Status.EDIT:
            case Status.MOVE_ADD:
                return resource.uri;
            case Status.MOVE_DELETE:
            case Status.DELETE:
                return resource.uri.with({ scheme: 'perforce', authority: 'print', query: '-q' });
        }
    }

    // Diff-editor title; only EDIT gets a descriptive title.
    private static getTitle(resource: Resource): string {
        const basename = Path.basename(resource.uri.fsPath);

        switch (resource.status) {
            case Status.EDIT:
                return `${basename} - Diff Against Most Recent Revision`;
        }

        return '';
    }

}
the_stack
import convertBitRange from './convertbitrange'; const dxt4to8 = convertBitRange(4, 8); const dxt5to8 = convertBitRange(5, 8); const dxt6to8 = convertBitRange(6, 8); const dx1colors = new Uint8Array(16); const dx3colors = new Uint8Array(12); const dx5alphas = new Uint8Array(8); const red = new Uint8Array(8); const green = new Uint8Array(8); function dx1Colors(out: Uint8Array, color0: number, color1: number): void { const r0 = ((color0 >> 11) & 31) * dxt5to8; const g0 = ((color0 >> 5) & 63) * dxt6to8; const b0 = (color0 & 31) * dxt5to8; const r1 = ((color1 >> 11) & 31) * dxt5to8; const g1 = ((color1 >> 5) & 63) * dxt6to8; const b1 = (color1 & 31) * dxt5to8; // Minimum and maximum colors. out[0] = r0; out[1] = g0; out[2] = b0; out[3] = 255; out[4] = r1; out[5] = g1; out[6] = b1; out[7] = 255; // Interpolated colors. if (color0 > color1) { out[8] = (5 * r0 + 3 * r1) >> 3; out[9] = (5 * g0 + 3 * g1) >> 3; out[10] = (5 * b0 + 3 * b1) >> 3; out[11] = 255; out[12] = (5 * r1 + 3 * r0) >> 3; out[13] = (5 * g1 + 3 * g0) >> 3; out[14] = (5 * b1 + 3 * b0) >> 3; out[15] = 255; } else { out[8] = (r0 + r1) >> 1; out[9] = (g0 + g1) >> 1; out[10] = (b0 + b1) >> 1; out[11] = 255; out[12] = 0; out[13] = 0; out[14] = 0; out[15] = 0; } } function dx3Colors(out: Uint8Array, color0: number, color1: number): void { const r0 = ((color0 >> 11) & 31) * dxt5to8; const g0 = ((color0 >> 5) & 63) * dxt6to8; const b0 = (color0 & 31) * dxt5to8; const r1 = ((color1 >> 11) & 31) * dxt5to8; const g1 = ((color1 >> 5) & 63) * dxt6to8; const b1 = (color1 & 31) * dxt5to8; // Minimum and maximum colors. out[0] = r0; out[1] = g0; out[2] = b0; out[3] = r1; out[4] = g1; out[5] = b1; // Interpolated colors. 
out[6] = (5 * r0 + 3 * r1) >> 3; out[7] = (5 * g0 + 3 * g1) >> 3; out[8] = (5 * b0 + 3 * b1) >> 3; out[9] = (5 * r1 + 3 * r0) >> 3; out[10] = (5 * g1 + 3 * g0) >> 3; out[11] = (5 * b1 + 3 * b0) >> 3; } function dx5Alphas(out: Uint8Array, alpha0: number, alpha1: number): void { // Minimum and maximum alphas. out[0] = alpha0; out[1] = alpha1; // Interpolated alphas. if (alpha0 > alpha1) { out[2] = (54 * alpha0 + 9 * alpha1) >> 6; out[3] = (45 * alpha0 + 18 * alpha1) >> 6; out[4] = (36 * alpha0 + 27 * alpha1) >> 6; out[5] = (27 * alpha0 + 36 * alpha1) >> 6; out[6] = (18 * alpha0 + 45 * alpha1) >> 6; out[7] = (9 * alpha0 + 54 * alpha1) >> 6; } else { out[2] = (12 * alpha0 + 3 * alpha1) >> 4; out[3] = (9 * alpha0 + 6 * alpha1) >> 4; out[4] = (6 * alpha0 + 9 * alpha1) >> 4; out[5] = (3 * alpha0 + 12 * alpha1) >> 4; out[6] = 0; out[7] = 255; } } function rgColors(out: Uint8Array, color0: number, color1: number): void { // Minimum and maximum red colors. out[0] = color0; out[1] = color1; // Interpolated red colors. if (color0 > color1) { out[2] = (6 * color0 + 1 * color1) / 7; out[3] = (5 * color0 + 2 * color1) / 7; out[4] = (4 * color0 + 3 * color1) / 7; out[5] = (3 * color0 + 4 * color1) / 7; out[6] = (2 * color0 + 5 * color1) / 7; out[7] = (1 * color0 + 6 * color1) / 7; } else { out[2] = (4 * color0 + 1 * color1) / 5; out[3] = (3 * color0 + 2 * color1) / 5; out[4] = (2 * color0 + 3 * color1) / 5; out[5] = (1 * color0 + 4 * color1) / 5; out[6] = 0; out[7] = 1; } } /** * Decodes DXT1 data to a Uint8Array typed array with 8-8-8-8 RGBA bits. * * DXT1 is also known as BC1. */ export function decodeDxt1(src: Uint8Array, width: number, height: number): Uint8Array { const dst = new Uint8Array(width * height * 4); for (let blockY = 0, blockHeight = height / 4; blockY < blockHeight; blockY++) { for (let blockX = 0, blockWidth = width / 4; blockX < blockWidth; blockX++) { const i = 8 * (blockY * blockWidth + blockX); // Get the color values. 
dx1Colors(dx1colors, src[i] + 256 * src[i + 1], src[i + 2] + 256 * src[i + 3]); // The offset to the first pixel in the destination. const dstI = (blockY * 16) * width + blockX * 16; // All 32 color bits. const bits = src[i + 4] | (src[i + 5] << 8) | (src[i + 6] << 16) | (src[i + 7] << 24); for (let row = 0; row < 4; row++) { const rowOffset = row * 8; const dstOffset = dstI + row * width * 4; for (let column = 0; column < 4; column++) { const dstIndex = dstOffset + column * 4; const colorOffset = ((bits >> (rowOffset + column * 2)) & 3) * 4; dst[dstIndex + 0] = dx1colors[colorOffset + 0]; dst[dstIndex + 1] = dx1colors[colorOffset + 1]; dst[dstIndex + 2] = dx1colors[colorOffset + 2]; dst[dstIndex + 3] = dx1colors[colorOffset + 3]; } } } } return dst; } /** * Decodes DXT3 data to a Uint8Array typed array with 8-8-8-8 RGBA bits. * * DXT3 is also known as BC2. */ export function decodeDxt3(src: Uint8Array, width: number, height: number): Uint8Array { const dst = new Uint8Array(width * height * 4); const rowBytes = width * 4; for (let blockY = 0, blockHeight = height / 4; blockY < blockHeight; blockY++) { for (let blockX = 0, blockWidth = width / 4; blockX < blockWidth; blockX++) { const i = 16 * (blockY * blockWidth + blockX); // Get the color values. dx3Colors(dx3colors, src[i + 8] + 256 * src[i + 9], src[i + 10] + 256 * src[i + 11]); let dstI = (blockY * 16) * width + blockX * 16; for (let row = 0; row < 4; row++) { // Get 16 bits of alpha indices. const alphaBits = src[i + row * 2] + 256 * src[i + 1 + row * 2]; // Get 8 bits of color indices. 
const colorBits = src[i + 12 + row]; for (let column = 0; column < 4; column++) { const dstIndex = dstI + column * 4; const colorIndex = ((colorBits >> (column * 2)) & 3) * 3; dst[dstIndex + 0] = dx3colors[colorIndex + 0]; dst[dstIndex + 1] = dx3colors[colorIndex + 1]; dst[dstIndex + 2] = dx3colors[colorIndex + 2]; dst[dstIndex + 3] = ((alphaBits >> (column * 4)) & 0xf) * dxt4to8; } dstI += rowBytes; } } } return dst; } /** * Decodes DXT5 data to a Uint8Array typed array with 8-8-8-8 RGBA bits. * * DXT5 is also known as BC3. */ export function decodeDxt5(src: Uint8Array, width: number, height: number): Uint8Array { const dst = new Uint8Array(width * height * 4); const rowBytes = width * 4; for (let blockY = 0, blockHeight = height / 4; blockY < blockHeight; blockY++) { for (let blockX = 0, blockWidth = width / 4; blockX < blockWidth; blockX++) { const i = 16 * (blockY * blockWidth + blockX); // Get the alpha values. dx5Alphas(dx5alphas, src[i], src[i + 1]); // Get the color values. dx3Colors(dx3colors, src[i + 8] + 256 * src[i + 9], src[i + 10] + 256 * src[i + 11]); // The offset to the first pixel in the destination. let dstI = (blockY * 16) * width + blockX * 16; // The outer loop is only needed because JS bitwise operators only work on 32bit integers, while the alpha flags contain 48 bits. // Processing is instead done in two blocks, where each one handles 24 bits, or two rows of 4 pixels. for (let block = 0; block < 2; block++) { const alphaOffset = i + 2 + block * 3; const colorOffset = i + 12 + block * 2; // 24 alpha bits. const alphaBits = src[alphaOffset] + 256 * (src[alphaOffset + 1] + 256 * src[alphaOffset + 2]); // Go over two rows. for (let row = 0; row < 2; row++) { const colorBits = src[colorOffset + row]; // Go over four columns. for (let column = 0; column < 4; column++) { const dstIndex = dstI + column * 4; const colorIndex = ((colorBits >> (column * 2)) & 3) * 3; const alphaIndex = (alphaBits >> (row * 12 + column * 3)) & 7; // Set the pixel. 
dst[dstIndex + 0] = dx3colors[colorIndex + 0]; dst[dstIndex + 1] = dx3colors[colorIndex + 1]; dst[dstIndex + 2] = dx3colors[colorIndex + 2]; dst[dstIndex + 3] = dx5alphas[alphaIndex]; } // Next row. dstI += rowBytes; } } } } return dst; } /** * Decodes RGTC data to a Uint8Array typed array with 8-8 RG bits. * * RGTC is also known as BC5, ATI2, and 3Dc. */ export function decodeRgtc(src: Uint8Array, width: number, height: number): Uint8Array { const dst = new Uint8Array(width * height * 2); const rowBytes = width * 2; for (let blockY = 0, blockHeight = height / 4; blockY < blockHeight; blockY++) { for (let blockX = 0, blockWidth = width / 4; blockX < blockWidth; blockX++) { const i = 16 * (blockY * blockWidth + blockX); // Get the red colors. rgColors(red, src[i], src[i + 1]); // Get the green colors. rgColors(green, src[i + 8], src[i + 9]); // The offset to the first pixel in the destination. let dstI = (blockY * 8) * width + blockX * 8; // Split to two blocks of two rows, because there are 48 color bits. for (let block = 0; block < 2; block++) { const blockOffset = i + block * 3; // Get 24 bits of the color indices. const redbits = src[blockOffset + 2] + 256 * (src[blockOffset + 3] + 256 * src[blockOffset + 4]); const greenbits = src[blockOffset + 10] + 256 * (src[blockOffset + 11] + 256 * src[blockOffset + 12]); for (let row = 0; row < 2; row++) { const rowOffset = row * 4; for (let column = 0; column < 4; column++) { const dstOffset = dstI + column * 2; const shifts = 3 * (rowOffset + column); dst[dstOffset + 1] = red[(redbits >> shifts) & 7]; dst[dstOffset + 2] = green[(greenbits >> shifts) & 7]; } // Next row. dstI += rowBytes; } } } } return dst; }
the_stack
import { getLanguageSetting } from './sql-setting';

// Token types that survive simpleParse; all other tokens (strings, comments,
// operators, ...) are dropped before statement/table analysis.
const keepTokenType = new Set([
    'KEYWORD',
    'BRACKET',
    'SEMI',
    'VARIABLE',
    'NUMBER',
]);

// We look for an actual table name after these keywords
const tableKeyWord = new Set(['table', 'from', 'join', 'into']);

// if it is 'distinct from' then it is not referencing a table
const previousTokenKeyWordDenylistMap = {
    from: new Set(['distinct']),
};

// Keywords that can begin a DML/DDL statement.
const dmlKeyWord = new Set([
    'select',
    'insert',
    'update',
    'delete',
    'with',
    'create',
    'alter',
    'drop',
]);

// we ignore these keywords and keep looking
const continueTableSearchKeyWord = new Set([
    'if',
    'not',
    'exists',
    'formatted',
    'repair',
    'partitions',
    'extended',
]);

// We find table if it is these statements, but they have to be at the front
// since desc can also mean descending
const initialStatementTableKeyWord = new Set([
    'describe',
    'desc',
    'show',
    'msck',
]);

// We find table if it is these statements
const initialStatementKeyWord = new Set([
    ...dmlKeyWord,
    ...initialStatementTableKeyWord,
    'use',
]);

// There are 3 context: none, table, column
// Maps a keyword to the context that follows it (used by getEditorLines).
const contextSensitiveKeyWord = {
    select: 'column',
    from: 'table',
    where: 'column',
    by: 'column',
    // insert,
    table: 'table',
    update: 'table',
    join: 'table',
    set: 'column',
    desc: 'table',
    describe: 'table',
    // delete: 'table',
    limit: 'none',
};

type TokenType =
    | 'NUMBER'
    | 'STRING'
    | 'COMMENT'
    | 'OPERATOR'
    | 'PUNCTUATION'
    | 'BRACKET'
    | 'SEMI'
    | 'COMMA'
    | 'TEMPLATED_TAG'
    | 'TEMPLATED_BLOCK'
    | 'URL'
    | 'VARIABLE'
    | 'WORD'
    | 'KEYWORD'
    | 'BOOL'
    | 'TYPE'
    | 'UNKNOWN';

// Builds the ordered matcher table for the given dialect. The tokenizer tries
// each entry in order and the first regex that matches at the current stream
// position wins, so more specific patterns must come before generic ones.
function getTokenTypeMatcher(
    language: string
): Array<{ name: TokenType; regex: RegExp[] }> {
    const languageSetting = getLanguageSetting(language);
    return [
        {
            name: 'NUMBER',
            regex: [
                /^0x[0-9a-f]+/, // HEX
                /^x'[0-9a-f]+'/, // HEX
                /^b'[01]+'/, // BINARY
                /^0b[01]+/, // BINARY
                // NOTE(review): the exponent part matches only a lowercase 'e'
                // and a single digit — confirm whether 1E5 / 1e10 should match.
                /^[0-9]+(\.[0-9]+)?([e][-+]?[0-9])?/, // DECIMAL
                /^{( )*(d|t|ts)( )*'[^']*'( )*}/, // DATE
                /^{( )*(d|t|ts)( )*"[^"]*"( )*}/, // DATE
                /^[0-9]+/,
            ],
        },
        {
            name: 'STRING',
            regex: [/^"(\\.|[^"])*"/,
/^'(\\.|[^'])*'/, /^['"]/], }, { name: 'COMMENT', regex: [/^--.*/, /^\/\*/], }, { name: 'OPERATOR', regex: [languageSetting.operatorChars], }, { name: 'PUNCTUATION', regex: [languageSetting.punctuationChars], }, { name: 'BRACKET', regex: [/^[\(\)\[\]]/], }, { name: 'SEMI', regex: [/^;/], }, { name: 'COMMA', regex: [/^,/], }, { name: 'TEMPLATED_TAG', regex: languageSetting.placeholderVariable ? [/^{{.*?}}/, languageSetting.placeholderVariable] : [/^{{.*?}}/], }, { name: 'TEMPLATED_BLOCK', regex: [/^{%.*?%}/, /^{#.*?#}/, /^#.*?/], }, { name: 'URL', regex: [/^[a-z0-9]+:\/\/[A-Za-z0-9_.\-~/]+/], }, { name: 'VARIABLE', regex: [/^(\w+|`.*`)(?:\.(\w+|`.*`)?)+/], }, { name: 'WORD', regex: [/^\w+/], }, ]; } export interface IRange { from: CodeMirror.Position; to: CodeMirror.Position; } export interface ILinterWarning extends IRange { message: string; severity: 'error' | 'warning'; } export interface ILineage { references: Record<number, TableToken[]>; aliases: Record<number, Record<string, TableToken>>; } export interface ICodeAnalysis { lineage: ILineage; editorLines?: Line[]; contextFreeLinterWarnings?: ILinterWarning[]; } class StringStream { public text: string; public pos: number; public constructor(s: string) { this.text = s; this.pos = 0; } public next() { if (!this.eol()) { return this.text.charAt(this.pos++); } } public peek() { return this.text.charAt(this.pos); } public eol() { return this.pos >= this.text.length; } public match(pattern, consume = false) { const match = this.text.slice(this.pos).match(pattern); if (match && consume) { this.pos += match.index + match[0].length; } return match; } public eatSpace() { const start = this.pos; while (/\s/.test(this.text.charAt(this.pos))) { this.pos++; } return this.pos - start; } public goToEnd() { this.pos = this.text.length; } } export interface IToken { type: TokenType; text: string; line: number; start: number; end: number; bracketIndex?: number; } export class TableToken { public schema: string; public name: 
string; public line: number; public start: number; public end: number; public constructor(schema: string, table: string, token: IToken) { this.schema = schema; this.name = table; this.line = token.line; this.start = token.start; this.end = token.end; } } class Line { public statements: Array<[number, number]>; public contexts: Array<[number, string]>; public constructor(initialStatement: number, initialContext: string) { this.statements = [[0, initialStatement]]; this.contexts = [[0, initialContext]]; } } function sanitizeTable(tableToken: IToken, defaultSchema: string) { const stream = new StringStream(tableToken.text); const parts = []; while (!stream.eol()) { const match = stream.match(/^([_\w\d]+|`.*`)\.?/, true); if (match[1]) { let part = match[1]; if (part.charAt(0) === '`') { // remove first and last char part = part.slice(1, -1); } parts.push(part); } } let schema: string = null; let table: string = null; let success = true; if (parts.length === 1) { schema = defaultSchema; table = parts[0]; } else if (parts.length === 2) { schema = parts[0]; table = parts[1]; } else { console.error('Erroneous Input'); console.error(tableToken); success = false; } return { schema: (schema || '').toLowerCase(), table: (table || '').toLowerCase(), success, }; } function categorizeWord(token: IToken, language: string) { const languageSetting = getLanguageSetting(language); const s = token.text.toLowerCase(); if (languageSetting.keywords.has(s)) { token.type = 'KEYWORD'; token.text = s; } else if (languageSetting.bool.has(s)) { token.type = 'BOOL'; } else if (languageSetting.type.has(s)) { token.type = 'TYPE'; } else { token.type = 'VARIABLE'; } } function makeTokenizer(language: string, includeUnknown: boolean) { const tokenTypes = getTokenTypeMatcher(language); const tokenizeString = ( token: IToken, stream: StringStream, tokens: IToken[] ) => { let previousEscape = false; const quote = token.text.charAt(0); const start = stream.pos; while (!stream.eol()) { const ch = 
stream.next(); if (!previousEscape) { if (ch === quote) { // End const end = stream.pos; token.text = token.text + stream.text.slice(start, end); token.end = end; tokens.push(token); return tokenizeBase; } else if (ch === '\\') { previousEscape = true; } } else { previousEscape = false; } } token.text = token.text + stream.text.slice(start) + '\n'; return tokenizeString.bind(null, token); }; const tokenizeComment = ( token: IToken, stream: StringStream, tokens: IToken[] ) => { const start = stream.pos; if (!stream.eol()) { const match = stream.match(/\*\//, true); if (match) { const end = stream.pos; token.text = token.text + stream.text.slice(start, end); token.end = end; tokens.push(token); return tokenizeBase; } } token.text = token.text + stream.text.slice(start) + '\n'; stream.goToEnd(); return tokenizeComment.bind(null, token); }; const tokenizeBase = ( stream: StringStream, tokens: IToken[], lineNum: number ) => { stream.eatSpace(); let token: IToken = null; const tokenFound = tokenTypes.some( ({ name: tokenType, regex: tokenRegexs }) => tokenRegexs.some((tokenRegex) => { const match = stream.match(tokenRegex, true); if (match) { const end = stream.pos; const start = stream.pos - match[0].length; token = { type: tokenType, text: match[0], line: lineNum, start, end, }; return true; } }) ); if (tokenFound) { if (token.type === 'WORD') { categorizeWord(token, language); } else if (token.type === 'STRING' && token.text.length === 1) { // Multi-line string! // Change the mode to tokenizeString instead! 
                // Unterminated single-char string start: hand off to the
                // multi-line string sub-tokenizer.
                return tokenizeString.bind(null, token);
            } else if (token.type === 'COMMENT' && token.text === '/*') {
                // Block comment opener: hand off to the comment sub-tokenizer.
                return tokenizeComment.bind(null, token);
            }
            tokens.push(token);
        } else {
            // Nothing matched: consume one character so the stream always
            // advances, optionally surfacing it as an UNKNOWN token.
            const unknownToken = stream.next();
            if (includeUnknown && unknownToken) {
                tokens.push({
                    type: 'UNKNOWN',
                    text: unknownToken,
                    line: lineNum,
                    start: stream.pos - 1,
                    end: stream.pos,
                });
            }
        }
        return tokenizeBase;
    };
    return tokenizeBase;
}

// Tokenizes `code` into a flat IToken list.
// `options.language` defaults to 'presto'; single unmatched characters are
// only emitted when `options.includeUnknown` is set.
export function tokenize(
    code: string,
    options: { language?: string; includeUnknown?: boolean } = {}
) {
    const lines = code.split('\n');
    const tokens: IToken[] = [];
    // The tokenizer is a state function; string/comment states can span
    // lines, so the current state function is carried line to line.
    let tokenizer = makeTokenizer(
        options.language ?? 'presto',
        !!options.includeUnknown
    );
    lines.forEach((line, lineNum) => {
        const stream = new StringStream(line);
        while (!stream.eol()) {
            tokenizer = tokenizer(stream, tokens, lineNum);
        }
    });
    return tokens;
}

// Groups tokens into statements (split on SEMI), keeping only keepTokenType
// tokens. Every opening bracket gets `bracketIndex` set to the in-statement
// index of its closing bracket (or of the statement's last token when the
// bracket is never closed).
export function simpleParse(tokens: IToken[]) {
    const statements: IToken[][] = [];
    let bracketStack = [];
    let statement: IToken[] = [];
    tokens.forEach((token) => {
        if (keepTokenType.has(token.type)) {
            if (token.type === 'BRACKET') {
                statement.push(token);
                if (token.text === '(' || token.text === '[') {
                    bracketStack.push(token);
                } else if (bracketStack.length > 0) {
                    // ) or ]
                    const correspondingToken = bracketStack.pop();
                    correspondingToken.bracketIndex = statement.length - 1;
                }
            } else if (token.type === 'SEMI') {
                // Statement boundary: resolve any dangling open brackets to
                // the last token, then start a fresh statement. The SEMI
                // token itself is not kept.
                bracketStack.forEach((bracketToken) => {
                    bracketToken.bracketIndex = statement.length - 1;
                });
                statements.push(statement);
                statement = [];
                bracketStack = [];
            } else {
                statement.push(token);
            }
        }
    });
    if (statement.length > 0) {
        bracketStack.forEach((token) => {
            token.bracketIndex = statement.length - 1;
        });
        statements.push(statement);
    }
    return statements;
}

export function getQueryLinePosition(query: string): number[] {
    // Return start char position of every line
    // the return array is always 1 + number of lines in query
    return query
        .split('\n')
        .map((line) => line.length)
        .reduce(
            (arr, lineLength) => {
                arr.push(lineLength + 1 +
                    arr[arr.length - 1]);
                return arr;
            },
            [0]
        );
}

// selectedRange is either null or { from: {line:num, ch:num}, to: {line:num, ch:num}}
// Returns [startChar, endChar] absolute offsets into `query` for every
// non-empty statement inside the (optionally selected) region.
export function getStatementRanges(
    query: string,
    selectedRange: IRange = null,
    language?: string
) {
    // NOTE(review): defaults to 'hive' here while tokenize() defaults to
    // 'presto' — confirm the asymmetry is intentional.
    language = language || 'hive';
    // Calculate char position of beginning of each line
    let queryStartPos = 0;
    let queryEndPos = query.length;
    if (selectedRange) {
        const queryLineLength = getQueryLinePosition(query);
        queryStartPos =
            queryLineLength[selectedRange.from.line] + selectedRange.from.ch;
        queryEndPos =
            queryLineLength[selectedRange.to.line] + selectedRange.to.ch;
    }
    const selectedQuery = query.substring(queryStartPos, queryEndPos);
    const lineLength = getQueryLinePosition(selectedQuery);
    const tokens = tokenize(selectedQuery, { language });
    // a list of statement tokens
    const tokenStatements: IToken[][] = [];
    // a list of tokens
    let tokenStatement: IToken[] = [];
    tokens.forEach((token) => {
        if (token.type === 'SEMI') {
            tokenStatements.push(tokenStatement);
            tokenStatement = [];
        } else if (token.type === 'COMMENT') {
            // ignore comment tokens
        } else {
            tokenStatement.push(token);
        }
    });
    if (tokenStatement.length > 0) {
        tokenStatements.push(tokenStatement);
    }
    // Now crop ranges for each statement
    const statementRanges = tokenStatements
        .filter((statement) => statement.length > 0) // Filter out empty statements
        .map((statement) => {
            // Token line/start/end are relative to the selection; shift them
            // back to absolute offsets with queryStartPos.
            const firstToken = statement[0];
            const lastToken = statement[statement.length - 1];
            const firstCharPosition =
                lineLength[firstToken.line] + firstToken.start;
            const lastCharPosition =
                lineLength[lastToken.line] + lastToken.end;
            return [
                firstCharPosition + queryStartPos,
                lastCharPosition + queryStartPos,
            ];
        });
    return statementRanges;
}

// Returns the text of the statements overlapping `selectedRange`, or the
// whole query when nothing is selected.
export function getSelectedQuery(query: string, selectedRange: IRange = null) {
    const statementRanges = selectedRange
        ? getStatementRanges(query, selectedRange)
        : [];
    const queryRange =
        statementRanges.length > 0 ?
              [
                  statementRanges[0][0],
                  statementRanges[statementRanges.length - 1][1],
              ]
            : null;
    const selectedQuery = queryRange
        ? query.slice(queryRange[0], queryRange[1])
        : query;
    return selectedQuery;
}

// Rewrites `query` so every statement is prefixed with EXPLAIN, one per line.
export function getQueryAsExplain(query: string, language?: string) {
    const statementRanges = getStatementRanges(query, null, language);
    const statements = statementRanges.map(
        (range) => query.slice(range[0], range[1]) + ';'
    );
    return statements.map((statement) => 'EXPLAIN ' + statement).join('\n');
}

function isKeywordToken(token: IToken) {
    return token.type === 'KEYWORD';
}

function isKeywordTokenWithText(token: IToken, text: string) {
    return isKeywordToken(token) && token.text === text;
}

// Collects the names declared at the top level of a leading WITH clause, so
// later passes can avoid treating CTE names as real table references.
export function findWithStatementPlaceholder(statement: IToken[]) {
    const placeholders: string[] = [];
    const firstToken = statement[0];
    if (firstToken && isKeywordTokenWithText(firstToken, 'with')) {
        let tokenIndex = 1;
        while (tokenIndex < statement.length) {
            const token = statement[tokenIndex++];
            if (token.type === 'VARIABLE') {
                placeholders.push(token.text);
            } else if (token.type === 'BRACKET' && token.bracketIndex) {
                // Jump past the CTE body to just after its closing bracket.
                // NOTE(review): a falsy bracketIndex (0/undefined) is not
                // followed — confirm that is acceptable here.
                tokenIndex = token.bracketIndex + 1;
            } else if (isKeywordToken(token) && dmlKeyWord.has(token.text)) {
                break;
            }
        }
    }
    return placeholders;
}

// Walks simpleParse() statements and gathers, keyed by statement index, the
// referenced tables and their aliases. Tracks `use <schema>` statements to
// resolve unqualified names against the current default schema.
export function findTableReferenceAndAlias(statements: IToken[][]) {
    let defaultSchema = 'default';
    const references: Record<number, TableToken[]> = {};
    const aliases: Record<number, Record<string, TableToken>> = {};
    statements.forEach((statement, statementNum) => {
        if (statement.length === 0) {
            return;
        }
        let tokenCounter = 0;
        let firstToken = statement[tokenCounter++];
        // If the query starts with EXPLAIN
        // ignore that word and skip again to the rest
        if (
            isKeywordTokenWithText(firstToken, 'explain') &&
            statement.length > 1
        ) {
            firstToken = statement[tokenCounter++];
        }
        if (
            !isKeywordToken(firstToken) ||
            !initialStatementKeyWord.has(firstToken.text)
        ) {
            return;
        }
        if (firstToken.text === 'use') {
            const secondToken =
statement[tokenCounter++]; if (secondToken && secondToken.type === 'VARIABLE') { defaultSchema = secondToken.text; } } else { const placeholders: Set<string> = new Set( findWithStatementPlaceholder(statement) ); const tables: IToken[] = []; const tableAlias: Record<string, IToken> = {}; // Whether or not the context inside a () is a subquery // start with True because the checks above const backetQueryContextStack: boolean[] = [true]; let tableSearchMode = false; let lastTableIndex = -1; statement.forEach((token, tokenIndex) => { const nextToken = statement[tokenIndex + 1]; const prevToken = statement[tokenIndex - 1]; if (token.type === 'BRACKET') { if (token.text === '(' || token.text === '[') { if (!nextToken) { // no need for more analysis if the loop is going to end return; } backetQueryContextStack.push( isKeywordToken(nextToken) && initialStatementKeyWord.has(nextToken.text) ); tableSearchMode = false; } else { // ) or ] backetQueryContextStack.pop(); } return; } // If the content inside the bracket is not going to be // a query, then we skip all until we meet the enclosing bracket if ( !backetQueryContextStack[backetQueryContextStack.length - 1] ) { return; } if (isKeywordToken(token)) { if (tokenIndex === 0) { tableSearchMode = initialStatementTableKeyWord.has( token.text ); } else if ( tableKeyWord.has(token.text) && !( isKeywordToken(prevToken) && previousTokenKeyWordDenylistMap[token.text]?.has( prevToken?.text ) ) ) { tableSearchMode = true; } else if (!continueTableSearchKeyWord.has(token.text)) { tableSearchMode = false; } } else if (token.type === 'VARIABLE') { if (tableSearchMode) { const isActualTable = !placeholders.has(token.text); if (isActualTable) { tables.push(token); lastTableIndex = tokenIndex; } tableSearchMode = false; } else if (tokenIndex > 0) { // check alias // example: select * from table_a as aa; const hasAsAlias = isKeywordTokenWithText(prevToken, 'as') && lastTableIndex + 2 === tokenIndex; // example: select * from table_a aa; 
const hasSpaceAlias = lastTableIndex + 1 === tokenIndex; if (hasAsAlias || hasSpaceAlias) { const tableToken = tables[tables.length - 1]; const isActualTable = !placeholders.has(token.text); if (isActualTable) { tableAlias[token.text] = tableToken; } } } } }); // Post Process Tables to find the correct name const processedTables = []; tables.forEach((tableToken) => { const { schema, table, success } = sanitizeTable( tableToken, defaultSchema ); if (success) { processedTables.push( new TableToken(schema, table, tableToken) ); } }); references[statementNum] = processedTables; // Post Process Alias to link them to table const processedAlias: Record<string, TableToken> = {}; Object.keys(tableAlias).forEach((alias) => { const tableToken = tableAlias[alias]; const { schema, table, success } = sanitizeTable( tableToken, defaultSchema ); if (success) { processedAlias[alias] = new TableToken( schema, table, tableToken ); } }); aliases[statementNum] = processedAlias; } }); return { references, aliases, }; } export function getEditorLines(statements: IToken[][]) { const lines: Line[] = []; let lastLine = 0; statements.forEach((statement, statementNum) => { let context = 'none'; const contextStack: string[] = []; statement.forEach((token) => { const tokenLine = token.line; let tokenPosition = token.end; for (let lineNum = lastLine; lineNum <= tokenLine; lineNum++) { if (lines[lineNum] == null) { lines[lineNum] = new Line(statementNum, context); } } let needToUpdateLine = false; if ( isKeywordToken(token) && token.text in contextSensitiveKeyWord ) { context = contextSensitiveKeyWord[token.text]; tokenPosition += 1; needToUpdateLine = true; } else if (token.type === 'SEMI') { context = 'none'; needToUpdateLine = true; } else if (token.type === 'BRACKET') { if (token.text === '(' || token.text === '[') { contextStack.push(context); // However context don't change because there may be the case of // select count(* <---) needToUpdateLine = true; } else if (contextStack.length > 0) 
{ // ) or ] context = contextStack.pop(); needToUpdateLine = true; } } else if (context === 'table' && token.type === 'VARIABLE') { context = 'none'; tokenPosition += 1; needToUpdateLine = true; } if (needToUpdateLine) { const { statements: currentStatements, contexts: currentContexts, } = lines[tokenLine]; const lastStatement = currentStatements[currentStatements.length - 1]; const lastContext = currentContexts[currentContexts.length - 1]; if (lastStatement[0] === tokenPosition) { lastStatement[1] = statementNum; } else if (lastStatement[1] !== statementNum) { currentStatements.push([tokenPosition, statementNum]); } if (lastContext[0] === tokenPosition) { lastContext[1] = context; } else if (lastContext[1] !== context) { currentContexts.push([tokenPosition, context]); } } lastLine = tokenLine; }); }); return lines; }
the_stack
import type { BaseProvider } from '@ethersproject/providers'; import type { Signer } from '@ethersproject/abstract-signer'; import invariant from 'tiny-invariant'; import warning from 'tiny-warning'; import { buildOrder as _buildOrder, signOrder as _signOrder, fillSignedOrder as _fillSignedOrder, approveAsset as _approveAsset, verifyOrderSignature as _verifyOrderSignature, getApprovalStatus as _getApprovalStatus, cancelOrder as _cancelOrder, cancelOrders as _cancelOrders, estimateGasForFillOrder as _estimateGasForFillOrder, estimateGasForApproval as _estimateGasForApproval, cancelOrdersUpToNow as _cancelOrdersUpToNow, getOrderInfo as _getOrderInfo, getAssetsFromOrder as _getAssetsFromOrder, hashOrder as _hashOrder, } from './pure'; import { getEipDomain, normalizeOrder as _normalizeOrder, } from '../../utils/v3/order'; import type { ApprovalOverrides, BuildOrderAdditionalConfig, FillOrderOverrides, INftSwapV3, } from './INftSwapV3'; import { SupportedChainIdsV3, EIP712_TYPES, Order, OrderInfoV3, OrderStatusV3, OrderStatusCodeLookup, SignedOrder, SupportedTokenTypes, SwappableAsset, AddressesForChainV3, BigNumberish, ERC20AssetDataSerialized, AssetProxyId, SigningOptionsV3, } from './types'; import { ExchangeContract, ExchangeContract__factory, Forwarder__factory, } from '../../contracts'; import { convertAssetsToInternalFormat, convertAssetToInternalFormat, decodeAssetData, } from '../../utils/v3/asset-data'; import { getProxyAddressForErcType, getForwarderAddress, getWrappedNativeToken, } from '../../utils/v3/default-addresses'; import { DEFAUTLT_GAS_BUFFER_MULTIPLES } from '../../utils/v3/gas-buffer'; import { sleep } from '../../utils/sleep'; import addresses from './addresses.json'; import { PayableOverrides, TransactionOverrides } from '../common/types'; export interface NftSwapConfig { exchangeContractAddress?: string; erc20ProxyContractAddress?: string; erc721ProxyContractAddress?: string; erc1155ProxyContractAddress?: string; forwarderContractAddress?: 
string; wrappedNativeTokenContractAddress?: string; gasBufferMultiples?: { [chainId: number]: number }; } /** * NftSwap Convenience class to swap between ERC20, ERC721, and ERC1155. Primary entrypoint for swapping. */ class NftSwapV3 implements INftSwapV3 { public provider: BaseProvider; public signer: Signer | undefined; public chainId: number; public exchangeContract: ExchangeContract; public exchangeContractAddress: string; public erc20ProxyContractAddress: string; public erc721ProxyContractAddress: string; public erc1155ProxyContractAddress: string; public wrappedNativeTokenContractAddress: string | null; public forwarderContractAddress: string | null; public gasBufferMultiples: { [chainId: number]: number } | null; constructor( provider: BaseProvider, signer: Signer, chainId?: number, additionalConfig?: NftSwapConfig ) { this.provider = provider; this.signer = signer; this.chainId = chainId ?? (this.provider._network.chainId as SupportedChainIdsV3); const chainDefaultContractAddresses: AddressesForChainV3 | undefined = addresses[this.chainId as SupportedChainIdsV3]; const zeroExExchangeContractAddress = additionalConfig?.exchangeContractAddress ?? chainDefaultContractAddresses?.exchange; warning( chainDefaultContractAddresses, `Default contract addresses missing for chain ${this.chainId}. Supply ExchangeContract and Asset Proxy contracts manually via additionalConfig argument` ); this.exchangeContractAddress = zeroExExchangeContractAddress; this.erc20ProxyContractAddress = additionalConfig?.erc20ProxyContractAddress ?? getProxyAddressForErcType(SupportedTokenTypes.ERC20, this.chainId); this.erc721ProxyContractAddress = additionalConfig?.erc721ProxyContractAddress ?? getProxyAddressForErcType(SupportedTokenTypes.ERC721, this.chainId); this.erc1155ProxyContractAddress = additionalConfig?.erc1155ProxyContractAddress ?? 
getProxyAddressForErcType(SupportedTokenTypes.ERC1155, this.chainId); this.forwarderContractAddress = additionalConfig?.forwarderContractAddress ?? getForwarderAddress(this.chainId) ?? null; this.wrappedNativeTokenContractAddress = additionalConfig?.wrappedNativeTokenContractAddress ?? getWrappedNativeToken(this.chainId) ?? null; invariant( this.exchangeContractAddress, '0x V3 Exchange Contract Address not set. Exchange Contract is required to load NftSwap' ); warning( this.erc20ProxyContractAddress, 'ERC20Proxy Contract Address not set, ERC20 swaps will not work' ); warning( this.erc721ProxyContractAddress, 'ERC721Proxy Contract Address not set, ERC721 swaps will not work' ); warning( this.erc1155ProxyContractAddress, 'ERC20Proxy Contract Address not set, ERC1155 swaps will not work' ); warning( this.forwarderContractAddress, 'Forwarder Contract Address not set, native token fills will not work' ); warning( this.wrappedNativeTokenContractAddress, 'WETH Contract Address not set, SDK cannot automatically check if order can be filled with native token' ); warning(this.signer, 'No Signer provided; Read-only mode only.'); // Initialize Exchange contract so we can interact with it easily. this.exchangeContract = ExchangeContract__factory.connect( zeroExExchangeContractAddress, signer ?? provider ); this.gasBufferMultiples = additionalConfig?.gasBufferMultiples ?? 
DEFAUTLT_GAS_BUFFER_MULTIPLES; } public cancelOrder = async (order: Order) => { return _cancelOrder(this.exchangeContract, order); }; /** * * @param order : 0x Order; * @param timeoutInMs : Timeout in millisecond to give up listening for order fill * @param throwIfStatusOtherThanFillableOrFilled : Option to throw if status changes from fillable to anything other than 'filled' (e.g 'cancelled') * @returns OrderInfo if status change in order, or null if timed out */ public waitUntilOrderFilledOrCancelled = async ( order: Order, timeoutInMs: number = 60 * 1000, pollOrderStatusFrequencyInMs: number = 10_000, throwIfStatusOtherThanFillableOrFilled: boolean = false ): Promise<OrderInfoV3 | null> => { let settled = false; const timeoutPromise = sleep(timeoutInMs).then((_) => null); const orderStatusRefreshPromiseFn = async (): Promise<OrderInfoV3 | null> => { while (!settled) { const orderInfo = await this.getOrderInfo(order); if (orderInfo.orderStatus === OrderStatusV3.Fillable) { await sleep(pollOrderStatusFrequencyInMs); continue; } else if (orderInfo.orderStatus === OrderStatusV3.FullyFilled) { return orderInfo; } else { // expired, bad order, etc if (throwIfStatusOtherThanFillableOrFilled) { throw new Error( OrderStatusCodeLookup[orderInfo.orderStatus] ?? orderInfo.orderStatus ?? 
'Unknown status' ); } return orderInfo; } } return null; }; const fillEventListenerFn = async () => { // TODO(johnrjj) await sleep(timeoutInMs * 2); return null; }; const orderStatusRefreshPromiseLoop: Promise<OrderInfoV3 | null> = orderStatusRefreshPromiseFn(); const fillEventPromise: Promise<OrderInfoV3 | null> = fillEventListenerFn(); const orderInfo = await Promise.any([ timeoutPromise, orderStatusRefreshPromiseLoop, fillEventPromise, ]); settled = true; return orderInfo; }; public getOrderInfo = async (order: Order): Promise<OrderInfoV3> => { return _getOrderInfo(this.exchangeContract, order); }; public getOrderStatus = async (order: Order): Promise<OrderStatusV3> => { const orderInfo = await this.getOrderInfo(order); return orderInfo.orderStatus; }; public awaitTransactionHash = async (txHash: string) => { return this.provider.waitForTransaction(txHash); }; public signOrder = async ( order: Order, addressOfWalletSigningOrder: string, signerOverride?: Signer, signingOptions?: Partial<SigningOptionsV3> ) => { const signerToUser = signerOverride ?? this.signer; if (!signerToUser) { throw new Error('signOrder:Signer undefined'); } return _signOrder( order, addressOfWalletSigningOrder, signerToUser, this.provider, this.chainId, this.exchangeContract.address, signingOptions ); }; public buildOrder = ( makerAssets: SwappableAsset[], takerAssets: SwappableAsset[], makerAddress: string, userConfig?: Partial<BuildOrderAdditionalConfig> ) => { const defaultConfig = { chainId: this.chainId, makerAddress: makerAddress }; const config = { ...defaultConfig, ...userConfig }; return _buildOrder( convertAssetsToInternalFormat(makerAssets), convertAssetsToInternalFormat(takerAssets), config ); }; public loadApprovalStatus = async ( asset: SwappableAsset, walletAddress: string ) => { // TODO(johnrjj) - Fix this... 
const exchangeProxyAddressForAsset = getProxyAddressForErcType( asset.type as SupportedTokenTypes, this.chainId ); const assetInternalFmt = convertAssetToInternalFormat(asset); return _getApprovalStatus( walletAddress, exchangeProxyAddressForAsset, assetInternalFmt, this.provider ); }; /** * @param asset Asset in the SDK format * @returns */ public async approveTokenOrNftByAsset( asset: SwappableAsset, _walletAddress: string, // Remove in next release approvalTransactionOverrides?: Partial<TransactionOverrides>, otherOverrides?: Partial<ApprovalOverrides> ) { // TODO(johnrjj) - Look up via class fields instead... const exchangeProxyAddressForAsset = otherOverrides?.exchangeProxyContractAddressForAsset ?? getProxyAddressForErcType( asset.type as SupportedTokenTypes, this.chainId ); const signerToUse = otherOverrides?.signer ?? this.signer; if (!signerToUse) { throw new Error('approveTokenOrNftByAsset:Signer null'); } if (otherOverrides?.gasAmountBufferMultiple === null) { } let gasBufferMultiple: number | undefined = undefined; if (otherOverrides?.gasAmountBufferMultiple === null) { // keep gasBufferMultiple undefined, b/c user specifically specified null. gasBufferMultiple = undefined; } else { gasBufferMultiple = otherOverrides?.gasAmountBufferMultiple ?? this.getGasMultipleForChainId(this.chainId); } let maybeCustomGasLimit: BigNumberish | undefined; if (gasBufferMultiple) { const estimatedGasAmount = await _estimateGasForApproval( exchangeProxyAddressForAsset, convertAssetToInternalFormat(asset), signerToUse, approvalTransactionOverrides ?? {}, otherOverrides?.approve ?? true ); maybeCustomGasLimit = Math.floor( estimatedGasAmount.toNumber() * gasBufferMultiple ); } return _approveAsset( exchangeProxyAddressForAsset, convertAssetToInternalFormat(asset), signerToUse, { gasLimit: maybeCustomGasLimit, ...approvalTransactionOverrides, }, otherOverrides?.approve ?? 
true ); } public getOrderHash = (order: Order) => { return _hashOrder(order, this.chainId, this.exchangeContract.address); }; public getTypedData = ( chainId: number, exchangeContractAddress: string, order: Order ) => { const domain = getEipDomain(chainId, exchangeContractAddress); const types = EIP712_TYPES; const value = order; return { domain, types, value, }; }; /** * Decodes readable order data (maker and taker assets) from the Order's encoded asset data * @param order : 0x Order (or Signed Order); * @returns Maker and taker assets for the order */ public getAssetsFromOrder = (order: Order) => { return _getAssetsFromOrder(order); }; public checkIfOrderCanBeFilledWithNativeToken = ( order: Order, wrappedNativeTokenContractAddress: string | undefined = this .wrappedNativeTokenContractAddress ?? undefined ): boolean => { warning( this.wrappedNativeTokenContractAddress, 'Wrapped native token contract address not set. Cannot determine if order can be filled with native token' ); const decodedAssetData = decodeAssetData(order.takerAssetData); // Can only fill with native token when taker asset is ERC20. (Multiasset is not supported) if ( decodedAssetData.assetProxyId.toLowerCase() !== AssetProxyId.ERC20.toLowerCase() ) { return false; } // If we get this far, we have a single asset (non-multiasset) ERC20 for the taker token. // Let's check if it is the wrapped native contract address for this chain (e.g. WETH on mainnet or rinkeby, WMATIC on polygon) const erc20TokenAddress = (decodedAssetData as ERC20AssetDataSerialized) .tokenAddress; invariant( erc20TokenAddress, 'ERC20 token address missing from detected ERC20 asset data' ); return ( erc20TokenAddress.toLowerCase() === wrappedNativeTokenContractAddress?.toLowerCase() ); }; public fillSignedOrder = async ( signedOrder: SignedOrder, fillOverrides?: Partial<FillOrderOverrides>, transactionOverrides: Partial<PayableOverrides> = {} ) => { const exchangeContract = fillOverrides?.exchangeContract ?? 
this.exchangeContract; let gasBufferMultiple: number | undefined = undefined; if (fillOverrides?.gasAmountBufferMultiple === null) { // keep gasBufferMultiple undefined, b/c user specifically specified null. gasBufferMultiple = undefined; } else { gasBufferMultiple = fillOverrides?.gasAmountBufferMultiple ?? this.getGasMultipleForChainId(this.chainId); } let maybeCustomGasLimit: BigNumberish | undefined; if (gasBufferMultiple) { const estimatedGasAmount = await _estimateGasForFillOrder( signedOrder, exchangeContract ); // NOTE(johnrjj) - Underflow issues, so we convert to number. Gas amounts shouldn't overflow. maybeCustomGasLimit = Math.floor( estimatedGasAmount.toNumber() * gasBufferMultiple ); } const allTxOverrides: Partial<PayableOverrides> = { gasLimit: maybeCustomGasLimit, ...transactionOverrides, }; if (fillOverrides?.fillOrderWithNativeTokenInsteadOfWrappedToken) { const eligibleForNativeTokenFill = this.checkIfOrderCanBeFilledWithNativeToken(signedOrder); warning( eligibleForNativeTokenFill, `Order ineligible for native token fill, fill will fail.` ); invariant( this.forwarderContractAddress, 'Forwarder contract address null, cannot fill order in native token' ); const forwarderContract = Forwarder__factory.connect( this.forwarderContractAddress, this.signer ?? 
this.provider ); const amountOfEthToFillWith = signedOrder.takerAssetAmount; return forwarderContract.marketBuyOrdersWithEth( [signedOrder], signedOrder.makerAssetAmount, [signedOrder.signature], [], [], { value: amountOfEthToFillWith, ...allTxOverrides, } ); } return _fillSignedOrder(signedOrder, exchangeContract, allTxOverrides); }; private getGasMultipleForChainId = (chainId: number): number | undefined => { if (this.gasBufferMultiples) { return this.gasBufferMultiples[this.chainId]; } return undefined; }; public normalizeOrder = (order: Order): Order => { const normalizedOrder = _normalizeOrder(order); return normalizedOrder as Order; }; public normalizeSignedOrder = (order: SignedOrder): SignedOrder => { const normalizedOrder = _normalizeOrder(order); return normalizedOrder as SignedOrder; }; public verifyOrderSignature = ( order: Order, signature: string, chainId: number, exchangeContractAddress: string ) => { return _verifyOrderSignature( order, signature, chainId, exchangeContractAddress ); }; } export { NftSwapV3 };
the_stack
import * as d3 from 'd3';

/**
 * Contract for the D3-based graph node component.
 * Groups layout constants, D3 helpers, the enter/update/exit lifecycle
 * functions for node SVG elements, and the value-selection helpers.
 */
export interface Node {
  // ID of the g element in SVG graph containing all the link elements.
  gID: string;

  // Layout constants for the donut chart drawn around nodes
  // (presumably in pixels — confirm against the rendering code).
  DONUTS_MARGIN: number;
  DONUT_WIDTH: number;

  // Maximum characters displayed inside a node / in a node title.
  NODE_MAX_CHARS: number;
  NODE_TITLE_MAX_CHARS: number;

  // Number of nodes displayed per page during value selection.
  PAGE_SIZE: number;

  // Count box default size
  CountBox: { x: number, y: number, w: number, h: number };

  // Store choose node state to avoid multiple node expand at the same time
  chooseWaiting: boolean;

  // Radii used for the donut chart around nodes.
  getDonutInnerRadius: () => number;
  getDonutOuterRadius: () => number;

  // D3 pie layout shared by the donut segments.
  pie: d3.Pie<ThisType<any>, number>;

  /**
   * Defines the list of possible nodes.
   * ROOT: Node used as graph root. It is the target of the query. Only one node of this type should be available in graph.
   * CHOOSE: Nodes defining a generic node label. From these nodes it is possible to select a value or explore relations.
   * VALUE: Unique node containing a value constraint. Usually replaces CHOOSE nodes once a value has been selected.
   * GROUP: Empty node used to group relations. No value can be selected but relations can be explored. These nodes don't have a count.
   */
  nodeTypes: Readonly<{
    ROOT: 0
    CHOOSE: 0
    VALUE: 0
    GROUP: 0
  }>;

  // Used to generate unique internal labels used for example as identifier in Cypher query.
  internalLabels: { [label: string]: number };

  /**
   * Create a normalized identifier from a node label.
   * Multiple calls with the same node label will generate different unique identifiers.
   *
   * @param nodeLabel
   * @returns normalized identifier from a node label
   */
  generateInternalLabel: (nodeLabel: string) => string;

  /**
   * Update Nodes SVG elements using D3.js update mechanisms.
   */
  updateNodes: () => void;

  /**
   * Update node data with changes done in dataModel.nodes model.
   */
  updateData: () => void;

  /**
   * Update nodes and result counts by executing a query for every node with the new graph structure.
   */
  updateCount: () => void;

  /**
   * Update values for nodes having the preloadData property.
   */
  updateAutoLoadValues: () => void;

  /**
   * Remove old elements.
   * Should be called after updateData.
   */
  removeElements: (exitingData: Node[]) => void;

  /**
   * Add all new elements.
   * Only the skeleton of new nodes is added; custom data will be added during the element update phase.
   * Should be called after updateData and before updateElements.
   */
  addNewElements: (enteringData: SVGGElement) => void;

  /**
   * Create the background for a new node element.
   * The background of a node is defined by a circle not visible by default (fill-opacity set to 0) but can be used to highlight a node with animation on this attribute.
   * This circle also defines the node zone that can receive events like mouse clicks.
   *
   * @param gNewNodeElements
   */
  addBackgroundElements: (gNewNodeElements: SVGGElement) => void;

  /**
   * Create the node main elements.
   *
   * @param gNewNodeElements
   */
  addMiddlegroundElements: (gNewNodeElements: SVGGElement) => void;

  /**
   * Create the node foreground elements.
   * It contains node additional elements, count or tools like navigation arrows.
   *
   * @param gNewNodeElements
   */
  addForegroundElements: (gNewNodeElements: SVGGElement) => void;

  /**
   * Updates all elements.
   */
  updateElements: () => void;

  // Update phase for the background layer (highlight circle).
  updateBackgroundElements: () => void;

  /**
   * Update the middle layer of nodes.
   * TODO refactor node generation to allow future extensions (for example add plugin with new node types...)
   */
  updateMiddlegroundElements: () => void;

  // Per-content-type update helpers for the middle layer.
  updateMiddlegroundElementsTooltip: (middleG: SVGGElement) => void;
  updateMiddlegroundElementsText: (gMiddlegroundTextNodes: SVGGElement) => void;
  updateMiddlegroundElementsImage: (gMiddlegroundImageNodes: SVGGElement) => void;
  updateMiddlegroundElementsSymbol: (gMiddlegroundSymbolNodes: SVGGElement) => void;
  updateMiddlegroundElementsSVG: (gMiddlegroundSVGNodes: SVGGElement) => void;
  updateMiddlegroundElementsDisplayedText: (middleG: Node[]) => void;

  /**
   * Updates the foreground elements.
   */
  updateForegroundElements: () => void;

  // Click handler for a donut segment.
  segmentClick: (d: any) => void;

  /**
   * Handle the mouse over event on nodes.
   */
  mouseOverNode: () => void;

  /**
   * Handle mouse out event on nodes.
   */
  mouseOutNode: () => void;

  /**
   * Handle the click event on nodes.
   */
  nodeClick: () => void;

  /**
   * Remove all the value nodes directly linked to the clicked node.
   *
   * @param clickedNode
   */
  collapseNode: (clickedNode: Node) => void;

  /**
   * Collapse all nodes with values expanded.
   */
  collapseAllNode: () => void;

  /**
   * Function called on a value node click.
   * In this case the value is added in the parent node and all the value nodes are collapsed.
   *
   * @param clickedNode
   */
  valueNodeClick: (clickedNode: Node) => void;

  /**
   * Function called on choose node click.
   * In this case a query is executed to get all the possible values.
   * @param clickedNode
   * TODO optimize with cached data?
   */
  chooseNodeClick: (clickedNode: Node) => void;

  /**
   * Add in all expanded choose nodes the value containing the specified value for the given attribute.
   * And remove it from the nodes data.
   *
   * @param attribute
   * @param value
   */
  addExpandedValue: (attribute: string, value: any) => void;

  /**
   * Get all nodes that contain a value.
   *
   * @param label If set return only nodes of this label.
   * @return Array of nodes containing at least one value.
   */
  getContainingValue: (label: string) => Node[];

  /**
   * Add value in all CHOOSE nodes with the specified label.
   *
   * @param label nodes where to insert
   * @param value
   */
  addValueForLabel: (label: string, value: any) => boolean;

  /**
   * Add a value in a node with the given id and the value of the first attribute if found in its data.
   *
   * @param nodeIds a list of node ids where to add the value.
   * @param displayAttributeValue the value to find in data and to add if found
   */
  addValue: (nodeIds: string[], displayAttributeValue: any) => boolean;

  /**
   * Remove a value from a node.
   * If the value is not found nothing is done.
   *
   * @param n
   * @param value
   */
  removeValue: (n: Node, value: any) => boolean;

  // Remove every value from the node.
  removeValues: (n: Node) => boolean;

  /**
   * Get the value in the provided nodeId for a specific value id.
   *
   * @param nodeId
   * @param constraintAttributeValue
   */
  getValue: (nodeId: string, constraintAttributeValue: any) => any;

  /**
   * Remove in all expanded nodes the value containing the specified value for the given attribute.
   * And move it back to nodes data.
   *
   * @param attribute
   * @param value
   */
  removeExpandedValue: (attribute: string, value: any) => void;

  /**
   * Return all nodes with isAutoLoadValue property set to true.
   */
  getAutoLoadValueNodes: () => Node[];

  /**
   * Add a list of related values if not already found in node.
   * A value is defined with the following structure
   * {
   *   id,
   *   rel,
   *   label
   * }
   *
   * @param n
   * @param values
   * @param isNegative
   */
  addRelatedValues: (n: Node, values: Node[], isNegative: boolean) => void;

  /**
   * Add a list of related values prefixed by a path of nodes.
   * A value is defined with the following structure
   * {
   *   id,
   *   rel,
   *   label
   * }
   *
   * @param n
   * @param relPath
   * @param values
   * @param isNegative
   */
  addRelatedBranch: (n: Node, relPath: Node[], values: Node[], isNegative: boolean) => void;

  /**
   * A value is defined with the following structure
   * {
   *   id,
   *   rel,
   *   label
   * }
   *
   * @param n
   * @param values
   */
  filterExistingValues: (n: Node, values: Node[]) => Node[];

  /**
   * Function called to expand a node containing values.
   * This function will create the value nodes with the clicked node internal data.
   * Only nodes corresponding to the current page index will be generated.
   *
   * @param clickedNode
   */
  expandNode: (clickedNode: Node) => void;

  /**
   * Fetches the list of relationships of a node and stores them in the relationships property.
   *
   * @param n the node to fetch the relationships.
   * @param callback
   * @param directionAngle
   */
  loadRelationshipData: (n: Node, callback: () => void, directionAngle: number) => void;

  /**
   * Expands all the relationships available in node.
   *
   * @param n
   * @param callback
   */
  expandRelationships: (n: Node, callback: () => void) => void;

  /**
   * Remove a node and its relationships (recursively) from the graph.
   *
   * @param n the node to remove.
   */
  removeNode: (n: Node) => boolean;

  /**
   * Remove empty branches containing a node.
   *
   * @param n the node to remove.
   * @return true if node has been removed
   */
  removeEmptyBranches: (n: Node) => boolean;

  /**
   * Get in the parent nodes the closest one to the root.
   *
   * @param n the node to start from.
   * @return the trunk node or the node in parameters if not found.
   */
  getTrunkNode: (n: Node) => typeof n;

  /**
   * Function to add on node event to clear the selection.
   * Call to this function on a node will remove the selected value and trigger a graph update.
   */
  clearSelection: () => void;
}
the_stack
import { ParamDefinition as PD } from '../../mol-util/param-definition';
import { Grid, Volume } from '../../mol-model/volume';
import { VisualContext } from '../visual';
import { Theme, ThemeRegistryContext } from '../../mol-theme/theme';
import { Mesh } from '../../mol-geo/geometry/mesh/mesh';
import { computeMarchingCubesMesh, computeMarchingCubesLines } from '../../mol-geo/util/marching-cubes/algorithm';
import { VolumeVisual, VolumeRepresentation, VolumeRepresentationProvider } from './representation';
import { LocationIterator } from '../../mol-geo/util/location-iterator';
import { NullLocation } from '../../mol-model/location';
import { VisualUpdateState } from '../util';
import { Lines } from '../../mol-geo/geometry/lines/lines';
import { RepresentationContext, RepresentationParamsGetter, Representation } from '../representation';
import { PickingId } from '../../mol-geo/geometry/picking';
import { EmptyLoci, Loci } from '../../mol-model/loci';
import { Interval } from '../../mol-data/int';
import { Tensor, Vec2, Vec3 } from '../../mol-math/linear-algebra';
import { fillSerial } from '../../mol-util/array';
import { createVolumeTexture2d, eachVolumeLoci, getVolumeTexture2dLayout } from './util';
import { TextureMesh } from '../../mol-geo/geometry/texture-mesh/texture-mesh';
import { extractIsosurface } from '../../mol-gl/compute/marching-cubes/isosurface';
import { WebGLContext } from '../../mol-gl/webgl/context';
import { CustomPropertyDescriptor } from '../../mol-model/custom-property';
import { Texture } from '../../mol-gl/webgl/texture';

// Shared parameter: the iso value at which the surface is extracted.
export const VolumeIsosurfaceParams = {
    isoValue: Volume.IsoValueParam
};
export type VolumeIsosurfaceParams = typeof VolumeIsosurfaceParams
export type VolumeIsosurfaceProps = PD.Values<VolumeIsosurfaceParams>

// True when the WebGL context has the extensions required for GPU marching cubes.
function gpuSupport(webgl: WebGLContext) {
    return webgl.extensions.colorBufferFloat && webgl.extensions.textureFloat && webgl.extensions.drawBuffers;
}

const Padding = 1;

// A volume fits the GPU path when its flattened 2D texture layout stays
// within half the maximum texture size (see also the throw in
// VolumeIsosurfaceTexture.get, which enforces the same limit).
function suitableForGpu(volume: Volume, webgl: WebGLContext) {
    const gridDim = volume.grid.cells.space.dimensions as Vec3;
    const { powerOfTwoSize } = getVolumeTexture2dLayout(gridDim, Padding);
    return powerOfTwoSize <= webgl.maxTextureSize / 2;
}

// Choose the GPU texture-mesh visual when requested and supported,
// otherwise fall back to the CPU mesh visual.
export function IsosurfaceVisual(materialId: number, volume: Volume, props: PD.Values<IsosurfaceMeshParams>, webgl?: WebGLContext) {
    if (props.tryUseGpu && webgl && gpuSupport(webgl) && suitableForGpu(volume, webgl)) {
        return IsosurfaceTextureMeshVisual(materialId);
    }
    return IsosurfaceMeshVisual(materialId);
}

// Loci covering the whole isosurface at the current iso value.
function getLoci(volume: Volume, props: VolumeIsosurfaceProps) {
    return Volume.Isosurface.Loci(volume, props.isoValue);
}

// Resolve a picking id to the single picked cell, or EmptyLoci when the
// pick belongs to a different object.
function getIsosurfaceLoci(pickingId: PickingId, volume: Volume, props: VolumeIsosurfaceProps, id: number) {
    const { objectId, groupId } = pickingId;
    if (id === objectId) {
        return Volume.Cell.Loci(volume, Interval.ofSingleton(groupId as Volume.CellIndex));
    }
    return EmptyLoci;
}

export function eachIsosurface(loci: Loci, volume: Volume, props: VolumeIsosurfaceProps, apply: (interval: Interval) => boolean) {
    return eachVolumeLoci(loci, volume, props.isoValue, apply);
}

//

// CPU path: extract a triangle mesh with marching cubes and transform it
// from grid space to cartesian space.
export async function createVolumeIsosurfaceMesh(ctx: VisualContext, volume: Volume, theme: Theme, props: VolumeIsosurfaceProps, mesh?: Mesh) {
    ctx.runtime.update({ message: 'Marching cubes...' });

    const ids = fillSerial(new Int32Array(volume.grid.cells.data.length));

    const surface = await computeMarchingCubesMesh({
        isoLevel: Volume.IsoValue.toAbsolute(props.isoValue, volume.grid.stats).absoluteValue,
        scalarField: volume.grid.cells,
        idField: Tensor.create(volume.grid.cells.space, Tensor.Data1(ids))
    }, mesh).runAsChild(ctx.runtime);

    const transform = Grid.getGridToCartesianTransform(volume.grid);
    Mesh.transform(surface, transform);
    if (ctx.webgl && !ctx.webgl.isWebGL2) {
        // 2nd arg means not to split triangles based on group id. Splitting triangles
        // is too expensive if each cell has its own group id as is the case here.
        Mesh.uniformTriangleGroup(surface, false);
    }

    surface.setBoundingSphere(Volume.getBoundingSphere(volume));

    return surface;
}

export const IsosurfaceMeshParams = {
    ...Mesh.Params,
    ...TextureMesh.Params,
    ...VolumeIsosurfaceParams,
    quality: { ...Mesh.Params.quality, isEssential: false },
    tryUseGpu: PD.Boolean(true),
};
export type IsosurfaceMeshParams = typeof IsosurfaceMeshParams

export function IsosurfaceMeshVisual(materialId: number): VolumeVisual<IsosurfaceMeshParams> {
    return VolumeVisual<Mesh, IsosurfaceMeshParams>({
        defaultProps: PD.getDefaultValues(IsosurfaceMeshParams),
        createGeometry: createVolumeIsosurfaceMesh,
        createLocationIterator: (volume: Volume) => LocationIterator(volume.grid.cells.data.length, 1, 1, () => NullLocation),
        getLoci: getIsosurfaceLoci,
        eachLocation: eachIsosurface,
        setUpdateState: (state: VisualUpdateState, volume: Volume, newProps: PD.Values<IsosurfaceMeshParams>, currentProps: PD.Values<IsosurfaceMeshParams>) => {
            if (!Volume.IsoValue.areSame(newProps.isoValue, currentProps.isoValue, volume.grid.stats)) state.createGeometry = true;
        },
        geometryUtils: Mesh.Utils,
        // Switch to the GPU visual when it becomes available/suitable.
        mustRecreate: (volume: Volume, props: PD.Values<IsosurfaceMeshParams>, webgl?: WebGLContext) => {
            return props.tryUseGpu && !!webgl && suitableForGpu(volume, webgl);
        }
    }, materialId);
}

//

// Caches the volume data as a 2D texture on the volume's custom properties
// so it is uploaded once and disposed with the volume.
namespace VolumeIsosurfaceTexture {
    const name = 'volume-isosurface-texture';
    export const descriptor = CustomPropertyDescriptor({ name });
    export function get(volume: Volume, webgl: WebGLContext) {
        const { resources } = webgl;
        const transform = Grid.getGridToCartesianTransform(volume.grid);
        const gridDimension = Vec3.clone(volume.grid.cells.space.dimensions as Vec3);
        const { width, height, powerOfTwoSize: texDim } = getVolumeTexture2dLayout(gridDimension, Padding);
        const gridTexDim = Vec3.create(width, height, 0);
        const gridTexScale = Vec2.create(width / texDim, height / texDim);
        // console.log({ texDim, width, height, gridDimension });

        if (texDim > webgl.maxTextureSize / 2) {
            throw new Error('volume too large for gpu isosurface extraction');
        }

        if (!volume._propertyData[name]) {
            volume._propertyData[name] = resources.texture('image-uint8', 'alpha', 'ubyte', 'linear');
            const texture = volume._propertyData[name] as Texture;
            texture.define(texDim, texDim);
            // load volume into sub-section of texture
            texture.load(createVolumeTexture2d(volume, 'data', Padding), true);
            volume.customProperties.add(descriptor);
            // Ensure the texture is destroyed together with the volume.
            volume.customProperties.assets(descriptor, [{ dispose: () => texture.destroy() }]);
        }

        gridDimension[0] += Padding;
        gridDimension[1] += Padding;

        return {
            texture: volume._propertyData[name] as Texture,
            transform,
            gridDimension,
            gridTexDim,
            gridTexScale
        };
    }
}

// GPU path: extract the isosurface directly on the GPU into a TextureMesh.
// Requires a WebGL context; reuses the texture mesh's double buffer when given.
async function createVolumeIsosurfaceTextureMesh(ctx: VisualContext, volume: Volume, theme: Theme, props: VolumeIsosurfaceProps, textureMesh?: TextureMesh) {
    if (!ctx.webgl) throw new Error('webgl context required to create volume isosurface texture-mesh');

    const { max, min } = volume.grid.stats;
    const diff = max - min;
    const value = Volume.IsoValue.toAbsolute(props.isoValue, volume.grid.stats).absoluteValue;
    // iso level normalized to [0, 1] as expected by the GPU extraction shader.
    const isoLevel = ((value - min) / diff);

    const { texture, gridDimension, gridTexDim, gridTexScale, transform } = VolumeIsosurfaceTexture.get(volume, ctx.webgl);

    const buffer = textureMesh?.doubleBuffer.get();
    const gv = extractIsosurface(ctx.webgl, texture, gridDimension, gridTexDim, gridTexScale, transform, isoLevel, value < 0, false, buffer?.vertex, buffer?.group, buffer?.normal);

    const surface = TextureMesh.create(gv.vertexCount, 1, gv.vertexTexture, gv.groupTexture, gv.normalTexture, Volume.getBoundingSphere(volume), textureMesh);

    return surface;
}

export function IsosurfaceTextureMeshVisual(materialId: number): VolumeVisual<IsosurfaceMeshParams> {
    return VolumeVisual<TextureMesh, IsosurfaceMeshParams>({
        defaultProps: PD.getDefaultValues(IsosurfaceMeshParams),
        createGeometry: createVolumeIsosurfaceTextureMesh,
        createLocationIterator: (volume: Volume) => LocationIterator(volume.grid.cells.data.length, 1, 1, () => NullLocation),
        getLoci: getIsosurfaceLoci,
        eachLocation: eachIsosurface,
        setUpdateState: (state: VisualUpdateState, volume: Volume, newProps: PD.Values<IsosurfaceMeshParams>, currentProps: PD.Values<IsosurfaceMeshParams>) => {
            if (!Volume.IsoValue.areSame(newProps.isoValue, currentProps.isoValue, volume.grid.stats)) state.createGeometry = true;
        },
        geometryUtils: TextureMesh.Utils,
        // Fall back to the CPU visual when the GPU path is disabled or unsupported.
        mustRecreate: (volume: Volume, props: PD.Values<IsosurfaceMeshParams>, webgl?: WebGLContext) => {
            return !props.tryUseGpu || !webgl || !suitableForGpu(volume, webgl);
        },
        // Release GPU resources owned by the texture mesh.
        dispose: (geometry: TextureMesh) => {
            geometry.vertexTexture.ref.value.destroy();
            geometry.groupTexture.ref.value.destroy();
            geometry.normalTexture.ref.value.destroy();
            geometry.doubleBuffer.destroy();
        }
    }, materialId);
}

//

// CPU path: extract the isosurface as line geometry (wireframe).
export async function createVolumeIsosurfaceWireframe(ctx: VisualContext, volume: Volume, theme: Theme, props: VolumeIsosurfaceProps, lines?: Lines) {
    ctx.runtime.update({ message: 'Marching cubes...' });

    const ids = fillSerial(new Int32Array(volume.grid.cells.data.length));

    const wireframe = await computeMarchingCubesLines({
        isoLevel: Volume.IsoValue.toAbsolute(props.isoValue, volume.grid.stats).absoluteValue,
        scalarField: volume.grid.cells,
        idField: Tensor.create(volume.grid.cells.space, Tensor.Data1(ids))
    }, lines).runAsChild(ctx.runtime);

    const transform = Grid.getGridToCartesianTransform(volume.grid);
    Lines.transform(wireframe, transform);

    wireframe.setBoundingSphere(Volume.getBoundingSphere(volume));

    return wireframe;
}

export const IsosurfaceWireframeParams = {
    ...Lines.Params,
    ...VolumeIsosurfaceParams,
    quality: { ...Lines.Params.quality, isEssential: false },
    sizeFactor: PD.Numeric(3, { min: 0, max: 10, step: 0.1 }),
};
export type IsosurfaceWireframeParams = typeof IsosurfaceWireframeParams

export function IsosurfaceWireframeVisual(materialId: number): VolumeVisual<IsosurfaceWireframeParams> {
    return VolumeVisual<Lines, IsosurfaceWireframeParams>({
        defaultProps: PD.getDefaultValues(IsosurfaceWireframeParams),
        createGeometry: createVolumeIsosurfaceWireframe,
        createLocationIterator: (volume: Volume) => LocationIterator(volume.grid.cells.data.length, 1, 1, () => NullLocation),
        getLoci: getIsosurfaceLoci,
        eachLocation: eachIsosurface,
        setUpdateState: (state: VisualUpdateState, volume: Volume, newProps: PD.Values<IsosurfaceWireframeParams>, currentProps: PD.Values<IsosurfaceWireframeParams>) => {
            if (!Volume.IsoValue.areSame(newProps.isoValue, currentProps.isoValue, volume.grid.stats)) state.createGeometry = true;
        },
        geometryUtils: Lines.Utils
    }, materialId);
}

//

const IsosurfaceVisuals = {
    'solid': (ctx: RepresentationContext, getParams: RepresentationParamsGetter<Volume, IsosurfaceMeshParams>) => VolumeRepresentation('Isosurface mesh', ctx, getParams, IsosurfaceVisual, getLoci),
    'wireframe': (ctx: RepresentationContext, getParams: RepresentationParamsGetter<Volume, IsosurfaceWireframeParams>) => VolumeRepresentation('Isosurface wireframe', ctx, getParams, IsosurfaceWireframeVisual, getLoci),
};

export const IsosurfaceParams = {
    ...IsosurfaceMeshParams,
    ...IsosurfaceWireframeParams,
    visuals: PD.MultiSelect(['solid'], PD.objectToOptions(IsosurfaceVisuals)),
};
export type IsosurfaceParams = typeof IsosurfaceParams

// Default the iso value to 2 sigma relative to the volume's statistics.
export function getIsosurfaceParams(ctx: ThemeRegistryContext, volume: Volume) {
    const p = PD.clone(IsosurfaceParams);
    p.isoValue = Volume.createIsoValueParam(Volume.IsoValue.relative(2), volume.grid.stats);
    return p;
}

export type IsosurfaceRepresentation = VolumeRepresentation<IsosurfaceParams>
export function IsosurfaceRepresentation(ctx: RepresentationContext, getParams: RepresentationParamsGetter<Volume, IsosurfaceParams>): IsosurfaceRepresentation {
    return Representation.createMulti('Isosurface', ctx, getParams, Representation.StateBuilder, IsosurfaceVisuals as unknown as Representation.Def<Volume, IsosurfaceParams>);
}

export const IsosurfaceRepresentationProvider = VolumeRepresentationProvider({
    name: 'isosurface', label: 'Isosurface', description: 'Displays a triangulated isosurface of volumetric data.',
    factory: IsosurfaceRepresentation,
    getParams: getIsosurfaceParams,
    defaultValues: PD.getDefaultValues(IsosurfaceParams),
    defaultColorTheme: { name: 'uniform' },
    defaultSizeTheme: { name: 'uniform' },
    isApplicable: (volume: Volume) => !Volume.isEmpty(volume)
});
the_stack
import { assertNever } from "../../assert";
import * as functionless_event_bridge from "../types";

/**
 * These are simplified and better structured interfaces/types to make it easier to work with Event Bridge Patterns.
 * Use the {@link patternToEventBridgePattern} to generate a valid object for event bridge.
 *
 * All patterns are applied to a single field. {@link PatternDocument}s provide AND logic on multiple fields.
 */
export type Pattern =
  | AggregatePattern
  | ExactMatchPattern
  | PrefixMatchPattern
  | NumericRangePattern
  | PresentPattern
  | AnythingButPattern
  | AnythingButPrefixPattern
  | EmptyPattern
  | NumericAggregationPattern
  | NeverPattern;

/**
 * The base of an event pattern is an object with multiple fields. Additionally some fields like detail can be deep.
 */
export interface PatternDocument {
  doc: {
    [key: string]: PatternDocument | Pattern;
  };
}

export const isPatternDocument = (
  x: Pattern | PatternDocument
): x is PatternDocument => {
  return "doc" in x;
};

/**
 * One or more patterns with OR logic between them on a single field.
 */
export interface AggregatePattern {
  patterns: (
    | ExactMatchPattern
    | PrefixMatchPattern
    | PresentPattern
    | AnythingButPattern
    | AnythingButPrefixPattern
    | NeverPattern
  )[];
}

export const isAggregatePattern = (x: Pattern): x is AggregatePattern => {
  return "patterns" in x;
};

/**
 * One or more {@link NumericRangePattern} with OR/Union logic applied.
 */
export interface NumericAggregationPattern {
  ranges: NumericRangePattern[];
}

export const isNumericAggregationPattern = (
  x: Pattern
): x is NumericAggregationPattern => {
  return "ranges" in x;
};

/**
 * Equals logic for a string, boolean, or null.
 * Number is handled using {@link NumericRangePattern} and normalized later.
 */
export interface ExactMatchPattern {
  // use NumericRange to represent a number
  value: string | boolean | null;
}

export const isExactMatchPattern = (x: Pattern): x is ExactMatchPattern => {
  return "value" in x;
};

/**
 * Starts With logic for strings.
 */
export interface PrefixMatchPattern {
  prefix: string;
}

export const isPrefixMatchPattern = (x: Pattern): x is PrefixMatchPattern => {
  return "prefix" in x;
};

/**
 * The lower or upper end of a numeric range.
 * Use {@link Number.POSITIVE_INFINITY} or {@link Number.NEGATIVE_INFINITY} to represent no LOWER or UPPER value.
 */
export interface NumericRangeLimit {
  value: number;
  inclusive: boolean;
}

/**
 * A range of values from a possible {@link Number.NEGATIVE_INFINITY} to {@link Number.POSITIVE_INFINITY}.
 *
 * Use an Upper and Lower bound of a single value to represent a single value.
 * Exclusive on upper and lower represents a NOT on the value.
 *
 * Generally represents the AND logic of a numeric range.
 */
export interface NumericRangePattern {
  lower: NumericRangeLimit;
  upper: NumericRangeLimit;
}

export const isNumericRangePattern = (x: Pattern): x is NumericRangePattern => {
  return "lower" in x || "upper" in x;
};

/**
 * Exists or `field in` logic on an object in Event Bridge
 */
export interface PresentPattern {
  isPresent: boolean;
}

export const isPresentPattern = (x: Pattern): x is PresentPattern => {
  return "isPresent" in x;
};

/**
 * NOT logic for string and null.
 *
 * Use {@link NumericRangePattern} to represent NOT logic for numbers.
 */
export type AnythingButPattern = {
  // use NumericRange to represent a number
  anythingBut: (string | null)[];
};

export const isAnythingButPattern = (x: any): x is AnythingButPattern => {
  return "anythingBut" in x;
};

/**
 * NOT-prefix logic for strings.
 *
 * Use {@link NumericRangePattern} to represent NOT logic for numbers.
 */
export type AnythingButPrefixPattern = {
  // use NumericRange to represent a number
  anythingButPrefix: string;
};

export const isAnythingButPrefixPattern = (
  x: any
): x is AnythingButPrefixPattern => {
  return "anythingButPrefix" in x;
};

/**
 * A Pattern that represents logic that is always true.
 * This pattern will be filtered out by the end of the compilation.
 * If the only pattern remaining is an EmptyPattern, the field will be removed from the pattern.
 */
export interface EmptyPattern {
  empty: true;
}

export const isEmptyPattern = (x: Pattern): x is EmptyPattern => {
  return "empty" in x;
};

/**
 * A Pattern that represents logic that is never true.
 * This pattern may be filtered out at the end.
 * It is the opposite of EmptyPattern.
 * If it is applied to AND logic, either between or within a field, an error is thrown.
 *
 * When to return NeverPattern and when to Error
 * * NeverPattern - when the logic is impossible, but valid aka, contradictions x !== "a" && x === "a". These MAY later be evaluated to possible using an OR.
 * * Error - When the combination is unsupported by Event Bridge or Functionless.
 *           For example, if we do not know how to represent !x.startsWith("x") && x.startsWith("y"),
 *           then we need to fail compilation as the logic may filter an event if it was supported and not ignored.
 */
export interface NeverPattern {
  never: true;
  reason?: string;
}

export const isNeverPattern = (x: Pattern): x is NeverPattern => {
  return "never" in x;
};

// A range [v, v] with both ends inclusive — matches exactly the value v.
export const isPositiveSingleValueRange = (pattern: NumericRangePattern) =>
  pattern.lower.value === pattern.upper.value &&
  pattern.lower.inclusive &&
  pattern.upper.inclusive;

// A range [v, v] with both ends exclusive — matches anything but the value v.
export const isNegativeSingleValueRange = (pattern: NumericRangePattern) =>
  pattern.lower.value === pattern.upper.value &&
  !pattern.lower.inclusive &&
  !pattern.upper.inclusive;

/**
 * Transforms the proprietary {@link PatternDocument} into AWS's EventPattern schema.
 * https://docs.aws.amazon.com/eventbridge/latest/userguide/eb-event-patterns.html
 *
 * For each field,
 *   if the field is a nested PatternDocument, recurse.
 *   if the field is a Pattern, output the pattern as a EventPattern.
 *
 * We will not maintain empty patterns or pattern documents.
 */
export const patternDocumentToEventPattern = (
  patternDocument: PatternDocument
): functionless_event_bridge.SubPattern => {
  return Object.entries(patternDocument.doc).reduce((pattern, [key, entry]) => {
    const keyPattern = isPatternDocument(entry)
      ? patternDocumentToEventPattern(entry)
      : patternToEventBridgePattern(entry);
    // drop empty sub-patterns so they don't constrain matching
    if (!keyPattern || Object.keys(keyPattern).length === 0) {
      return pattern;
    }
    return {
      ...pattern,
      [key]: keyPattern,
    };
  }, {} as functionless_event_bridge.SubPattern);
};

/**
 * Transforms the proprietary {@link Pattern} into a EventBridge's {@link functionless_event_bridge.PatternList} schema.
 *
 * @param aggregate true when this pattern is one of several OR'd alternatives;
 *                  affects how {@link NeverPattern} is handled (dropped vs. error).
 */
export const patternToEventBridgePattern = (
  pattern: Pattern,
  aggregate?: boolean
): functionless_event_bridge.PatternList | undefined => {
  if (isEmptyPattern(pattern)) {
    return undefined;
  } else if (isExactMatchPattern(pattern)) {
    return [pattern.value];
  } else if (isPresentPattern(pattern)) {
    return [{ exists: pattern.isPresent }];
  } else if (isPrefixMatchPattern(pattern)) {
    return [{ prefix: pattern.prefix }];
  } else if (isAnythingButPattern(pattern)) {
    // single-element lists are unwrapped to a scalar "anything-but"
    return Array.isArray(pattern.anythingBut) && pattern.anythingBut.length === 1
      ? [{ "anything-but": pattern.anythingBut[0] }]
      : [
          {
            "anything-but": pattern.anythingBut,
          },
        ];
  } else if (isAnythingButPrefixPattern(pattern)) {
    return [{ "anything-but": { prefix: pattern.anythingButPrefix } }];
  } else if (isNumericRangePattern(pattern)) {
    // (-inf, +inf) matches everything — equivalent to an empty pattern
    if (
      pattern.lower.value === Number.NEGATIVE_INFINITY &&
      pattern.upper.value === Number.POSITIVE_INFINITY
    ) {
      return undefined;
    }
    if (isPositiveSingleValueRange(pattern)) {
      return [pattern.lower.value];
    } else if (isNegativeSingleValueRange(pattern)) {
      return [{ "anything-but": pattern.lower.value }];
    }
    /**
     * turns the structured numeric range {@link NumericRangePattern}
     * into the EventBridge format {@link functionless_event_bridge.NumberPattern}
     *
     * if the Lower or Upper are NEGATIVE_INFINITY or POSITIVE_INFINITY respectively, do not include in the range.
     * if the Lower or Upper range values are inclusive, add a `=` to the sign.
     *
     * { lower: { value: 10, inclusive: false }, upper: { value: POSITIVE_INFINITY } } => { numeric: [">", 10] }
     * { lower: { value: 10, inclusive: true }, upper: { value: POSITIVE_INFINITY } } => { numeric: [">=", 10] }
     * { lower: { value: 10, inclusive: true }, upper: { value: 100, inclusive: false } } => { numeric: [">=", 10, "<", 100] }
     * { lower: { value: NEGATIVE_INFINITY }, upper: { value: 100, inclusive: true } } => { numeric: ["<=", 100] }
     */
    return [
      {
        numeric: [
          ...(pattern.lower.value !== Number.NEGATIVE_INFINITY
            ? [pattern.lower.inclusive ? ">=" : ">", pattern.lower.value]
            : []),
          ...(pattern.upper.value !== Number.POSITIVE_INFINITY
            ? [pattern.upper.inclusive ? "<=" : "<", pattern.upper.value]
            : []),
        ] as [string, number, string, number] | [string, number],
      },
    ];
  } else if (isAggregatePattern(pattern)) {
    if (pattern.patterns.length === 0) {
      return undefined;
    }
    // flatten the OR'd alternatives into a single pattern list
    return pattern.patterns
      .map((p) => patternToEventBridgePattern(p, pattern.patterns.length > 1))
      .reduce(
        (acc, pattern) => [...(acc ?? []), ...(pattern ?? [])],
        [] as functionless_event_bridge.PatternList
      );
  } else if (isNumericAggregationPattern(pattern)) {
    if (pattern.ranges.length === 0) {
      return undefined;
    }
    return pattern.ranges
      .map((x) => patternToEventBridgePattern(x, true))
      .reduce(
        (acc, pattern) => [...(acc ?? []), ...(pattern ?? [])],
        [] as functionless_event_bridge.PatternList
      );
  } else if (isNeverPattern(pattern)) {
    // if never is in an aggregate and not the lone pattern, return undefined
    // if never is the lone value, either in an aggregate or directly on a field, fail
    if (!aggregate) {
      throw Error(
        pattern.reason
          ? `Impossible logic discovered: ${pattern.reason}`
          : "Impossible logic discovered."
      );
    }
    return undefined;
  }
  assertNever(pattern);
};
the_stack
import * as R from "ramda";
import {
  decryptedEnvsStateProducer,
  fetchLoadedEnvs,
  fetchPendingEnvs,
} from "../lib/envs";
import { Client, Api, Model } from "@core/types";
import { clientAction, dispatch } from "../handler";
import { pick } from "@core/lib/utils/pick";
import { getAuth } from "@core/lib/client";
import { verifyCurrentUser } from "../lib/trust";
import nacl from "tweetnacl";
import naclUtil from "tweetnacl-util";
import { log } from "@core/lib/utils/logger";

/*
 * Session lifecycle actions: creating and refreshing API sessions, signing
 * out, and clearing auth tokens. Each `clientAction` call registers an
 * action type with draft-mutating state producers and (for async actions)
 * a handler that dispatches the corresponding API request.
 */

// Signs in to an existing account. Proves device identity by signing the
// account's (userId, orgId, deviceId, provider) tuple with the device's
// signing key, then verifies trust and re-fetches environments.
clientAction<
  Client.Action.ClientActions["CreateSession"],
  Partial<Pick<Client.State, "envs">> & {
    timestamp: number;
  }
>({
  type: "asyncClientAction",
  actionType: Client.ActionType.CREATE_SESSION,
  stateProducer: (draft) => {
    // Reset all session-scoped state before attempting a fresh sign-in.
    draft.isCreatingSession = true;
    delete draft.createSessionError;
    delete draft.trustedRoot;
    draft.graph = {};
    delete draft.graphUpdatedAt;
    draft.trustedSessionPubkeys = {};
    delete draft.fetchSessionError;
  },
  endStateProducer: (draft) => {
    delete draft.isCreatingSession;
    delete draft.verifyingEmail;
    delete draft.emailVerificationCode;
  },
  failureStateProducer: (draft, { payload }) => {
    draft.createSessionError = payload;
  },
  handler: async (
    state,
    action,
    { context: contextParams, dispatchSuccess, dispatchFailure }
  ) => {
    const { payload } = action;
    let auth = state.orgUserAccounts[
      payload.accountId
    ] as Client.ClientUserAuth;
    if (!auth) {
      throw new Error("Invalid account");
    }
    // Email-provider accounts authenticate with a verification token;
    // all other providers require an external auth session id.
    if (auth.provider == "email" && !payload.emailVerificationToken) {
      throw new Error("emailVerificationToken required");
    } else if (auth.provider != "email" && !payload.externalAuthSessionId) {
      throw new Error("externalAuthSessionId required");
    }
    const context = {
      ...contextParams,
      hostUrl: auth.hostUrl,
      accountIdOrCliKey: payload.accountId,
    };
    // Detached ed25519 signature over the identity tuple (JSON-encoded),
    // base64-encoded for transport.
    const signature = naclUtil.encodeBase64(
        nacl.sign.detached(
          naclUtil.decodeUTF8(
            JSON.stringify(
              R.props(["userId", "orgId", "deviceId", "provider"], auth)
            )
          ),
          naclUtil.decodeBase64(auth.privkey.keys.signingKey)
        )
      ),
      apiRes = await dispatch(
        {
          type: Api.ActionType.CREATE_SESSION,
          payload: {
            ...pick(["orgId", "userId", "deviceId"], auth),
            signature,
            ...(auth.provider == "email"
              ? {
                  provider: auth.provider,
                  emailVerificationToken: payload.emailVerificationToken!,
                }
              : {
                  provider: auth.provider,
                  externalAuthSessionId: payload.externalAuthSessionId!,
                }),
          },
        },
        { ...context, rootClientAction: action }
      );
    if (!apiRes.success) {
      return dispatchFailure((apiRes.resultAction as any).payload, context);
    }
    const timestamp = (
      (apiRes.resultAction as any).payload as Api.Net.SessionResult
    ).timestamp;
    // Post-auth pipeline: verify user trust chain, then refresh both loaded
    // and pending environments. Any failure converts to a clientError.
    try {
      const verifyRes = await verifyCurrentUser(apiRes.state, context);
      if (!verifyRes.success) {
        throw new Error("Couldn't verify current user");
      }
      const fetchLoadedRes = await fetchLoadedEnvs(verifyRes.state, context);
      if (fetchLoadedRes && !fetchLoadedRes.success) {
        throw new Error("Error fetching latest loaded environments");
      }
      const fetchPendingRes = await fetchPendingEnvs(
        fetchLoadedRes?.state ?? verifyRes.state,
        context
      );
      if (fetchPendingRes && !fetchPendingRes.success) {
        throw new Error("Error fetching latest pending environments");
      }
    } catch (error) {
      return dispatchFailure({ type: "clientError", error }, context);
    }
    return dispatchSuccess({ timestamp }, context);
  },
});

// API request for CreateSession. On success, merges the returned identity
// fields and org settings into the local account record and installs the
// fresh graph.
clientAction<
  Api.Action.RequestActions["CreateSession"],
  Api.Net.ApiResultTypes["CreateSession"]
>({
  type: "apiRequestAction",
  actionType: Api.ActionType.CREATE_SESSION,
  loggableType: "authAction",
  successStateProducer: (draft, { meta, payload }) => {
    const accountId = payload.userId,
      orgAccount = draft.orgUserAccounts[accountId],
      org = payload.graph[payload.orgId] as Model.Org;
    draft.orgUserAccounts[accountId] = {
      ...orgAccount,
      ...pick(
        [
          "token",
          "email",
          "firstName",
          "lastName",
          "uid",
          "provider",
          "userId",
          "deviceId",
        ],
        payload
      ),
      externalAuthProviderId:
        draft.completedExternalAuth?.externalAuthProviderId,
      lastAuthAt: payload.timestamp,
      orgName: org.name,
      requiresPassphrase: org.settings.crypto.requiresPassphrase,
      requiresLockout: org.settings.crypto.requiresLockout,
      lockoutMs: org.settings.crypto.lockoutMs,
    } as Client.ClientUserAuth;
    if (payload.type == "tokenSession") {
      draft.signedTrustedRoot = payload.signedTrustedRoot;
    }
    draft.graph = payload.graph;
    draft.graphUpdatedAt = payload.graphUpdatedAt;
  },
});

// Refreshes the current session. Sends the local graphUpdatedAt so the
// server can answer "notModified"; otherwise re-verifies the user and
// re-fetches environments, then clears orphaned blobs on success.
clientAction<
  Client.Action.ClientActions["GetSession"],
  Partial<Pick<Client.State, "envs">> & {
    timestamp?: number;
    notModified?: true;
  }
>({
  type: "asyncClientAction",
  actionType: Client.ActionType.GET_SESSION,
  stateProducer: (draft) => {
    draft.isFetchingSession = true;
    delete draft.fetchSessionError;
  },
  failureStateProducer: (draft, { payload }) => {
    draft.fetchSessionError = payload;
  },
  endStateProducer: (draft) => {
    delete draft.isFetchingSession;
  },
  successStateProducer: decryptedEnvsStateProducer,
  successHandler: async (state, action, res, context) => {
    if (res.notModified) {
      return;
    }
    // NOTE: fire-and-forget — result of the cleanup dispatch isn't awaited.
    dispatch(
      {
        type: Client.ActionType.CLEAR_ORPHANED_BLOBS,
      },
      context
    );
  },
  handler: async (
    state,
    action,
    { context, dispatchSuccess, dispatchFailure }
  ) => {
    let auth = getAuth<Client.ClientUserAuth>(state, context.accountIdOrCliKey);
    if (!auth) {
      throw new Error("Action requires authentication and decrypted privkey");
    }
    const apiRes = await dispatch(
      {
        type: Api.ActionType.GET_SESSION,
        payload: {
          graphUpdatedAt: state.graphUpdatedAt,
        },
      },
      { ...context, rootClientAction: action }
    );
    if (!apiRes.success) {
      return dispatchFailure((apiRes.resultAction as any).payload, context);
    }
    // Server says our graph is current — nothing further to fetch.
    if (
      (
        (apiRes.resultAction as any)
          .payload as Api.Net.ApiResultTypes["GetSession"]
      ).type == "notModified"
    ) {
      return dispatchSuccess({ notModified: true }, context);
    }
    const timestamp = (
      (apiRes.resultAction as any).payload as Api.Net.SessionResult
    ).timestamp;
    // Same post-auth pipeline as CreateSession (see above).
    try {
      const verifyRes = await verifyCurrentUser(apiRes.state, context);
      if (!verifyRes.success) {
        throw new Error("Couldn't verify current user");
      }
      const fetchLoadedRes = await fetchLoadedEnvs(
        verifyRes.state,
        context,
        action.payload?.skipWaitForReencryption
      );
      if (fetchLoadedRes && !fetchLoadedRes.success) {
        throw new Error(
          "Error fetching latest environments with pending changes"
        );
      }
      const fetchPendingRes = await fetchPendingEnvs(
        fetchLoadedRes?.state ?? verifyRes.state,
        context
      );
      if (fetchPendingRes && !fetchPendingRes.success) {
        throw new Error("Error fetching latest pending environments");
      }
    } catch (error) {
      return dispatchFailure({ type: "clientError", error }, context);
    }
    return dispatchSuccess({ timestamp }, context);
  },
});

// Resets account/client state and records the chosen default account id.
clientAction<Client.Action.ClientActions["SelectDefaultAccount"]>({
  type: "clientAction",
  actionType: Client.ActionType.SELECT_DEFAULT_ACCOUNT,
  stateProducer: (draft, { payload: { accountId } }) => ({
    ...draft,
    ...Client.defaultAccountState,
    ...Client.defaultClientState,
    defaultAccountId: accountId,
  }),
});

// Signs out of an account: best-effort server token clear, then local
// state reset (pending env updates/invites are preserved; the stored token
// for the account is dropped).
clientAction<Client.Action.ClientActions["SignOut"]>({
  type: "asyncClientAction",
  actionType: Client.ActionType.SIGN_OUT,
  successStateProducer: (
    draft,
    {
      meta: {
        rootAction: {
          payload: { accountId },
        },
      },
    }
  ) =>
    ({
      ...draft,
      ...R.omit(
        ["pendingEnvUpdates", "pendingEnvsUpdatedAt", "pendingInvites"],
        Client.defaultAccountState
      ),
      ...Client.defaultClientState,
      orgUserAccounts: {
        ...draft.orgUserAccounts,
        [accountId]: R.omit(["token"], draft.orgUserAccounts[accountId] ?? {}),
      },
    } as Client.State),
  handler: async (
    state,
    action,
    { context, dispatchSuccess, dispatchFailure }
  ) => {
    const {
      payload: { accountId },
    } = action;

    // clear server token
    // if it fails we still just sign out on the client-side
    try {
      await dispatch(
        {
          type: Api.ActionType.CLEAR_TOKEN,
          payload: {},
        },
        { ...context, rootClientAction: action }
      );
    } catch (err) {}

    return dispatchSuccess(null, context);
  },
});

// Completes sign-in for a freshly deployed self-hosted installation. The
// init token is a base64-encoded JSON tuple [orgId, userId, deviceId, token]
// which is validated, converted into a clientUserAuth account record, and
// then used to establish a session via GET_SESSION.
clientAction<Client.Action.ClientActions["SignInPendingSelfHosted"]>({
  type: "asyncClientAction",
  actionType: Client.ActionType.SIGN_IN_PENDING_SELF_HOSTED,
  stateProducer: (draft, { meta, payload: { index, initToken } }) => {
    let orgId: string, userId: string, deviceId: string, token: string;
    const throwInvalidTokenErr = () => {
      throw new Error("Invalid self-hosted init token");
    };

    let parsed: [string, string, string, string];
    try {
      parsed = JSON.parse(
        naclUtil.encodeUTF8(naclUtil.decodeBase64(initToken))
      ) as [string, string, string, string];
    } catch (err) {
      return throwInvalidTokenErr();
    }
    // Token must decode to exactly four strings.
    if (parsed.length != 4 || !R.all((s) => typeof s == "string", parsed)) {
      return throwInvalidTokenErr();
    }
    [orgId, userId, deviceId, token] = parsed;

    const pendingAuth = draft.pendingSelfHostedDeployments[index];
    const now = Date.now();

    // Promote the pending deployment record into a regular account,
    // stripping deployment-only fields.
    draft.orgUserAccounts[userId] = {
      ...R.omit(
        [
          "type",
          "subdomain",
          "domain",
          "codebuildLink",
          "registerAction",
          "customDomain",
          "verifiedSenderEmail",
          "notifySmsWhenDone",
        ],
        pendingAuth
      ),
      type: "clientUserAuth",
      orgId,
      userId,
      deviceId,
      token,
      addedAt: now,
      lastAuthAt: now,
    };

    delete draft.authenticatePendingSelfHostedAccountError;
    draft.authenticatingPendingSelfHostedAccountId = userId;
  },
  failureStateProducer: (draft, { meta, payload }) => {
    draft.authenticatePendingSelfHostedAccountError = payload;
  },
  successStateProducer: (draft, { meta, payload }) => {
    // Deployment is fully authenticated — drop it from the pending list.
    const index = meta.rootAction.payload.index;
    draft.pendingSelfHostedDeployments.splice(index, 1);
  },
  endStateProducer: (draft, { meta, payload }) => {
    delete draft.authenticatingPendingSelfHostedAccountId;
  },
  handler: async (
    state,
    { payload: { index, initToken } },
    { context, dispatchSuccess, dispatchFailure }
  ) => {
    if (!state.authenticatingPendingSelfHostedAccountId) {
      throw new Error("state.authenticatingPendingSelfHostedAccountId not set");
    }
    const dispatchContext = {
      ...context,
      accountIdOrCliKey: state.authenticatingPendingSelfHostedAccountId,
    };
    const res = await dispatch(
      { type: Client.ActionType.GET_SESSION },
      dispatchContext
    );
    return res.success
      ? dispatchSuccess(null, dispatchContext)
      : dispatchFailure((res.resultAction as any)?.payload, dispatchContext);
  },
});

// API request for GetSession. A 401 failure signs the account out locally
// (keeping pending env updates/invites); success merges identity and org
// settings like CreateSession does.
clientAction<
  Api.Action.RequestActions["GetSession"],
  Api.Net.ApiResultTypes["GetSession"]
>({
  type: "apiRequestAction",
  actionType: Api.ActionType.GET_SESSION,
  loggableType: "fetchMetaAction",
  authenticated: true,
  failureStateProducer: (draft, { meta, payload }) => {
    const accountId = meta.accountIdOrCliKey;
    if (!accountId) {
      return;
    }
    // Token rejected — clear local session state for this account.
    if (
      typeof payload.error == "object" &&
      "code" in payload.error &&
      payload.error.code == 401
    ) {
      return {
        ...draft,
        ...R.omit(
          ["pendingEnvUpdates", "pendingEnvsUpdatedAt", "pendingInvites"],
          Client.defaultAccountState
        ),
        ...Client.defaultClientState,
        orgUserAccounts: {
          ...draft.orgUserAccounts,
          [accountId]: R.omit(
            ["token"],
            draft.orgUserAccounts[accountId] ?? {}
          ),
        },
      } as Client.State;
    }
  },
  successStateProducer: (draft, { meta, payload }) => {
    if (payload.type == "notModified") {
      return draft;
    }
    const accountId = meta.accountIdOrCliKey!,
      orgAccount = draft.orgUserAccounts[accountId]!,
      org = payload.graph[payload.orgId] as Model.Org;
    draft.orgUserAccounts[accountId] = {
      ...orgAccount,
      ...pick(
        [
          "token",
          "email",
          "firstName",
          "lastName",
          "uid",
          "provider",
          "userId",
          "deviceId",
        ],
        payload
      ),
      lastAuthAt: payload.timestamp,
      orgName: org.name,
      requiresPassphrase: org.settings.crypto.requiresPassphrase,
      requiresLockout: org.settings.crypto.requiresLockout,
      lockoutMs: org.settings.crypto.lockoutMs,
    } as Client.ClientUserAuth;
    draft.signedTrustedRoot = payload.signedTrustedRoot;
    draft.graph = payload.graph;
    draft.graphUpdatedAt = payload.graphUpdatedAt;
  },
});

// Clears the current device's server-side token. No local state change.
clientAction<Api.Action.RequestActions["ClearToken"]>({
  type: "apiRequestAction",
  actionType: Api.ActionType.CLEAR_TOKEN,
  loggableType: "authAction",
  authenticated: true,
});

// Clears all of a given user's server-side tokens. If the target is the
// current user, also signs out locally.
clientAction<Api.Action.RequestActions["ClearUserTokens"]>({
  type: "apiRequestAction",
  actionType: Api.ActionType.CLEAR_USER_TOKENS,
  loggableType: "authAction",
  authenticated: true,
  stateProducer: (draft, { payload: { userId } }) => {
    draft.isClearingUserTokens[userId] = true;
  },
  endStateProducer: (
    draft,
    {
      meta: {
        rootAction: {
          payload: { userId },
        },
      },
    }
  ) => {
    delete draft.isClearingUserTokens[userId];
  },
  successStateProducer: (
    draft,
    {
      meta: {
        accountIdOrCliKey,
        rootAction: {
          payload: { userId },
        },
      },
    }
  ) => {
    // if user just cleared their own tokens, sign them out
    const auth = getAuth(draft, accountIdOrCliKey)!;
    if (auth.userId == userId) {
      return {
        ...draft,
        ...R.omit(
          ["pendingEnvUpdates", "pendingEnvsUpdatedAt", "pendingInvites"],
          Client.defaultAccountState
        ),
        ...Client.defaultClientState,
        orgUserAccounts: {
          ...draft.orgUserAccounts,
          [accountIdOrCliKey!]: R.omit(
            ["token"],
            draft.orgUserAccounts[accountIdOrCliKey!]
          ),
        },
      } as Client.State;
    }
  },
});

// Clears every token in the org — always signs the current user out locally.
clientAction<Api.Action.RequestActions["ClearOrgTokens"]>({
  type: "apiRequestAction",
  actionType: Api.ActionType.CLEAR_ORG_TOKENS,
  loggableType: "authAction",
  authenticated: true,
  stateProducer: (draft) => {
    draft.isClearingOrgTokens = true;
  },
  endStateProducer: (draft) => {
    delete draft.isClearingOrgTokens;
  },
  successStateProducer: (draft, { meta: { accountIdOrCliKey } }) => {
    // since all org tokens were just cleared, sign out user
    return {
      ...draft,
      ...R.omit(
        ["pendingEnvUpdates", "pendingEnvsUpdatedAt", "pendingInvites"],
        Client.defaultAccountState
      ),
      ...Client.defaultClientState,
      orgUserAccounts: {
        ...draft.orgUserAccounts,
        [accountIdOrCliKey!]: R.omit(
          ["token"],
          draft.orgUserAccounts[accountIdOrCliKey!]
        ),
      },
    } as Client.State;
  },
});
the_stack
import * as pulumi from "@pulumi/pulumi";
import * as utilities from "../utilities";

// NOTE(review): this looks like auto-generated Pulumi provider SDK code —
// presumably regenerated from the GCP provider schema; prefer fixing the
// generator over hand-editing (TODO confirm generation source).

/**
 * Represents an Address resource.
 *
 * Each virtual machine instance has an ephemeral internal IP address and,
 * optionally, an external IP address. To communicate between instances on
 * the same network, you can use an instance's internal IP address. To
 * communicate with the Internet and instances outside of the same network,
 * you must specify the instance's external IP address.
 *
 * Internal IP addresses are ephemeral and only belong to an instance for
 * the lifetime of the instance; if the instance is deleted and recreated,
 * the instance is assigned a new internal IP address, either by Compute
 * Engine or by you. External IP addresses can be either ephemeral or
 * static.
 *
 * To get more information about Address, see:
 *
 * * [API documentation](https://cloud.google.com/compute/docs/reference/beta/addresses)
 * * How-to Guides
 *     * [Reserving a Static External IP Address](https://cloud.google.com/compute/docs/instances-and-network)
 *     * [Reserving a Static Internal IP Address](https://cloud.google.com/compute/docs/ip-addresses/reserve-static-internal-ip-address)
 *
 * ## Example Usage
 * ### Address Basic
 *
 * ```typescript
 * import * as pulumi from "@pulumi/pulumi";
 * import * as gcp from "@pulumi/gcp";
 *
 * const ipAddress = new gcp.compute.Address("ip_address", {});
 * ```
 * ### Address With Subnetwork
 *
 * ```typescript
 * import * as pulumi from "@pulumi/pulumi";
 * import * as gcp from "@pulumi/gcp";
 *
 * const defaultNetwork = new gcp.compute.Network("defaultNetwork", {});
 * const defaultSubnetwork = new gcp.compute.Subnetwork("defaultSubnetwork", {
 *     ipCidrRange: "10.0.0.0/16",
 *     region: "us-central1",
 *     network: defaultNetwork.id,
 * });
 * const internalWithSubnetAndAddress = new gcp.compute.Address("internalWithSubnetAndAddress", {
 *     subnetwork: defaultSubnetwork.id,
 *     addressType: "INTERNAL",
 *     address: "10.0.42.42",
 *     region: "us-central1",
 * });
 * ```
 * ### Address With Gce Endpoint
 *
 * ```typescript
 * import * as pulumi from "@pulumi/pulumi";
 * import * as gcp from "@pulumi/gcp";
 *
 * const internalWithGceEndpoint = new gcp.compute.Address("internal_with_gce_endpoint", {
 *     addressType: "INTERNAL",
 *     purpose: "GCE_ENDPOINT",
 * });
 * ```
 * ### Instance With Ip
 *
 * ```typescript
 * import * as pulumi from "@pulumi/pulumi";
 * import * as gcp from "@pulumi/gcp";
 *
 * const static = new gcp.compute.Address("static", {});
 * const debianImage = gcp.compute.getImage({
 *     family: "debian-9",
 *     project: "debian-cloud",
 * });
 * const instanceWithIp = new gcp.compute.Instance("instanceWithIp", {
 *     machineType: "f1-micro",
 *     zone: "us-central1-a",
 *     bootDisk: {
 *         initializeParams: {
 *             image: debianImage.then(debianImage => debianImage.selfLink),
 *         },
 *     },
 *     networkInterfaces: [{
 *         network: "default",
 *         accessConfigs: [{
 *             natIp: static.address,
 *         }],
 *     }],
 * });
 * ```
 * ### Compute Address Ipsec Interconnect
 *
 * ```typescript
 * import * as pulumi from "@pulumi/pulumi";
 * import * as gcp from "@pulumi/gcp";
 *
 * const network = new gcp.compute.Network("network", {autoCreateSubnetworks: false});
 * const ipsec_interconnect_address = new gcp.compute.Address("ipsec-interconnect-address", {
 *     addressType: "INTERNAL",
 *     purpose: "IPSEC_INTERCONNECT",
 *     address: "192.168.1.0",
 *     prefixLength: 29,
 *     network: network.selfLink,
 * });
 * ```
 *
 * ## Import
 *
 * Address can be imported using any of these accepted formats
 *
 * ```sh
 *  $ pulumi import gcp:compute/address:Address default projects/{{project}}/regions/{{region}}/addresses/{{name}}
 * ```
 *
 * ```sh
 *  $ pulumi import gcp:compute/address:Address default {{project}}/{{region}}/{{name}}
 * ```
 *
 * ```sh
 *  $ pulumi import gcp:compute/address:Address default {{region}}/{{name}}
 * ```
 *
 * ```sh
 *  $ pulumi import gcp:compute/address:Address default {{name}}
 * ```
 */
export class Address extends pulumi.CustomResource {
    /**
     * Get an existing Address resource's state with the given name, ID, and optional extra
     * properties used to qualify the lookup.
     *
     * @param name The _unique_ name of the resulting resource.
     * @param id The _unique_ provider ID of the resource to lookup.
     * @param state Any extra arguments used during the lookup.
     * @param opts Optional settings to control the behavior of the CustomResource.
     */
    public static get(name: string, id: pulumi.Input<pulumi.ID>, state?: AddressState, opts?: pulumi.CustomResourceOptions): Address {
        return new Address(name, <any>state, { ...opts, id: id });
    }

    /** @internal */
    public static readonly __pulumiType = 'gcp:compute/address:Address';

    /**
     * Returns true if the given object is an instance of Address. This is designed to work even
     * when multiple copies of the Pulumi SDK have been loaded into the same process.
     */
    public static isInstance(obj: any): obj is Address {
        if (obj === undefined || obj === null) {
            return false;
        }
        return obj['__pulumiType'] === Address.__pulumiType;
    }

    /**
     * The static external IP address represented by this resource. Only
     * IPv4 is supported. An address may only be specified for INTERNAL
     * address types. The IP address must be inside the specified subnetwork,
     * if any.
     */
    public readonly address!: pulumi.Output<string>;
    /**
     * The type of address to reserve.
     * Default value is `EXTERNAL`.
     * Possible values are `INTERNAL` and `EXTERNAL`.
     */
    public readonly addressType!: pulumi.Output<string | undefined>;
    /**
     * Creation timestamp in RFC3339 text format.
     */
    public /*out*/ readonly creationTimestamp!: pulumi.Output<string>;
    /**
     * An optional description of this resource.
     */
    public readonly description!: pulumi.Output<string | undefined>;
    /**
     * The fingerprint used for optimistic locking of this resource. Used internally during updates.
     */
    public /*out*/ readonly labelFingerprint!: pulumi.Output<string>;
    /**
     * Labels to apply to this address. A list of key->value pairs.
     */
    public readonly labels!: pulumi.Output<{[key: string]: string} | undefined>;
    /**
     * Name of the resource. The name must be 1-63 characters long, and
     * comply with RFC1035. Specifically, the name must be 1-63 characters
     * long and match the regular expression `a-z?`
     * which means the first character must be a lowercase letter, and all
     * following characters must be a dash, lowercase letter, or digit,
     * except the last character, which cannot be a dash.
     */
    public readonly name!: pulumi.Output<string>;
    /**
     * The URL of the network in which to reserve the address. This field
     * can only be used with INTERNAL type with the VPC_PEERING and
     * IPSEC_INTERCONNECT purposes.
     */
    public readonly network!: pulumi.Output<string | undefined>;
    /**
     * The networking tier used for configuring this address. If this field is not
     * specified, it is assumed to be PREMIUM.
     * Possible values are `PREMIUM` and `STANDARD`.
     */
    public readonly networkTier!: pulumi.Output<string>;
    /**
     * The prefix length if the resource represents an IP range.
     */
    public readonly prefixLength!: pulumi.Output<number | undefined>;
    /**
     * The ID of the project in which the resource belongs.
     * If it is not provided, the provider project is used.
     */
    public readonly project!: pulumi.Output<string>;
    /**
     * The purpose of this resource, which can be one of the following values:
     * * GCE_ENDPOINT for addresses that are used by VM instances, alias IP
     *   ranges, internal load balancers, and similar resources.
     * * SHARED_LOADBALANCER_VIP for an address that can be used by multiple
     *   internal load balancers.
     * * VPC_PEERING for addresses that are reserved for VPC peer networks.
     * * IPSEC_INTERCONNECT for addresses created from a private IP range
     *   that are reserved for a VLAN attachment in an IPsec-encrypted Cloud
     *   Interconnect configuration. These addresses are regional resources.
     * * PRIVATE_SERVICE_CONNECT for a private network address that is used
     *   to configure Private Service Connect. Only global internal addresses
     *   can use this purpose.
     *   This should only be set when using an Internal address.
     */
    public readonly purpose!: pulumi.Output<string>;
    /**
     * The Region in which the created address should reside.
     * If it is not provided, the provider region is used.
     */
    public readonly region!: pulumi.Output<string>;
    /**
     * The URI of the created resource.
     */
    public /*out*/ readonly selfLink!: pulumi.Output<string>;
    /**
     * The URL of the subnetwork in which to reserve the address. If an IP
     * address is specified, it must be within the subnetwork's IP range.
     * This field can only be used with INTERNAL type with
     * GCE_ENDPOINT/DNS_RESOLVER purposes.
     */
    public readonly subnetwork!: pulumi.Output<string>;
    /**
     * The URLs of the resources that are using this address.
     */
    public /*out*/ readonly users!: pulumi.Output<string[]>;

    /**
     * Create an Address resource with the given unique name, arguments, and options.
     *
     * @param name The _unique_ name of the resource.
     * @param args The arguments to use to populate this resource's properties.
     * @param opts A bag of options that control this resource's behavior.
     */
    constructor(name: string, args?: AddressArgs, opts?: pulumi.CustomResourceOptions)
    constructor(name: string, argsOrState?: AddressArgs | AddressState, opts?: pulumi.CustomResourceOptions) {
        let inputs: pulumi.Inputs = {};
        opts = opts || {};
        // When an explicit id is supplied (the `get` path above), the second
        // argument is prior state; otherwise it is constructor args and the
        // output-only properties are initialized to undefined.
        if (opts.id) {
            const state = argsOrState as AddressState | undefined;
            inputs["address"] = state ? state.address : undefined;
            inputs["addressType"] = state ? state.addressType : undefined;
            inputs["creationTimestamp"] = state ? state.creationTimestamp : undefined;
            inputs["description"] = state ? state.description : undefined;
            inputs["labelFingerprint"] = state ? state.labelFingerprint : undefined;
            inputs["labels"] = state ? state.labels : undefined;
            inputs["name"] = state ? state.name : undefined;
            inputs["network"] = state ? state.network : undefined;
            inputs["networkTier"] = state ? state.networkTier : undefined;
            inputs["prefixLength"] = state ? state.prefixLength : undefined;
            inputs["project"] = state ? state.project : undefined;
            inputs["purpose"] = state ? state.purpose : undefined;
            inputs["region"] = state ? state.region : undefined;
            inputs["selfLink"] = state ? state.selfLink : undefined;
            inputs["subnetwork"] = state ? state.subnetwork : undefined;
            inputs["users"] = state ? state.users : undefined;
        } else {
            const args = argsOrState as AddressArgs | undefined;
            inputs["address"] = args ? args.address : undefined;
            inputs["addressType"] = args ? args.addressType : undefined;
            inputs["description"] = args ? args.description : undefined;
            inputs["labels"] = args ? args.labels : undefined;
            inputs["name"] = args ? args.name : undefined;
            inputs["network"] = args ? args.network : undefined;
            inputs["networkTier"] = args ? args.networkTier : undefined;
            inputs["prefixLength"] = args ? args.prefixLength : undefined;
            inputs["project"] = args ? args.project : undefined;
            inputs["purpose"] = args ? args.purpose : undefined;
            inputs["region"] = args ? args.region : undefined;
            inputs["subnetwork"] = args ? args.subnetwork : undefined;
            inputs["creationTimestamp"] = undefined /*out*/;
            inputs["labelFingerprint"] = undefined /*out*/;
            inputs["selfLink"] = undefined /*out*/;
            inputs["users"] = undefined /*out*/;
        }
        if (!opts.version) {
            opts = pulumi.mergeOptions(opts, { version: utilities.getVersion()});
        }
        super(Address.__pulumiType, name, inputs, opts);
    }
}

/**
 * Input properties used for looking up and filtering Address resources.
 */
export interface AddressState {
    /**
     * The static external IP address represented by this resource. Only
     * IPv4 is supported. An address may only be specified for INTERNAL
     * address types. The IP address must be inside the specified subnetwork,
     * if any.
     */
    address?: pulumi.Input<string>;
    /**
     * The type of address to reserve.
     * Default value is `EXTERNAL`.
     * Possible values are `INTERNAL` and `EXTERNAL`.
     */
    addressType?: pulumi.Input<string>;
    /**
     * Creation timestamp in RFC3339 text format.
     */
    creationTimestamp?: pulumi.Input<string>;
    /**
     * An optional description of this resource.
     */
    description?: pulumi.Input<string>;
    /**
     * The fingerprint used for optimistic locking of this resource. Used internally during updates.
     */
    labelFingerprint?: pulumi.Input<string>;
    /**
     * Labels to apply to this address. A list of key->value pairs.
     */
    labels?: pulumi.Input<{[key: string]: pulumi.Input<string>}>;
    /**
     * Name of the resource. The name must be 1-63 characters long, and
     * comply with RFC1035. Specifically, the name must be 1-63 characters
     * long and match the regular expression `a-z?`
     * which means the first character must be a lowercase letter, and all
     * following characters must be a dash, lowercase letter, or digit,
     * except the last character, which cannot be a dash.
     */
    name?: pulumi.Input<string>;
    /**
     * The URL of the network in which to reserve the address. This field
     * can only be used with INTERNAL type with the VPC_PEERING and
     * IPSEC_INTERCONNECT purposes.
     */
    network?: pulumi.Input<string>;
    /**
     * The networking tier used for configuring this address. If this field is not
     * specified, it is assumed to be PREMIUM.
     * Possible values are `PREMIUM` and `STANDARD`.
     */
    networkTier?: pulumi.Input<string>;
    /**
     * The prefix length if the resource represents an IP range.
     */
    prefixLength?: pulumi.Input<number>;
    /**
     * The ID of the project in which the resource belongs.
     * If it is not provided, the provider project is used.
     */
    project?: pulumi.Input<string>;
    /**
     * The purpose of this resource, which can be one of the following values:
     * * GCE_ENDPOINT for addresses that are used by VM instances, alias IP
     *   ranges, internal load balancers, and similar resources.
     * * SHARED_LOADBALANCER_VIP for an address that can be used by multiple
     *   internal load balancers.
     * * VPC_PEERING for addresses that are reserved for VPC peer networks.
     * * IPSEC_INTERCONNECT for addresses created from a private IP range
     *   that are reserved for a VLAN attachment in an IPsec-encrypted Cloud
     *   Interconnect configuration. These addresses are regional resources.
     * * PRIVATE_SERVICE_CONNECT for a private network address that is used
     *   to configure Private Service Connect. Only global internal addresses
     *   can use this purpose.
     *   This should only be set when using an Internal address.
     */
    purpose?: pulumi.Input<string>;
    /**
     * The Region in which the created address should reside.
     * If it is not provided, the provider region is used.
     */
    region?: pulumi.Input<string>;
    /**
     * The URI of the created resource.
     */
    selfLink?: pulumi.Input<string>;
    /**
     * The URL of the subnetwork in which to reserve the address. If an IP
     * address is specified, it must be within the subnetwork's IP range.
     * This field can only be used with INTERNAL type with
     * GCE_ENDPOINT/DNS_RESOLVER purposes.
     */
    subnetwork?: pulumi.Input<string>;
    /**
     * The URLs of the resources that are using this address.
     */
    users?: pulumi.Input<pulumi.Input<string>[]>;
}

/**
 * The set of arguments for constructing an Address resource.
 */
export interface AddressArgs {
    /**
     * The static external IP address represented by this resource. Only
     * IPv4 is supported. An address may only be specified for INTERNAL
     * address types. The IP address must be inside the specified subnetwork,
     * if any.
     */
    address?: pulumi.Input<string>;
    /**
     * The type of address to reserve.
     * Default value is `EXTERNAL`.
     * Possible values are `INTERNAL` and `EXTERNAL`.
     */
    addressType?: pulumi.Input<string>;
    /**
     * An optional description of this resource.
     */
    description?: pulumi.Input<string>;
    /**
     * Labels to apply to this address. A list of key->value pairs.
     */
    labels?: pulumi.Input<{[key: string]: pulumi.Input<string>}>;
    /**
     * Name of the resource. The name must be 1-63 characters long, and
     * comply with RFC1035. Specifically, the name must be 1-63 characters
     * long and match the regular expression `a-z?`
     * which means the first character must be a lowercase letter, and all
     * following characters must be a dash, lowercase letter, or digit,
     * except the last character, which cannot be a dash.
     */
    name?: pulumi.Input<string>;
    /**
     * The URL of the network in which to reserve the address. This field
     * can only be used with INTERNAL type with the VPC_PEERING and
     * IPSEC_INTERCONNECT purposes.
     */
    network?: pulumi.Input<string>;
    /**
     * The networking tier used for configuring this address. If this field is not
     * specified, it is assumed to be PREMIUM.
     * Possible values are `PREMIUM` and `STANDARD`.
     */
    networkTier?: pulumi.Input<string>;
    /**
     * The prefix length if the resource represents an IP range.
     */
    prefixLength?: pulumi.Input<number>;
    /**
     * The ID of the project in which the resource belongs.
     * If it is not provided, the provider project is used.
     */
    project?: pulumi.Input<string>;
    /**
     * The purpose of this resource, which can be one of the following values:
     * * GCE_ENDPOINT for addresses that are used by VM instances, alias IP
     *   ranges, internal load balancers, and similar resources.
     * * SHARED_LOADBALANCER_VIP for an address that can be used by multiple
     *   internal load balancers.
     * * VPC_PEERING for addresses that are reserved for VPC peer networks.
     * * IPSEC_INTERCONNECT for addresses created from a private IP range
     *   that are reserved for a VLAN attachment in an IPsec-encrypted Cloud
     *   Interconnect configuration. These addresses are regional resources.
     * * PRIVATE_SERVICE_CONNECT for a private network address that is used
     *   to configure Private Service Connect. Only global internal addresses
     *   can use this purpose.
     *   This should only be set when using an Internal address.
     */
    purpose?: pulumi.Input<string>;
    /**
     * The Region in which the created address should reside.
     * If it is not provided, the provider region is used.
     */
    region?: pulumi.Input<string>;
    /**
     * The URL of the subnetwork in which to reserve the address. If an IP
     * address is specified, it must be within the subnetwork's IP range.
     * This field can only be used with INTERNAL type with
     * GCE_ENDPOINT/DNS_RESOLVER purposes.
     */
    subnetwork?: pulumi.Input<string>;
}
the_stack
import { Component, ReactNode } from 'react'; declare function ReactReconciler<Type, Props, Container, Instance, TextInstance, HydratableInstance, PublicInstance, HostContext, UpdatePayload, ChildSet, TimeoutHandle, NoTimeout>( // tslint:disable-next-line:no-unnecessary-generics config: ReactReconciler.HostConfig<Type, Props, Container, Instance, TextInstance, HydratableInstance, PublicInstance, HostContext, UpdatePayload, ChildSet, TimeoutHandle, NoTimeout>, ): ReactReconciler.Reconciler<Instance, TextInstance, Container, PublicInstance>; declare namespace ReactReconciler { // react-reconciler/ReactFiber // A Fiber is work on a Component that needs to be done or was done. There can // be more than one per component. interface Fiber { // These first fields are conceptually members of an Instance. This used to // be split into a separate type and intersected with the other Fiber fields, // but until Flow fixes its intersection bugs, we've merged them into a // single type. // An Instance is shared between all versions of a component. We can easily // break this out into a separate object to avoid copying so much to the // alternate versions of the tree. We put this on a single object for now to // minimize the number of objects created during the initial render. // Tag identifying the type of fiber. tag: WorkTag; // Unique identifier of this child. key: null | string; // The value of element.type which is used to preserve the identity during // reconciliation of this child. elementType: any; // The resolved function/class/ associated with this fiber. type: any; // The local state associated with this fiber. stateNode: any; // Conceptual aliases // parent : Instance -> return The parent happens to be the same as the // return fiber since we've merged the fiber and instance. // Remaining fields belong to Fiber // The Fiber to return to after finishing processing this one. 
// This is effectively the parent, but there can be multiple parents (two) // so this is only the parent of the thing we're currently processing. // It is conceptually the same as the return address of a stack frame. return: Fiber | null; // Singly Linked List Tree Structure. child: Fiber | null; sibling: Fiber | null; index: number; // The ref last used to attach this node. // I'll avoid adding an owner field for prod and model that as functions. ref: null | (((handle: any) => void) & { _stringRef: string | null | undefined }) | RefObject; // Input is the data coming into process this fiber. Arguments. Props. pendingProps: any; // This type will be more specific once we overload the tag. memoizedProps: any; // The props used to create the output. // A queue of state updates and callbacks. updateQueue: UpdateQueue<any> | null; // The state used to create the output memoizedState: any; // A linked-list of contexts that this fiber depends on firstContextDependency: ContextDependency<any> | null; // Bitfield that describes properties about the fiber and its subtree. E.g. // the ConcurrentMode flag indicates whether the subtree should be async-by- // default. When a fiber is created, it inherits the mode of its // parent. Additional flags can be set at creation time, but after that the // value should remain unchanged throughout the fiber's lifetime, particularly // before its child fibers are created. mode: TypeOfMode; // Effect effectTag: SideEffectTag; // Singly linked list fast path to the next fiber with side-effects. nextEffect: Fiber | null; // The first and last fiber with side-effect within this subtree. This allows // us to reuse a slice of the linked list when we reuse the work done within // this fiber. firstEffect: Fiber | null; lastEffect: Fiber | null; // Represents a time in the future by which this work should be completed. // Does not include work found in its subtree. 
expirationTime: ExpirationTime; // This is used to quickly determine if a subtree has no pending changes. childExpirationTime: ExpirationTime; // This is a pooled version of a Fiber. Every fiber that gets updated will // eventually have a pair. There are cases when we can clean up pairs to save // memory if we need to. alternate: Fiber | null; // Time spent rendering this Fiber and its descendants for the current update. // This tells us how well the tree makes use of sCU for memoization. // It is reset to 0 each time we render and only updated when we don't bailout. // This field is only set when the enableProfilerTimer flag is enabled. actualDuration?: number; // If the Fiber is currently active in the "render" phase, // This marks the time at which the work began. // This field is only set when the enableProfilerTimer flag is enabled. actualStartTime?: number; // Duration of the most recent render time for this Fiber. // This value is not updated when we bailout for memoization purposes. // This field is only set when the enableProfilerTimer flag is enabled. selfBaseDuration?: number; // Sum of base times for all descedents of this Fiber. // This value bubbles up during the "complete" phase. // This field is only set when the enableProfilerTimer flag is enabled. treeBaseDuration?: number; // Conceptual aliases // workInProgress : Fiber -> alternate The alternate used for reuse happens // to be the same as work in progress. 
// __DEV__ only _debugID?: number; _debugSource?: Source | null; _debugOwner?: Fiber | null; _debugIsCurrentlyTiming?: boolean; } // react-reconciler/ReactFiberExpirationTime type ExpirationTime = number; // react-reconciler/ReactFiberNewContext interface ContextDependency<T> { context: ReactContext<T>; observedBits: number; next: ContextDependency<any> | null; } // react-reconciler/ReactFiberReconciler type OpaqueHandle = Fiber; type OpaqueRoot = FiberRoot; interface HostConfig<Type, Props, Container, Instance, TextInstance, HydratableInstance, PublicInstance, HostContext, UpdatePayload, ChildSet, TimeoutHandle, NoTimeout> { getPublicInstance(instance: Instance | TextInstance): PublicInstance; getRootHostContext(rootContainerInstance: Container): HostContext; getChildHostContext(parentHostContext: HostContext, type: Type, rootContainerInstance: Container): HostContext; prepareForCommit(containerInfo: Container): void; resetAfterCommit(containerInfo: Container): void; createInstance( type: Type, props: Props, rootContainerInstance: Container, hostContext: HostContext, internalInstanceHandle: OpaqueHandle, ): Instance; appendInitialChild(parentInstance: Instance, child: Instance | TextInstance): void; finalizeInitialChildren( parentInstance: Instance, type: Type, props: Props, rootContainerInstance: Container, hostContext: HostContext, ): boolean; prepareUpdate( instance: Instance, type: Type, oldProps: Props, newProps: Props, rootContainerInstance: Container, hostContext: HostContext, ): null | UpdatePayload; shouldSetTextContent(type: Type, props: Props): boolean; shouldDeprioritizeSubtree(type: Type, props: Props): boolean; createTextInstance( text: string, rootContainerInstance: Container, hostContext: HostContext, internalInstanceHandle: OpaqueHandle, ): TextInstance; scheduleDeferredCallback( callback: () => any, options?: { timeout: number }, ): any; cancelDeferredCallback(callbackID: any): void; setTimeout(handler: (...args: any[]) => void, timeout: number): 
TimeoutHandle | NoTimeout; clearTimeout(handle: TimeoutHandle | NoTimeout): void; noTimeout: NoTimeout; now(): number; // Temporary workaround for scenario where multiple renderers concurrently // render using the same context objects. E.g. React DOM and React ART on the // same page. DOM is the primary renderer; ART is the secondary renderer. isPrimaryRenderer: boolean; supportsMutation: boolean; supportsPersistence: boolean; supportsHydration: boolean; // ------------------- // Mutation // (optional) // ------------------- appendChild?(parentInstance: Instance, child: Instance | TextInstance): void; appendChildToContainer?(container: Container, child: Instance | TextInstance): void; commitTextUpdate?(textInstance: TextInstance, oldText: string, newText: string): void; commitMount?( instance: Instance, type: Type, newProps: Props, internalInstanceHandle: OpaqueHandle, ): void; commitUpdate?( instance: Instance, updatePayload: UpdatePayload, type: Type, oldProps: Props, newProps: Props, internalInstanceHandle: OpaqueHandle, ): void; insertBefore?(parentInstance: Instance, child: Instance | TextInstance, beforeChild: Instance | TextInstance): void; insertInContainerBefore?( container: Container, child: Instance | TextInstance, beforeChild: Instance | TextInstance, ): void; removeChild?(parentInstance: Instance, child: Instance | TextInstance): void; removeChildFromContainer?(container: Container, child: Instance | TextInstance): void; resetTextContent?(instance: Instance): void; // ------------------- // Persistence // (optional) // ------------------- cloneInstance?( instance: Instance, updatePayload: null | UpdatePayload, type: Type, oldProps: Props, newProps: Props, internalInstanceHandle: OpaqueHandle, keepChildren: boolean, recyclableInstance: Instance, ): Instance; createContainerChildSet?(container: Container): ChildSet; appendChildToContainerChildSet?(childSet: ChildSet, child: Instance | TextInstance): void; finalizeContainerChildren?(container: Container, 
newChildren: ChildSet): void; replaceContainerChildren?(container: Container, newChildren: ChildSet): void; // ------------------- // Hydration // (optional) // ------------------- canHydrateInstance?(instance: HydratableInstance, type: Type, props: Props): null | Instance; canHydrateTextInstance?(instance: HydratableInstance, text: string): null | TextInstance; getNextHydratableSibling?(instance: Instance | TextInstance | HydratableInstance): null | HydratableInstance; getFirstHydratableChild?(parentInstance: Instance | Container): null | HydratableInstance; hydrateInstance?( instance: Instance, type: Type, props: Props, rootContainerInstance: Container, hostContext: HostContext, internalInstanceHandle: OpaqueHandle, ): null | UpdatePayload; hydrateTextInstance?( textInstance: TextInstance, text: string, internalInstanceHandle: OpaqueHandle, ): boolean; didNotMatchHydratedContainerTextInstance?( parentContainer: Container, textInstance: TextInstance, text: string, ): void; didNotMatchHydratedTextInstance?( parentType: Type, parentProps: Props, parentInstance: Instance, textInstance: TextInstance, text: string, ): void; didNotHydrateContainerInstance?(parentContainer: Container, instance: Instance | TextInstance): void; didNotHydrateInstance?( parentType: Type, parentProps: Props, parentInstance: Instance, instance: Instance | TextInstance, ): void; didNotFindHydratableContainerInstance?( parentContainer: Container, type: Type, props: Props, ): void; didNotFindHydratableContainerTextInstance?( parentContainer: Container, text: string, ): void; didNotFindHydratableInstance?( parentType: Type, parentProps: Props, parentInstance: Instance, type: Type, props: Props, ): void; didNotFindHydratableTextInstance?( parentType: Type, parentProps: Props, parentInstance: Instance, text: string, ): void; } // 0 is PROD, 1 is DEV. // Might add PROFILE later. 
type BundleType = 0 | 1; interface DevToolsConfig<Instance, TextInstance> { bundleType: BundleType; version: string; rendererPackageName: string; // Note: this actually *does* depend on Fiber internal fields. // Used by "inspect clicked DOM element" in React DevTools. findFiberByHostInstance?: (instance: Instance | TextInstance) => Fiber; // Used by RN in-app inspector. // This API is unfortunately RN-specific. // TODO: Change it to accept Fiber instead and type it properly. getInspectorDataForViewTag?: (tag: number) => object; } interface Reconciler<Instance, TextInstance, Container, PublicInstance> { updateContainerAtExpirationTime( element: ReactNodeList, container: OpaqueRoot, parentComponent: Component<any, any> | null | undefined, expirationTime: ExpirationTime, callback: () => void | null | undefined, ): ExpirationTime; createContainer( containerInfo: Container, isConcurrent: boolean, hydrate: boolean, ): OpaqueRoot; updateContainer( element: ReactNodeList, container: OpaqueRoot, parentComponent: Component<any, any> | null | undefined, callback: () => void | null | undefined, ): ExpirationTime; flushRoot(root: OpaqueRoot, expirationTime: ExpirationTime): void; requestWork(root: OpaqueRoot, expirationTime: ExpirationTime): void; computeUniqueAsyncExpiration(): ExpirationTime; batchedUpdates<A>(fn: () => A): A; unbatchedUpdates<A>(fn: () => A): A; deferredUpdates<A>(fn: () => A): A; syncUpdates<A>(fn: () => A): A; interactiveUpdates<A>(fn: () => A): A; flushInteractiveUpdates(): void; flushControlled(fn: () => any): void; flushSync<A>(fn: () => A): A; // Used to extract the return value from the initial render. Legacy API. getPublicRootInstance( container: OpaqueRoot, ): Component<any, any> | PublicInstance | null; // Use for findDOMNode/findHostNode. Legacy API. findHostInstance(component: object): PublicInstance | null; // Used internally for filtering out portals. Legacy API. 
findHostInstanceWithNoPortals(component: Fiber): PublicInstance | null; injectIntoDevTools(devToolsConfig: DevToolsConfig<Instance, TextInstance>): boolean; } // react-reconciler/ReactFiberRoot // TODO: This should be lifted into the renderer. interface Batch { _defer: boolean; _expirationTime: ExpirationTime; _onComplete: () => any; _next: Batch | null; } type PendingInteractionMap = Map<ExpirationTime, Set<Interaction>>; interface BaseFiberRootProperties { // Any additional information from the host associated with this root. containerInfo: any; // Used only by persistent updates. pendingChildren: any; // The currently active root fiber. This is the mutable root of the tree. current: Fiber; // The following priority levels are used to distinguish between 1) // uncommitted work, 2) uncommitted work that is suspended, and 3) uncommitted // work that may be unsuspended. We choose not to track each individual // pending level, trading granularity for performance. // // The earliest and latest priority levels that are suspended from committing. earliestSuspendedTime: ExpirationTime; latestSuspendedTime: ExpirationTime; // The earliest and latest priority levels that are not known to be suspended. earliestPendingTime: ExpirationTime; latestPendingTime: ExpirationTime; // The latest priority level that was pinged by a resolved promise and can // be retried. latestPingedTime: ExpirationTime; pingCache: | WeakMap<Thenable, Set<ExpirationTime>> | Map<Thenable, Set<ExpirationTime>> | null; // If an error is thrown, and there are no more updates in the queue, we try // rendering from the root one more time, synchronously, before handling // the error. didError: boolean; pendingCommitExpirationTime: ExpirationTime; // A finished work-in-progress HostRoot that's ready to be committed. finishedWork: Fiber | null; // Timeout handle returned by setTimeout. Used to cancel a pending timeout, if // it's superseded by a new one. 
timeoutHandle: any; // Top context object, used by renderSubtreeIntoContainer context: object | null; pendingContext: object | null; // Determines if we should attempt to hydrate on the initial mount readonly hydrate: boolean; // Remaining expiration time on this root. // TODO: Lift this into the renderer nextExpirationTimeToWorkOn: ExpirationTime; expirationTime: ExpirationTime; // List of top-level batches. This list indicates whether a commit should be // deferred. Also contains completion callbacks. // TODO: Lift this into the renderer firstBatch: Batch | null; // Linked-list of roots nextScheduledRoot: FiberRoot | null; } // The following attributes are only used by interaction tracing builds. // They enable interactions to be associated with their async work, // And expose interaction metadata to the React DevTools Profiler plugin. // Note that these attributes are only defined when the enableSchedulerTracing flag is enabled. interface ProfilingOnlyFiberRootProperties { interactionThreadID: number; memoizedInteractions: Set<Interaction>; pendingInteractionMap: PendingInteractionMap; } type FiberRoot = BaseFiberRootProperties & ProfilingOnlyFiberRootProperties; // react-reconciler/ReactFiberScheduler interface Thenable { then(resolve: () => any, reject?: () => any): any; } // react-reconciler/ReactTypeOfMode type TypeOfMode = number; // react-reconciler/ReactUpdateQueue interface Update<State> { expirationTime: ExpirationTime; tag: 0 | 1 | 2 | 3; payload: any; callback: (() => any) | null; next: Update<State> | null; nextEffect: Update<State> | null; } interface UpdateQueue<State> { baseState: State; firstUpdate: Update<State> | null; lastUpdate: Update<State> | null; firstCapturedUpdate: Update<State> | null; lastCapturedUpdate: Update<State> | null; firstEffect: Update<State> | null; lastEffect: Update<State> | null; firstCapturedEffect: Update<State> | null; lastCapturedEffect: Update<State> | null; } // scheduler/Tracing interface Interaction { __count: 
number; id: number; name: string; timestamp: number; } // shared/ReactElementType interface Source { fileName: string; lineNumber: number; } // shared/ReactSideEffectTags type SideEffectTag = number; // shared/ReactTypes type ReactEmpty = null | undefined | boolean; type ReactNodeList = ReactEmpty | ReactNode; interface ReactProviderType<T> { $$typeof: symbol | number; _context: ReactContext<T>; } interface ReactContext<T> { $$typeof: symbol | number; Consumer: ReactContext<T>; Provider: ReactProviderType<T>; unstable_read: () => T; _calculateChangedBits: ((a: T, b: T) => number) | null; _currentValue: T; _currentValue2: T; _threadCount: number; // DEV only _currentRenderer?: object | null; _currentRenderer2?: object | null; } interface RefObject { current: any; } // shared/ReactWorkTags type WorkTag = | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18; } export = ReactReconciler;
the_stack
import * as vscode from 'vscode'; import * as path from 'path'; import * as fs from 'fs-extra'; import { dxService, FCOauth, FCConnection, getHomeDir, notifications, saveConfigFile, readConfigFile, SFDX, checkConfig, enterCredentials, getVSCodeSetting, containerService, } from '.'; const jsforce = require('jsforce'); import klaw = require('klaw'); import { FCCancellationToken } from '../commands'; import { VSCODE_SETTINGS } from './configuration'; export class FCConnectionService implements vscode.TreeDataProvider<FCConnection> { private static instance: FCConnectionService; private _onDidChangeTreeData: vscode.EventEmitter<FCConnection | undefined> = new vscode.EventEmitter<FCConnection | undefined>(); private loggingIn: boolean = false; private refreshingConns: boolean = false; public readonly onDidChangeTreeData: vscode.Event<FCConnection | undefined> = this._onDidChangeTreeData.event; public currentConnection: FCConnection | undefined; public connections: Map<string, FCConnection>; public constructor() { notifications.writeLog('Starting connection service...'); this.connections = new Map<string, FCConnection>(); } public static getInstance() { if (!FCConnectionService.instance) { FCConnectionService.instance = new FCConnectionService(); } return FCConnectionService.instance; } public getTreeItem(element: FCConnection): vscode.TreeItem { return element; } public getChildren(element?: FCConnection): FCConnection[] { if (!element) { // This is the root node return Array.from(this.connections.values()).sort(this.sortFunc); } return []; } public getParent(_element: FCConnection): any { return null; // this is the parent } public refreshConnsStatus() { if (this.connections) { this.connections.forEach((conn) => { conn.showConnection(); }); this._onDidChangeTreeData.fire(undefined); } } public isLoggedIn(): boolean { const loggedIn: boolean = vscode.window.forceCode.conn !== undefined && this.currentConnection?.connection !== undefined && 
this.currentConnection?.isLoggedIn === true; if (loggedIn) { vscode.commands.executeCommand('setContext', 'ForceCodeLoggedIn', true); } else { vscode.commands.executeCommand('setContext', 'ForceCodeLoggedIn', false); } return loggedIn; } public getSavedUsernames(): Promise<string[]> { return new Promise((resolve) => { let usernames: string[] = []; let fcPath: string = path.join(vscode.window.forceCode.workspaceRoot, '.forceCode'); if (fs.existsSync(fcPath)) { klaw(fcPath, { depthLimit: 0 }) .on('data', function (file) { if (file.stats.isDirectory()) { let fileName: string | undefined = file.path.split(path.sep).pop(); if (fileName && fileName.indexOf('@') > 0) { usernames.push(fileName); } } }) .on('end', function () { resolve(usernames); }) .on('error', (err: Error, item: klaw.Item) => { notifications.writeLog( `ForceCode: Error reading ${item.path}. Message: ${err.message}` ); }); } else { resolve(usernames); } }); } public async refreshConnections(): Promise<boolean> { if (!this.refreshingConns) { this.refreshingConns = true; let orgs = await dxService.orgList(); const uNames = await this.getSavedUsernames(); uNames.forEach((uName) => { this.addConnection({ username: uName }); }); if (orgs) { if (getVSCodeSetting(VSCODE_SETTINGS.onlyShowProjectUsernames)) { orgs = orgs.filter((currentOrg) => uNames.includes(currentOrg.username || '')); } orgs.forEach((curOrg) => { this.addConnection(curOrg); }); } // tell the connections to refresh their text/icons this.refreshConnsStatus(); notifications.writeLog('Orgs refreshed'); this.refreshingConns = false; } return Promise.resolve(true); } // this is a check that will refresh the orgs and check if logged in. 
if not, it asks to log in public async checkLoginStatus( reason: any, cancellationToken: FCCancellationToken ): Promise<boolean> { const message = reason?.message || reason; notifications.writeLog('Checking login status: ' + message); await this.refreshConnections(); if ( !this.isLoggedIn() || (message && (message.indexOf('expired access/refresh token') !== -1 || message.indexOf('ECONNRESET') !== -1)) ) { if (this.currentConnection) { this.currentConnection.isLoggedIn = false; } return this.connect(this.currentConnection?.orgInfo, cancellationToken); } else { return Promise.resolve(true); } } public async connect( orgInfo: FCOauth | SFDX | undefined, cancellationToken: FCCancellationToken ): Promise<boolean> { const service = this; let username: string | undefined; if (this.loggingIn) { return Promise.resolve(false); } this.loggingIn = true; if (orgInfo) { username = orgInfo.username; } let finalRes: any; try { service.currentConnection = service.getConnByUsername(username); finalRes = service.isLoggedIn(); const connection = await setupConn(finalRes); await login(connection); await vscode.window.forceCode.connect(); } catch (err) { notifications.writeLog(err); finalRes = false; } this.loggingIn = false; return Promise.resolve(finalRes); // pretty much this whole function is skipped if a user is logged in already async function setupConn(isLoggedIn: boolean): Promise<FCConnection> { if (isLoggedIn && service.currentConnection) { vscode.window.forceCode.config = readConfigFile(username); return Promise.resolve(service.currentConnection); } let orgInf: FCOauth; try { orgInf = await dxService.getOrgInfo(username); } catch (_error) { if (service.currentConnection) { service.currentConnection.connection = undefined; } orgInf = await enterCredentials(cancellationToken); } service.currentConnection = service.addConnection(orgInf, true); if (!service.currentConnection) { return Promise.reject('Error setting up connection: setupConn'); } vscode.window.forceCode.config = 
readConfigFile(orgInf.username); const sfdxPath = path.join(getHomeDir(), '.sfdx', orgInf.username + '.json'); const refreshToken: string = fs.readJsonSync(sfdxPath).refreshToken; let connection = new jsforce.Connection({ oauth2: { clientId: service.currentConnection.orgInfo.clientId || 'SalesforceDevelopmentExperience', }, instanceUrl: service.currentConnection.orgInfo.instanceUrl, accessToken: service.currentConnection.orgInfo.accessToken, refreshToken: refreshToken, version: vscode.window.forceCode?.config?.apiVersion || getVSCodeSetting(VSCODE_SETTINGS.defaultApiVersion), }); service.currentConnection.connection = connection; // get the user id const identity = await connection.identity(); service.currentConnection.orgInfo.userId = identity.user_id; service.currentConnection.isLoggedIn = true; vscode.commands.executeCommand('setContext', 'ForceCodeLoggedIn', true); return Promise.resolve(service.currentConnection); } async function login(fcConnection: FCConnection): Promise<void> { containerService.clear(); const config = await checkConfig(vscode.window.forceCode.config); saveConfigFile(config.username, config); if (!fcConnection.connection) { return Promise.reject('Error setting up connection: login'); } vscode.window.forceCode.conn = fcConnection.connection; // writing to force.json will trigger the file watcher. 
this, in turn, will call configuration() // which will finish setting up the login process fs.outputFileSync( path.join(vscode.window.forceCode.workspaceRoot, 'force.json'), JSON.stringify({ lastUsername: config.username }, undefined, 4) ); const describe = await vscode.window.forceCode.conn.metadata.describe(); vscode.window.forceCode.describe = describe; if (vscode.window.forceCode.config.useSourceFormat) { // TODO these types currently aren't supported for whatever reason, but can be retrieved via SFDX and non-source // format in ForceCode...so not sure why it doesn't work here as of yet let excludeVals = new Set<String>([ 'AIApplication', 'AIApplicationConfig', 'MLDataDefinition', 'MLPredictionDefinition', ]); vscode.window.forceCode.describe.metadataObjects = vscode.window.forceCode.describe.metadataObjects.filter( (type) => !excludeVals.has(type.xmlName) ); } vscode.window.forceCode.config.prefix = describe.organizationNamespace; notifications.writeLog('Done retrieving metadata records'); return Promise.resolve(); } } // END connect() ===================================================================== public disconnect(conn: FCConnection | undefined): Promise<any> { if (!conn) { return Promise.resolve(); } if (this.connections.has(conn.orgInfo.username)) { this.connections.delete(conn.orgInfo.username); return conn.disconnect(); } else { return Promise.resolve(); } } public getConnByUsername(userName: string | undefined): FCConnection | undefined { return this.connections.get(userName ? 
userName : ''); } public addConnection( orgInfo: FCOauth | SFDX | undefined, saveToken?: boolean ): FCConnection | undefined { if (orgInfo?.username) { let fcConn: FCConnection; if (!this.connections.has(orgInfo.username)) { fcConn = new FCConnection(this, orgInfo); } else { fcConn = this.connections.get(orgInfo.username)!; const aToken: string | undefined = fcConn.orgInfo.accessToken; Object.assign(fcConn.orgInfo, orgInfo); // only the getOrgInfo command gives us the right access token, for some reason the others don't work if (!saveToken) { fcConn.orgInfo.accessToken = aToken; } } fcConn.isLoggedIn = !orgInfo.isExpired && (orgInfo.connectedStatus === 'Connected' || orgInfo.connectedStatus === 'Unknown'); this.connections.set(orgInfo.username, fcConn); return fcConn; } else { return undefined; } } private sortFunc(a: FCConnection, b: FCConnection): number { let aStr = a.getLabel().toUpperCase() || ''; let bStr = b.getLabel().toUpperCase() || ''; return aStr.localeCompare(bStr); } }
the_stack
import React, { useEffect, useRef, useState } from 'react'; import _ from 'lodash'; import { Block } from 'baseui/block'; import { useSnackbar, DURATION } from 'baseui/snackbar'; import { Check } from 'baseui/icon'; import { BiUpload } from 'react-icons/all'; import { Notification } from 'baseui/notification'; import { createGeneration, Generation, Genome, Percentage, Probability, select } from '../../libs/genetic'; import { CarsLossType, CarsInProgressType } from './PopulationTable'; import { CarLicencePlateType, CarsType, CarType } from '../world/types/car'; import { DEFAULT_BATCH_SIZE, DEFAULT_GENERATION_LIFETIME, DEFAULT_GENERATION_SIZE, DEFAULT_LONG_LIVING_CHAMPIONS_PERCENTAGE, DEFAULT_MUTATION_PROBABILITY, DEFAULT_PERFORMANCE_BOOST, SECOND, } from './EvolutionBoardParams'; import { carLossToFitness, GENOME_LENGTH } from '../../libs/carGenetic'; import { generateWorldVersion, generationToCars, loadGenerationFromStorage, removeGenerationFromStorage, saveGenerationToStorage } from './utils/evolution'; import { deleteSearchParam, getBooleanSearchParam, getFloatSearchParam, getIntSearchParam, setSearchParam, } from '../../utils/url'; import EvolutionAnalytics from './EvolutionAnalytics'; import { loggerBuilder } from '../../utils/logger'; import ParkingAutomatic from '../world/parkings/ParkingAutomatic'; import World from '../world/World'; import { BAD_SIMULATION_BATCH_INDEX_CHECK, BAD_SIMULATION_MIN_LOSS_INCREASE_PERCENTAGE, BAD_SIMULATION_RETRIES_ENABLED, BAD_SIMULATION_RETRIES_NUM, FITNESS_ALPHA } from './constants/evolution'; import EvolutionCheckpointSaver, { EvolutionCheckpoint } from './EvolutionCheckpointSaver'; import { ARTICLE_LINK } from '../../constants/links'; import { DynamicCarsPosition } from '../world/constants/cars'; import { DYNAMIC_CARS_POSITION_FRONT } from '../world/constants/cars'; const GENERATION_SIZE_URL_PARAM = 'generation'; const GROUP_SIZE_URL_PARAM = 'group'; const GENERATION_LIFETIME_URL_PARAM = 'lifetime'; const 
MUTATION_PROBABILITY_URL_PARAM = 'mutation'; const LONG_LIVING_CHAMPIONS_URL_PARAM = 'champions'; const PERFORMANCE_BOOST_URL_PARAM = 'boost'; // Genome array, concatenated to a string (i.e. '1010011') type GenomeKey = string; type GenomeLossType = Record<GenomeKey, number | null>; function EvolutionTabEvolution() { const {enqueue} = useSnackbar(); const [performanceBoost, setPerformanceBoost] = useState<boolean>( getBooleanSearchParam(PERFORMANCE_BOOST_URL_PARAM, DEFAULT_PERFORMANCE_BOOST) ); const [worldIndex, setWorldIndex] = useState<number>(0); const [generationSize, setGenerationSize] = useState<number>( getIntSearchParam(GENERATION_SIZE_URL_PARAM, DEFAULT_GENERATION_SIZE) ); const [restoredFromGenerationIndex, setRestoredFromGenerationIndex] = useState<number | null>(null); const [generationIndex, setGenerationIndex] = useState<number | null>(null); const [generation, setGeneration] = useState<Generation>([]); const [generationLifetime, setGenerationLifetime] = useState<number>( getIntSearchParam(GENERATION_LIFETIME_URL_PARAM, DEFAULT_GENERATION_LIFETIME) ); const [cars, setCars] = useState<CarsType>({}); const [carsBatch, setCarsBatch] = useState<CarType[]>([]); const [carsBatchSize, setCarsBatchSize] = useState<number>( getIntSearchParam(GROUP_SIZE_URL_PARAM, DEFAULT_BATCH_SIZE) ); const [carsBatchIndex, setCarsBatchIndex] = useState<number | null>(null); const carsRef = useRef<CarsType>({}); const [bestGenome, setBestGenome] = useState<Genome | null>(null); const [minLoss, setMinLoss] = useState<number | null>(null); const [bestCarLicencePlate, setBestCarLicencePlate] = useState<CarLicencePlateType | null>(null); const [secondBestGenome, setSecondBestGenome] = useState<Genome | null>(null); const [secondMinLoss, setSecondMinLoss] = useState<number | null>(null); const [secondBestCarLicencePlate, setSecondBestCarLicencePlate] = useState<CarLicencePlateType | null>(null); const [dynamicCarsPosition] = 
useState<DynamicCarsPosition>(DYNAMIC_CARS_POSITION_FRONT); const batchTimer = useRef<NodeJS.Timeout | null>(null); const carsLossRef = useRef<CarsLossType[]>([{}]); const [carsLoss, setCarsLoss] = useState<CarsLossType[]>([{}]); const [lossHistory, setLossHistory] = useState<number[]>([]); const [avgLossHistory, setAvgLossHistory] = useState<number[]>([]); const genomeLossRef = useRef<GenomeLossType[]>([{}]); const [mutationProbability, setMutationProbability] = useState<Probability>( getFloatSearchParam(MUTATION_PROBABILITY_URL_PARAM, DEFAULT_MUTATION_PROBABILITY) ); const [longLivingChampionsPercentage, setLongLivingChampionsPercentage] = useState<Percentage>( getIntSearchParam(LONG_LIVING_CHAMPIONS_URL_PARAM, DEFAULT_LONG_LIVING_CHAMPIONS_PERCENTAGE) ); const [badSimulationRetriesNum, setBadSimulationRetriesNum] = useState<number>(BAD_SIMULATION_RETRIES_NUM); const logger = loggerBuilder({ context: 'EvolutionTab' }); const carsBatchesTotal: number = Math.ceil(Object.keys(cars).length / carsBatchSize); const carsInProgress: CarsInProgressType = carsBatch.reduce((cars: CarsInProgressType, car: CarType) => { cars[car.licencePlate] = true; return cars; }, {}); const batchVersion = generateWorldVersion(generationIndex, carsBatchIndex); const generationLifetimeMs = generationLifetime * SECOND; const onCommonStateReset = () => { setGeneration([]); setCarsBatch([]); setCars({}); setCarsLoss([{}]); carsRef.current = {}; carsLossRef.current = [{}]; genomeLossRef.current = [{}]; setLossHistory([]); setAvgLossHistory([]); setBestGenome(null); setMinLoss(null); setBestCarLicencePlate(null); setSecondBestGenome(null); setSecondMinLoss(null); setSecondBestCarLicencePlate(null); }; const onEvolutionRestart = () => { cancelBatchTimer(); onCommonStateReset(); setWorldIndex(worldIndex + 1); setGenerationIndex(0); setCarsBatchIndex(null); }; const onCarLossUpdate = (licensePlate: CarLicencePlateType, loss: number) => { if (generationIndex === null) { return; } // Save the car loss 
to the "LicencePlate → Loss" map. if (!carsLossRef.current[generationIndex]) { carsLossRef.current[generationIndex] = {}; } carsLossRef.current[generationIndex][licensePlate] = loss; // Save the car loss to the "GenomeKey → Loss" map. if (!genomeLossRef.current[generationIndex]) { genomeLossRef.current[generationIndex] = {}; } if (carsRef.current[licensePlate]) { const carGenomeIndex = carsRef.current[licensePlate].genomeIndex; const carGenome: Genome = generation[carGenomeIndex]; const carGenomeKey: GenomeKey = carGenome.join(''); genomeLossRef.current[generationIndex][carGenomeKey] = loss; } }; const onGenerationSizeChange = (size: number) => { setGenerationSize(size); setSearchParam(GENERATION_SIZE_URL_PARAM, `${size}`); onEvolutionRestart(); }; const onSetDefaultFilterValues = () => { deleteSearchParam(GENERATION_SIZE_URL_PARAM); deleteSearchParam(GROUP_SIZE_URL_PARAM); deleteSearchParam(GENERATION_LIFETIME_URL_PARAM); deleteSearchParam(MUTATION_PROBABILITY_URL_PARAM); deleteSearchParam(LONG_LIVING_CHAMPIONS_URL_PARAM); deleteSearchParam(PERFORMANCE_BOOST_URL_PARAM); setGenerationSize(DEFAULT_GENERATION_SIZE); setCarsBatchSize(DEFAULT_BATCH_SIZE); setGenerationLifetime(DEFAULT_GENERATION_LIFETIME); setMutationProbability(DEFAULT_MUTATION_PROBABILITY); setLongLivingChampionsPercentage(DEFAULT_LONG_LIVING_CHAMPIONS_PERCENTAGE); setPerformanceBoost(DEFAULT_PERFORMANCE_BOOST); }; const onReset = () => { removeGenerationFromStorage(); onSetDefaultFilterValues(); onEvolutionRestart(); enqueue({ message: 'Evolution setup and training progress have been reset', startEnhancer: ({size}) => <Check size={size} />, }, DURATION.medium); }; const onMutationProbabilityChange = (probability: Probability) => { setMutationProbability(probability); setSearchParam(MUTATION_PROBABILITY_URL_PARAM, `${probability}`); }; const onLongLivingChampionsPercentageChange = (percentage: Percentage) => { setLongLivingChampionsPercentage(percentage); 
setSearchParam(LONG_LIVING_CHAMPIONS_URL_PARAM, `${percentage}`); }; const onPerformanceBoost = (state: boolean) => { setPerformanceBoost(state); setSearchParam(PERFORMANCE_BOOST_URL_PARAM, `${state ? 'true' : 'false'}`); }; const onBatchSizeChange = (size: number) => { setCarsBatchSize(size); setSearchParam(GROUP_SIZE_URL_PARAM, `${size}`); onEvolutionRestart(); }; const onGenerationLifetimeChange = (time: number) => { setGenerationLifetime(time); setSearchParam(GENERATION_LIFETIME_URL_PARAM, `${time}`); }; const onRestoreFromCheckpoint = (checkpoint: EvolutionCheckpoint) => { cancelBatchTimer(); setSearchParam(MUTATION_PROBABILITY_URL_PARAM, `${checkpoint.mutationProbability}`); setSearchParam(LONG_LIVING_CHAMPIONS_URL_PARAM, `${checkpoint.longLivingChampionsPercentage}`); setSearchParam(GENERATION_LIFETIME_URL_PARAM, `${checkpoint.generationLifetime}`); setSearchParam(PERFORMANCE_BOOST_URL_PARAM, `${checkpoint.performanceBoost ? 'true' : 'false'}`); setSearchParam(GENERATION_SIZE_URL_PARAM, `${checkpoint.generationSize}`); setSearchParam(GROUP_SIZE_URL_PARAM, `${checkpoint.carsBatchSize}`); saveGenerationToStorage({ generation: checkpoint.generation, generationIndex: checkpoint.generationIndex, lossHistory: checkpoint.lossHistory, avgLossHistory: checkpoint.avgLossHistory, }); document.location.reload(); }; const onCheckpointToFile = (): EvolutionCheckpoint => { const checkpoint: EvolutionCheckpoint = { dateTime: (new Date()).toISOString(), generationIndex: generationIndex || 0, performanceBoost, generationSize, generationLifetime, carsBatchSize, mutationProbability, longLivingChampionsPercentage, lossHistory, avgLossHistory, generation, }; return checkpoint; }; const cancelBatchTimer = () => { logger.info('Trying to cancel batch timer'); if (batchTimer.current === null) { return; } clearTimeout(batchTimer.current); batchTimer.current = null; }; const syncBestGenome = (): string | null | undefined => { if (generationIndex === null) { return; } const 
generationLoss: CarsLossType = carsLossRef.current[generationIndex]; if (!generationLoss) { return; } let bestCarLicensePlate: CarLicencePlateType | null = null; let minLoss: number = Infinity; let bestGenomeIndex: number = -1; Object.keys(generationLoss).forEach((licencePlate: CarLicencePlateType) => { const carLoss: number | null = generationLoss[licencePlate]; if (carLoss === null) { return; } if (carLoss < minLoss) { minLoss = carLoss; bestCarLicensePlate = licencePlate; bestGenomeIndex = cars[licencePlate].genomeIndex; } }); if (bestGenomeIndex === -1) { return; } setMinLoss(minLoss); setBestGenome(generation[bestGenomeIndex]); setBestCarLicencePlate(bestCarLicensePlate); return bestCarLicensePlate; }; const syncSecondBestGenome = ( bestLicensePlateSoFar: string | null | undefined ): string | null | undefined => { if (generationIndex === null || !bestLicensePlateSoFar) { return; } const generationLoss: CarsLossType = carsLossRef.current[generationIndex]; if (!generationLoss) { return; } let secondBestCarLicensePlate: CarLicencePlateType | null = null; let secondMinLoss: number = Infinity; let secondBestGenomeIndex: number = -1; Object.keys(generationLoss).forEach((licencePlate: CarLicencePlateType) => { // Skipping the best car genome. if (licencePlate === bestLicensePlateSoFar) { return; } const carLoss: number | null = generationLoss[licencePlate]; if (carLoss === null) { return; } if (carLoss < secondMinLoss) { secondMinLoss = carLoss; secondBestCarLicensePlate = licencePlate; secondBestGenomeIndex = cars[licencePlate].genomeIndex; } }); if (secondBestGenomeIndex === -1) { return; } setSecondMinLoss(secondMinLoss); setSecondBestGenome(generation[secondBestGenomeIndex]); setSecondBestCarLicencePlate(secondBestCarLicensePlate); return secondBestCarLicensePlate; }; const syncLossHistory = () => { if (generationIndex === null) { return; } const generationLoss: CarsLossType = carsLossRef.current[generationIndex]; // Sync min loss history. 
const newLossHistory = [...lossHistory]; if (generationLoss) { newLossHistory[generationIndex] = Object.values(generationLoss).reduce( (minVal: number, currVal: number | null) => { if (currVal === null) { return minVal; } return Math.min(minVal, currVal); }, Infinity ); } else { newLossHistory[generationIndex] = Infinity; } setLossHistory(newLossHistory); // Sync avg loss history. const newAvgLossHistory = [...avgLossHistory]; if (generationLoss) { let nonNullLosses = 0; const ascSortedGenerationLoss = Object.values<number | null>(generationLoss) .sort((a: number | null, b: number | null): number => { const aTuned: number = a === null ? Infinity : a; const bTuned: number = b === null ? Infinity : b; if (aTuned < bTuned) { return -1; } if (aTuned > bTuned) { return 1; } return 0; } ); const P50GenerationLoss = ascSortedGenerationLoss.slice( 0, Math.ceil(ascSortedGenerationLoss.length * 0.5), ); const lossSum = P50GenerationLoss.reduce( (sum: number, currVal: number | null) => { if (currVal === null) { return sum; } nonNullLosses += 1; return sum + currVal; }, 0 ); newAvgLossHistory[generationIndex] = nonNullLosses ? 
lossSum / nonNullLosses : 0; } else { newAvgLossHistory[generationIndex] = Infinity; } setAvgLossHistory(newAvgLossHistory); }; const carFitnessFunction = (generationIndex: number) => (genome: Genome): number => { const genomeKey = genome.join(''); if ( generationIndex === null || !genomeLossRef.current[generationIndex] || typeof genomeLossRef.current[generationIndex][genomeKey] !== 'number' ) { throw new Error('Fitness value for specified genome is undefined'); } const loss = genomeLossRef.current[generationIndex][genomeKey]; if (typeof loss !== 'number') { throw new Error('Loss value is not a number'); } return carLossToFitness(loss, FITNESS_ALPHA); }; const isValidGenerationFromStorage = (generation: Generation | null): boolean => { return !!( generation && generation.length === generationSize && generation[0].length === GENOME_LENGTH ); }; const getGenerationIndexFromStorage = (): number | null => { const { generation: generationFromStorage, generationIndex: generationIndexFromStorage, } = loadGenerationFromStorage(); if ( isValidGenerationFromStorage(generationFromStorage) && generationIndexFromStorage ) { return generationIndexFromStorage; } return null; }; const getLossHistoryFromStorage = (): number[] | null => { const { lossHistory: lossHistoryFromStorage, generation: generationFromStorage, } = loadGenerationFromStorage(); if ( isValidGenerationFromStorage(generationFromStorage) && lossHistoryFromStorage ) { return lossHistoryFromStorage; } return null; }; const getAvgLossHistoryFromStorage = (): number[] | null => { const { avgLossHistory: avgLossHistoryFromStorage, generation: generationFromStorage, } = loadGenerationFromStorage(); if ( isValidGenerationFromStorage(generationFromStorage) && avgLossHistoryFromStorage ) { return avgLossHistoryFromStorage; } return null; }; const getGenerationFromStorage = (): Generation | null => { const { generation: generationFromStorage, } = loadGenerationFromStorage(); if 
(isValidGenerationFromStorage(generationFromStorage)) { return generationFromStorage; } if (generationFromStorage) { try { const debugGenerationSize = generationFromStorage.length; const debugGenomeLength = generationFromStorage[0].length; logger.warn(`Generation from storage is invalid: generation size ${debugGenerationSize}, genome length ${debugGenomeLength}`); } catch (err) { logger.warn('Generation from storage is invalid'); } } return null; }; const startEvolution = () => { logger.info('Start evolution'); let generationStartIndex = 0; const generationIndexFromStorage = getGenerationIndexFromStorage(); const lossHistoryFromStorage = getLossHistoryFromStorage(); const avgLossHistoryFromStorage = getAvgLossHistoryFromStorage(); if (generationIndexFromStorage && lossHistoryFromStorage && avgLossHistoryFromStorage) { generationStartIndex = generationIndexFromStorage; setRestoredFromGenerationIndex(generationIndexFromStorage); setLossHistory(lossHistoryFromStorage); setAvgLossHistory(avgLossHistoryFromStorage); } setGenerationIndex(generationStartIndex); }; const createFirstGeneration = () => { if (generationIndex === null) { return; } logger.info('Create first generation'); let firstGeneration: Generation = createGeneration({ generationSize, genomeLength: GENOME_LENGTH, }); const generationFromStorage: Generation | null = getGenerationFromStorage(); const generationIndexFromStorage: number | null = getGenerationIndexFromStorage(); if (generationFromStorage && generationIndexFromStorage) { firstGeneration = generationFromStorage; enqueue({ message: `Generation #${generationIndexFromStorage} has been restored from the saved checkpoint. 
To start from scratch, press the Reset button.`, startEnhancer: ({size}) => <BiUpload size={size} />, }, DURATION.medium); } setGeneration(firstGeneration); setBestGenome(firstGeneration[0]); setSecondBestGenome(firstGeneration[1]); }; const mateExistingGeneration = () => { if (generationIndex === null) { return; } logger.info(`Mate generation #${generationIndex}`); try { const newGeneration = select( generation, carFitnessFunction(generationIndex - 1), { mutationProbability, longLivingChampionsPercentage: longLivingChampionsPercentage, }, ); setGeneration(newGeneration); saveGenerationToStorage({ generation: newGeneration, generationIndex, lossHistory, avgLossHistory, }); } catch (e: any) { // If selection failed for some reason, clone the existing generation and try again. setGeneration([...generation]); const errorMessage = 'The selection for the new generation has failed. Cloning the existing generation to try it next time.'; const exceptionMessage = e && e.message ? e.message : ''; logger.warn(errorMessage, exceptionMessage); } }; const createCarsFromGeneration = () => { if (!generation || !generation.length) { return; } logger.info(`Create cars from generation #${generationIndex}`); const cars = generationToCars({ generation, generationIndex, onLossUpdate: onCarLossUpdate, }); setCars(cars); setCarsBatchIndex(0); carsRef.current = _.cloneDeep(cars); }; const generateNextCarsBatch = () => { if (carsBatchIndex === null || generationIndex === null) { return; } if (!cars || !Object.keys(cars).length) { return; } if (carsBatchIndex >= carsBatchesTotal) { return; } logger.info(`Generate cars batch #${carsBatchIndex}`); const batchStart = carsBatchSize * carsBatchIndex; const batchEnd = batchStart + carsBatchSize; const carsBatch: CarType[] = Object.values(cars).slice(batchStart, batchEnd); setCarsBatch(carsBatch); }; const needToRetry = BAD_SIMULATION_RETRIES_ENABLED && carsBatchIndex === BAD_SIMULATION_BATCH_INDEX_CHECK && badSimulationRetriesNum > 0 && 
lossHistory.length > 1 && lossHistory[lossHistory.length - 1] > (lossHistory[lossHistory.length - 2] * BAD_SIMULATION_MIN_LOSS_INCREASE_PERCENTAGE / 100); const onBatchLifetimeEnd = () => { if (carsBatchIndex === null) { return; } logger.info(`Batch #${carsBatchIndex} lifetime ended`); setCarsLoss(_.cloneDeep<CarsLossType[]>(carsLossRef.current)); syncLossHistory(); const bestLicensePlate = syncBestGenome(); syncSecondBestGenome(bestLicensePlate); let nextBatchIndex = carsBatchIndex + 1; // Retrying logic if (BAD_SIMULATION_RETRIES_ENABLED && carsBatchIndex) { if (badSimulationRetriesNum === 0) { if (carsBatchIndex > BAD_SIMULATION_BATCH_INDEX_CHECK) { logger.info(`Resetting the simulation retries counter back to #${BAD_SIMULATION_RETRIES_NUM}`); setBadSimulationRetriesNum(BAD_SIMULATION_RETRIES_NUM); } } else if (needToRetry) { logger.info(`Retry needed. Number of retries left: ${badSimulationRetriesNum - 1}`); setBadSimulationRetriesNum(badSimulationRetriesNum - 1); nextBatchIndex = 0; } } if (nextBatchIndex >= carsBatchesTotal) { setCarsBatch([]); if (generationIndex !== null) { setCarsBatchIndex(null); setGenerationIndex(generationIndex + 1); } return; } setCarsBatchIndex(nextBatchIndex); }; const countDownBatchLifetime = (onLifetimeEnd: () => void) => { if (carsBatchIndex === null) { return; } if (!carsBatch || !carsBatch.length) { return; } logger.info(`Batch #${carsBatchIndex} lifetime started`); cancelBatchTimer(); batchTimer.current = setTimeout(onLifetimeEnd, generationLifetimeMs); }; // Start the evolution. useEffect(() => { startEvolution(); // eslint-disable-next-line react-hooks/exhaustive-deps }, []); // Once generation index is changed we need to create (or mate) a new generation. 
useEffect(() => { if (generationIndex === 0 || generationIndex === restoredFromGenerationIndex) { createFirstGeneration(); } else { mateExistingGeneration(); } // eslint-disable-next-line react-hooks/exhaustive-deps }, [generationIndex, worldIndex]); // Once generation is changed we need to create cars. useEffect(() => { createCarsFromGeneration(); // eslint-disable-next-line react-hooks/exhaustive-deps }, [generation]); // Once the cars batch index is updated we need to generate a cars batch. useEffect(() => { generateNextCarsBatch(); // eslint-disable-next-line react-hooks/exhaustive-deps }, [carsBatchIndex]); // Once the new cars batch is created we need to start generation timer. useEffect(() => { countDownBatchLifetime(onBatchLifetimeEnd); return () => { cancelBatchTimer(); }; // eslint-disable-next-line react-hooks/exhaustive-deps }, [carsBatch]); return ( <Block> <World version={batchVersion} performanceBoost={performanceBoost} > <ParkingAutomatic performanceBoost={performanceBoost} cars={carsBatch} withVisibleSensors withLabels carsPosition={dynamicCarsPosition} /> </World> <Block marginTop="20px"> <Notification overrides={{Body: {style: {width: 'auto'}}}}> Train the car to do self-parking using genetic algorithm<br/><br/> <small>For better results, increase the population size to 500-1000 and wait for 50-100 generations. 
<a style={{color: 'rgb(30, 84, 183)'}} href={ARTICLE_LINK}>More about params setup</a></small> </Notification> </Block> <EvolutionAnalytics mutationProbability={mutationProbability} onMutationProbabilityChange={onMutationProbabilityChange} longLivingChampionsPercentage={longLivingChampionsPercentage} generationIndex={generationIndex} carsBatchIndex={carsBatchIndex} totalBatches={carsBatchesTotal} worldIndex={worldIndex} needToRetry={needToRetry} generationLifetimeMs={generationLifetimeMs} generationSize={generationSize} performanceBoost={performanceBoost} carsBatchSize={carsBatchSize} generationLifetime={generationLifetime} batchVersion={batchVersion} onGenerationSizeChange={onGenerationSizeChange} onBatchSizeChange={onBatchSizeChange} onGenerationLifetimeChange={onGenerationLifetimeChange} onLongLivingChampionsPercentageChange={onLongLivingChampionsPercentageChange} onPerformanceBoost={onPerformanceBoost} onReset={onReset} lossHistory={lossHistory} avgLossHistory={avgLossHistory} cars={cars} carsInProgress={carsInProgress} carsLoss={carsLoss} bestGenome={bestGenome} bestCarLicencePlate={bestCarLicencePlate} minLoss={minLoss} secondBestGenome={secondBestGenome} secondBestCarLicencePlate={secondBestCarLicencePlate} secondMinLoss={secondMinLoss} /> <Block marginTop="30px"> <EvolutionCheckpointSaver onRestoreFromCheckpoint={onRestoreFromCheckpoint} onCheckpointToFile={onCheckpointToFile} /> </Block> </Block> ); } export default EvolutionTabEvolution;
the_stack
import {NgxPermissionsService} from '../service/permissions.service'; import {Component, ModuleWithProviders, NgModule, NgModuleFactoryLoader} from '@angular/core'; import {Route, Router, RouterModule} from '@angular/router'; import {Location} from '@angular/common'; import {ComponentFixture, fakeAsync, inject, TestBed, tick} from '@angular/core/testing'; import {RouterTestingModule, SpyNgModuleFactoryLoader} from '@angular/router/testing'; import {NgxPermissionsModule} from '../index'; import {NgxRolesService} from '../service/roles.service'; @Component({ selector: 'ngx-permissions-root', template: ` <router-outlet></router-outlet>` }) class RootComponent { constructor(public permissions: NgxPermissionsService) { permissions.addPermission('ADMIN', () => { return false; }); } } @Component({ selector: 'ngx-permissions-lazy', template: 'lazy-loaded-parent [<router-outlet></router-outlet>]' }) class ParentLazyLoadedComponent { } function getLazyLoadedModule(importedModule: any) { @Component({selector: 'ngx-permissions-lazy', template: 'lazy-loaded-child'}) class ChildLazyLoadedComponent { constructor(public permissions: NgxPermissionsService) { permissions.addPermission('ADMIN', () => { return true; }); // expect(permissions.hasPermission('LAZY')).toBe(true); } } @NgModule({ declarations: [ParentLazyLoadedComponent, ChildLazyLoadedComponent], imports: [ RouterModule.forChild([{ path: 'loaded', component: ParentLazyLoadedComponent, children: [{path: 'child', component: ChildLazyLoadedComponent}] }]), importedModule ] }) class LoadedModule { } return LoadedModule; } function advance(fixture: ComponentFixture<any>): void { tick(); fixture.detectChanges(); } function createRoot(router: Router, type: any): ComponentFixture<any> { const f = TestBed.createComponent(type); advance(f); router.initialNavigation(); advance(f); return f; } describe('module', () => { beforeEach(() => { TestBed.configureTestingModule({ imports: [ RouterTestingModule, NgxPermissionsModule.forRoot(), 
], declarations: [RootComponent] }); }); it('should work when lazy loaded using forChild', fakeAsync(inject( [Router, Location, NgModuleFactoryLoader], (router: Router, location: Location, loader: SpyNgModuleFactoryLoader) => { const LoadedModule = getLazyLoadedModule(NgxPermissionsModule.forChild()); loader.stubbedModules = {expected: LoadedModule}; const fixture = createRoot(router, RootComponent); const permissionsService: NgxPermissionsService = TestBed.inject(NgxPermissionsService); permissionsService.hasPermission('ADMIN').then((data) => { expect(data).toBe(false); }); router.resetConfig([{path: 'lazy', loadChildren: 'expected'}]); router.navigateByUrl('/lazy/loaded/child'); advance(fixture); expect(location.path()).toEqual('/lazy/loaded/child'); // since the root module imports the NgxPermissionsModule with forRoot and the lazy loaded module with forChild // the permissionsService service is shared between both modules // the constructor of the ChildLazyLoadedComponent overwrote the "ADMIN" key of the root NgxPermissionsService permissionsService.hasPermission('ADMIN').then((data) => { expect(data).toBe(true); }); })) ); it('should work when loaded using just Module', fakeAsync(inject( [Router, Location, NgModuleFactoryLoader], (router: Router, location: Location, loader: SpyNgModuleFactoryLoader) => { const LoadedModule = getLazyLoadedModule(NgxPermissionsModule); loader.stubbedModules = {expected: LoadedModule}; const fixture = createRoot(router, RootComponent); const permissionsService: NgxPermissionsService = TestBed.inject(NgxPermissionsService); permissionsService.hasPermission('ADMIN').then((data) => { expect(data).toBe(false); }); router.resetConfig([{path: 'lazy', loadChildren: 'expected'}]); router.navigateByUrl('/lazy/loaded/child'); advance(fixture); expect(location.path()).toEqual('/lazy/loaded/child'); // since the root module imports the NgxPermissionsModule with forRoot and the lazy loaded module with forChild // the permissionsService 
service is shared between both modules // the constructor of the ChildLazyLoadedComponent overwrote the "ADMIN" key of the root NgxPermissionsService permissionsService.hasPermission('ADMIN').then((data) => { expect(data).toBe(true); }); })) ); it('should create 2 instances of the service when lazy loaded using forRoot', fakeAsync(inject( [Router, Location, NgModuleFactoryLoader], (router: Router, location: Location, loader: SpyNgModuleFactoryLoader) => { const LoadedModule = getLazyLoadedModule(NgxPermissionsModule.forRoot()); loader.stubbedModules = {expected: LoadedModule}; const fixture = createRoot(router, RootComponent); const permissionsService = TestBed.inject(NgxPermissionsService); permissionsService.hasPermission('ADMIN').then((data) => { expect(data).toBe(false); }); router.resetConfig([{path: 'lazy', loadChildren: 'expected'}]); router.navigateByUrl('/lazy/loaded/child'); advance(fixture); expect(location.path()).toEqual('/lazy/loaded/child'); // since both the root module and the lazy loaded module use forRoot to define the NgxPermissionsModule // the permissionsService service is NOT shared, and 2 instances co-exist // the constructor of the ChildLazyLoadedComponent didn't overwrote the "ADMIN" key of the root NgxPermissionsService permissionsService.hasPermission('ADMIN').then((data) => { expect(data).toBe(false); }); })) ); it('should create 2 instances of the service when lazy loaded using forChild and isolate true', fakeAsync(inject( [Router, Location, NgModuleFactoryLoader], (router: Router, location: Location, loader: SpyNgModuleFactoryLoader) => { const LoadedModule = getLazyLoadedModule(NgxPermissionsModule.forChild({permissionsIsolate: true})); loader.stubbedModules = {expected: LoadedModule}; const fixture = createRoot(router, RootComponent); const permissionsService = TestBed.inject(NgxPermissionsService); permissionsService.hasPermission('ADMIN').then((data) => { expect(data).toBe(false); }); router.resetConfig([{path: 'lazy', 
loadChildren: 'expected'}]); router.navigateByUrl('/lazy/loaded/child'); advance(fixture); expect(location.path()).toEqual('/lazy/loaded/child'); // since both the root module and the lazy loaded module use forRoot to define the NgxPermissionsModule // the permissions service is NOT shared, and 2 instances co-exist // the constructor of the ChildLazyLoadedComponent didn't overwrote the "false" key of the root NgxPermissionsService permissionsService.hasPermission('ADMIN').then((data) => { expect(data).toBe(false); }); })) ); }); @Component({ selector: 'ngx-permissions-root-roles', template: ` <router-outlet></router-outlet>` }) class RootRolesComponent { constructor(public roleService: NgxRolesService) { roleService.addRole('ADMIN', () => { return false; }); } } @Component({ selector: 'ngx-permissions-lazy', template: 'lazy-loaded-parent [<router-outlet></router-outlet>]' }) class ParentLazyRolesLoadedComponent { } function getLazyRolesLoadedModule(importedModule: ModuleWithProviders<any>) { @Component({selector: 'ngx-permissions-lazy', template: 'lazy-loaded-child'}) class ChildLazyLoadedComponent { constructor(public permissions: NgxRolesService) { permissions.addRole('ADMIN', () => { return true; }); // expect(permissions.hasPermission('LAZY')).toBe(true); } } @NgModule({ declarations: [ParentLazyLoadedComponent, ChildLazyLoadedComponent], imports: [ RouterModule.forChild([{ path: 'loaded', component: ParentLazyLoadedComponent, children: [{path: 'child', component: ChildLazyLoadedComponent}] } as Route]), importedModule ] }) class LoadedModule { } return LoadedModule; } describe('Role module', () => { beforeEach(() => { TestBed.configureTestingModule({ imports: [ RouterTestingModule, NgxPermissionsModule.forRoot(), ], declarations: [RootRolesComponent] }); }); it('should work when lazy loaded using forChild', fakeAsync(inject( [Router, Location, NgModuleFactoryLoader], (router: Router, location: Location, loader: SpyNgModuleFactoryLoader) => { const LoadedModule 
= getLazyRolesLoadedModule(NgxPermissionsModule.forChild()); loader.stubbedModules = {expected: LoadedModule}; const fixture = createRoot(router, RootRolesComponent); const rolesService: NgxRolesService = TestBed.inject(NgxRolesService); rolesService.hasOnlyRoles('ADMIN').then((data) => { expect(data).toBe(false); }); router.resetConfig([{path: 'lazy', loadChildren: 'expected'}]); router.navigateByUrl('/lazy/loaded/child'); advance(fixture); expect(location.path()).toEqual('/lazy/loaded/child'); // since the root module imports the NgxPermissionsModule with forRoot and the lazy loaded module with forChild // the rolesServihasOnlyRoles() is shared between both modules // the constructor of the ChildLazyLoadedComponent overwrote the "ADMIN" key of the root roleServiceService rolesService.hasOnlyRoles('ADMIN').then((data) => { expect(data).toBe(true); }); })) ); it('should create 2 instances of the service when lazy loaded using forRoot', fakeAsync(inject( [Router, Location, NgModuleFactoryLoader], (router: Router, location: Location, loader: SpyNgModuleFactoryLoader) => { const LoadedModule = getLazyRolesLoadedModule(NgxPermissionsModule.forRoot()); loader.stubbedModules = {expected: LoadedModule}; const fixture = createRoot(router, RootRolesComponent); const rolesService = TestBed.inject(NgxRolesService); rolesService.hasOnlyRoles('ADMIN').then((data) => { expect(data).toBe(false); }); router.resetConfig([{path: 'lazy', loadChildren: 'expected'}]); router.navigateByUrl('/lazy/loaded/child'); advance(fixture); expect(location.path()).toEqual('/lazy/loaded/child'); // since both the root module and the lazy loaded module use forRoot to define the NgxPermisionsModule // the rolesService service is NOT shared, and 2 instances co-exist // the constructor of the ChildLazyLoadedComponent didn't overwrote the "ADMIN" key of the root PermissionsService rolesService.hasOnlyRoles('ADMIN').then((data) => { expect(data).toBe(false); }); })) ); it('should create 2 instances of 
the service when lazy loaded using forChild and isolate true', fakeAsync(inject( [Router, Location, NgModuleFactoryLoader], (router: Router, location: Location, loader: SpyNgModuleFactoryLoader) => { const LoadedModule = getLazyRolesLoadedModule(NgxPermissionsModule.forChild({rolesIsolate: true})); loader.stubbedModules = {expected: LoadedModule}; const fixture = createRoot(router, RootRolesComponent); const rolesService = TestBed.inject(NgxRolesService); rolesService.hasOnlyRoles('ADMIN').then((data) => { expect(data).toBe(false); }); router.resetConfig([{path: 'lazy', loadChildren: 'expected'}]); router.navigateByUrl('/lazy/loaded/child'); advance(fixture); expect(location.path()).toEqual('/lazy/loaded/child'); // since both the root module and the lazy loaded module use forRoot to define the NgxPermissionsModule // the permissions service is NOT shared, and 2 instances co-exist // the constructor of the ChildLazyLoadedComponent didn't overwrote the "false" key of the root NgxRolesService rolesService.hasOnlyRoles('ADMIN').then((data) => { expect(data).toBe(false); }); })) ); });
the_stack
/// <reference types="node" /> export = massive; declare function massive(connection: massive.ConnectionInfo | string, loaderConfig?: massive.Loader, driverConfig?: object): Promise<massive.Database>; declare namespace massive { type UUID = number | string; interface Loader { blacklist?: string | string[] | undefined; whitelist?: string | string[] | undefined; functionBlacklist?: string | string[] | undefined; functionWhitelist?: string | string[] | undefined; allowedSchemas?: string | string[] | undefined; exceptions?: string | string[] | undefined; scripts?: string | undefined; } interface DropOptions { cascade?: boolean | undefined; } interface ConnectionInfo { user?: string | undefined; database?: string | undefined; password?: string | null | undefined; port?: number | undefined; host?: string | undefined; ssl?: boolean | undefined; application_name?: string | undefined; fallback_application_name?: boolean | undefined; } /** Bundled insert, select, update and delete query options.. Shame on me */ interface GenericQueryOptions { columns?: string[] | undefined; limit?: number | undefined; offset?: number | undefined; only?: boolean | undefined; order?: string[] | undefined; orderBody?: boolean | undefined; build?: boolean | undefined; document?: boolean | undefined; single?: boolean | undefined; stream?: boolean | undefined; } interface SearchDefinition { /** List of the fields to search. */ fields: string[]; /** Search term. */ term: string; } interface QueryOptions { /** This is a query against a document table. */ document?: boolean | undefined; /** True to return a single result object instead of an array of results. */ single?: boolean | undefined; /** True to return a stream instead of a resultset. */ stream?: boolean | undefined; } interface SearchCriteria { fields: string[]; term: string; } class Table extends Queryable { /** Delete a record or records. 
*/ destroy(criteria: object, options?: GenericQueryOptions): Promise<void>; /** Attempts to assemble primary key criteria for a record object representing a row in this table. The criteria must include the full primary key, and must not invoke any operations. */ getPkCriteria(record: object): object; /** Insert a record or records into the table. */ insert(data: object | any[], options?: GenericQueryOptions): Promise<any>; /** * Update a document, adding new information and changing existing information. * This function can be used with any JSON field, not just document tables; however, only document tables can use criteria objects which directly reference document fields. * If calling modify with a criteria object for a non-document table, the criteria will be tested against the entire row (as opposed to the document body as it is for document tables). * To test elements of the JSON field in a non-document table with a criteria object, use a JSON path string. */ modify(criteria: object | UUID, changes: object, field?: string): Promise<any>; /** * Performs an upsert. * If the record does not include a value for the primary key column, it will be inserted and the persisted record (including primary key) returned; * if it does, the row will be updated and the modified record returned. */ save(record: object, options?: GenericQueryOptions): Promise<any>; /** * Save a document to the database. This function replaces the entire document body. */ saveDoc(record: object): Promise<any>; /** * Update a record. * May be invoked with a complete record (including primary key), or with a criteria object and a map of fields to new values. * Multi-row updates are only possible through the latter usage. */ update(criteria: object, fields: object, options?: GenericQueryOptions): void; } interface EntitySpecification { /** A Database. */ db: object; /** The entity's name. */ name: string; /** Path to the entity, if a file. 
*/ path: string; /** Entity's owning schema, if a database object. */ schema: string; } class Entity { constructor(spec: EntitySpecification); } class Queryable extends Entity { constructor(spec: EntitySpecification); /** * Count rows matching criteria. There are two ways to use this method: 1. find() style: db.mytable.count({field: value}); 2. where() style: db.mytable.count("field=$1", [value]); * * @param conditions A criteria object or SQL predicate. * @param params Prepared statement parameters for use with raw SQL predicates. */ count(conditions: object | string, params?: any): Promise<number>; /** * Count documents matching criteria. Unlike count, this function only supports criteria objects. */ countDoc(criteria: object): Promise<number>; /** * Find rows matching criteria. * * @param criteria A criteria object or primary key value. */ find(criteria: object | UUID, options?: GenericQueryOptions): Promise<any>; /** * Find a document by searching in the body. * * @param criteria A criteria object or primary key value. */ findDoc(criteria?: object | UUID, options?: GenericQueryOptions): Promise<any>; /** * Return a single record. * * @param criteria A criteria object or primary key value. */ findOne(criteria?: object | UUID, options?: GenericQueryOptions): Promise<any>; /** * Determine whether criteria represent a search by primary key. * If a number or uuid are passed, it is assumed to be a primary key value; if an object, it must have only one key, which must specify the primary key column. */ isPkSearch(criteria?: object | string | number): boolean; /** * Perform a full-text search on queryable fields. If options.document is true, looks in the document body fields instead of the table columns. */ search(plan: SearchDefinition, options?: GenericQueryOptions): Promise<any>; /** Shortcut to perform a full text search on a document table. 
*/ searchDoc(plan: SearchDefinition, options?: GenericQueryOptions): Promise<any>; /** Run a query with a raw SQL predicate, eg: db.mytable.where('id=$1', [123]).then(...); */ where(conditions: string, params?: any, options?: GenericQueryOptions): Promise<any>; } /** Represents a SELECT query. */ class Select { /** * @param source Database object to query. * @param criteria A criteria object, prebuilt predicate, or primitive pk value. */ constructor(source: Queryable, criteria: object | UUID, options?: GenericQueryOptions); /** Format this object into a SQL SELECT. */ format(): string; } /** Represents a INSERT query. */ class Insert { /** * @param source Database object to query. * @param record A map of field names to values to be inserted, or an array of same. */ constructor(source: Queryable, record: object | any[], options?: GenericQueryOptions); /** Format this object into a SQL SELECT. */ format(): string; } /** Represents a UPDATE query. */ class Update { /** * @param source Database object to query. * @param changes A map of field names to new values. * @param criteria A criteria object. */ constructor(source: Queryable, changes: object, criteria: object, options?: GenericQueryOptions); /** Format this object into a SQL SELECT. */ format(): string; } /** Represents a UPDATE query. */ class Delete { /** * @param source Database object to query. * @param criteria A criteria object. */ constructor(source: Queryable, criteria?: object, options?: GenericQueryOptions); /** Format this object into a SQL SELECT. */ format(): string; } class Database { /** * @param connection A connection object or connection string */ constructor(connection: object | string, loader?: Loader, driverConfig?: object); /** Attach an entity to the connected instance. */ attach(ctor: any, ...sources: any[]): Promise<any[]>; /** Remove all attached entities from the instance, returning it to the pre- introspection state. 
*/ clean(): void; /** Create a new document table and attach it to the Database for usage. */ createDocumentTable(location: string): Promise<void>; /** Create a new schema in the database. */ createSchema(schemaName: string): Promise<void>; /** Forget an entity. */ detach(entityPath: string): void; /** Drop a schema and remove it and its owned objects from the Database. */ dropSchema(schemaName: string, options?: DropOptions): Promise<void>; /** Drop a table and remove it from the Database. */ dropTable(tablePath: string, options?: DropOptions): Promise<void>; /** List all the functions and scripts attached to the connected instance. */ listFunctions(): Promise<any[]>; /** List all the tables attached to the connected instance. */ listTables(): Promise<any[]>; /** List all the views attached to the connected instance. */ listViews(): Promise<any[]>; /** Execute a query. */ query(query: Select | Insert | Update | Delete | string, params?: any, options?: QueryOptions): Promise<any>; /** * Synchronize the database API with the current state by scanning for tables, views, functions, and scripts. * Objects and files which no longer exist are cleared and new objects and files added. */ reload(): void; /** * Save a document. * * @param collection Document table name to save to. If it does not already exist, it will be created. * @param doc A JSON document. */ saveDoc(collection: string, doc: object): Promise<any>; [tableName: string]: Table | any; } }
the_stack
// Unit tests for the DI Container: provider kinds (useValue / useClass /
// useFactory), lazy instantiation, dependency maps, optional and multi
// providers, error reporting (including __stack "caused by" traces),
// fallback containers and token borrowing.
import { Container } from './Container';
import { createToken } from './createToken/createToken';

describe('DI Container', () => {
  it('Использование useValue провайдеров', () => {
    const container = new Container();
    const stringToken = createToken('stringToken');
    const stringPayload = '__value__';
    container.register({ provide: stringToken, useValue: stringPayload });
    expect(container.get(stringToken)).toBe(stringPayload);
    /* Same thing with a class instance as the value */
    const classToken = createToken<SecretClass>('classToken');
    class SecretClass {
      testMethod() {
        return 'ra';
      }
    }
    container.register({ provide: classToken, useValue: new SecretClass() });
    expect(container.get(classToken).testMethod()).toBe('ra');
  });

  it('Использование useClass провайдеров', () => {
    const container = new Container();
    const result = 'Oby';
    class TestClass {
      method() {
        return result;
      }
    }
    const token = createToken<TestClass>('tokenName');
    container.register({ provide: token, useClass: TestClass });
    expect(container.get(token).method()).toBe(result);
  });

  it('Использование useFactory провайдеров', () => {
    const container = new Container();
    const result = 'Oby';
    const factory = () => result;
    const token = createToken<() => string>('tokenName');
    container.register({ provide: token, useFactory: factory });
    expect(container.get(token)).toBe(result);
  });

  it('Динамическая инициализация при получении класса', () => {
    const container = new Container();
    // Each sentinel flips only when the corresponding provider is instantiated,
    // proving that instantiation happens lazily on get(), not on register().
    let modA = 'PO';
    let modB = 'LK';
    let modC = 'GF';
    class A {
      constructor() {
        modA = 'A';
      }
    }
    class B {
      constructor() {
        modB = 'B';
      }
    }
    function ccc() {
      modC = 'C';
    }
    const tokenA = createToken<A>('tokenA');
    const tokenB = createToken<B>('tokenB');
    const tokenC = createToken('tokenC');
    container.register({ provide: tokenA, useClass: A });
    container.register({ provide: tokenB, useClass: B });
    container.register({ provide: tokenC, useFactory: ccc });
    container.get(tokenA);
    expect(modA).toBe('A');
    expect(modB).toBe('LK');
    expect(modC).toBe('GF');
    container.get(tokenB);
    expect(modA).toBe('A');
    expect(modB).toBe('B');
    expect(modC).toBe('GF');
    container.get(tokenC);
    expect(modA).toBe('A');
    expect(modB).toBe('B');
    expect(modC).toBe('C');
  });

  it('Зависимости', () => {
    const result: string[] = [];
    const container = new Container();
    class A {
      get() {
        return 'Token A';
      }
    }
    function bbb({ tokenC }: any) {
      return { a: { b: `token B ${tokenC.a.b}` } };
    }
    const c = {
      a: { b: 'token C' },
    };
    class D {
      constructor({ tokenA, tokenB, tokenC }: any) {
        result.push(tokenA.get());
        result.push(tokenC.a.b);
        result.push(tokenB.a.b);
      }
    }
    container.register({ provide: 'tokenA', useClass: A });
    container.register({
      provide: 'tokenB',
      useFactory: bbb,
      deps: { tokenC: 'tokenC' },
    });
    container.register({ provide: 'tokenC', useValue: c });
    const depsTokenD = { tokenA: 'tokenA', tokenB: 'tokenB', tokenC: 'tokenC' };
    container.register({
      provide: 'tokenD',
      useClass: D,
      deps: depsTokenD,
    });
    container.get('tokenD');
    expect(result).toEqual(['Token A', 'token C', 'token B token C']);
    // Verify getOfDeps resolves a dependency map into the same entities
    expect(Object.keys(container.getOfDeps(depsTokenD))).toEqual(['tokenA', 'tokenB', 'tokenC']);
    expect(container.getOfDeps(depsTokenD).tokenC).toEqual({ a: { b: 'token C' } });
  });

  it('Опциональные зависимости - получение null если нет', () => {
    const container = new Container();
    const result: any[] = [];
    container.register({
      provide: 'A',
      useValue: 9,
    });
    container.register({
      provide: 'B',
      useFactory: ({ a, b }) => {
        result.push(a, b);
      },
      deps: { a: 'A', b: { token: 'K', optional: true } },
    });
    container.get('B');
    expect(result).toEqual([9, null]);
  });

  it('Опциональные зависимости - получение значения если есть', () => {
    const container = new Container();
    const result: any[] = [];
    container.register({
      provide: 'A',
      useValue: 9,
    });
    container.register({
      provide: 'K',
      useValue: 5,
    });
    container.register({
      provide: 'B',
      useFactory: ({ a, b }) => {
        result.push(a, b);
      },
      deps: { a: 'A', b: { token: 'K', optional: true } },
    });
    container.get('B');
    expect(result).toEqual([9, 5]);
  });

  it('Опциональные зависимости - получение null если не удалось создать запрошенную сущность', () => {
    const container = new Container();
    const result: any[] = [];
    container.register({
      provide: 'A',
      useFactory: () => {
        return 9;
      },
      deps: {
        c: 'C',
      },
    });
    container.register({
      provide: 'B',
      useFactory: ({ a }) => {
        result.push(a);
      },
      deps: { a: { token: 'A', optional: true } },
    });
    expect(() => container.get('B')).not.toThrow();
    expect(result).toEqual([null]);
  });

  it('Мульти провайдер', () => {
    const container = new Container();
    container.register({ provide: 'B', useValue: 2, multi: true });
    container.register({ provide: 'A', useValue: { a: 1 }, multi: true });
    container.register({
      provide: 'A',
      useFactory: ({ B }) => ({ b: B[0] }),
      deps: { B: 'B' },
      multi: true,
    });
    expect(container.get('A')).toEqual([{ a: 1 }, { b: 2 }]);
  });

  describe('Ошибки', () => {
    it('смешивание multi true/false', () => {
      const container = new Container();
      try {
        container.register({ provide: 'A', useValue: { a: 1 }, multi: false });
        container.register({ provide: 'A', useValue: { a: 1 }, multi: true });
        // must not get here — the second register has to throw
        expect(true).toBe(false);
      } catch (e) {
        expect(e.message).toBe('Mixed multi-provider for A');
        expect(e.type).toBe('MixedMulti');
      }
      try {
        // @ts-ignore
        container.register({ provide: 'B', __stack: new Error().stack, useValue: 1, multi: false });
        // @ts-ignore
        container.register({ provide: 'B', __stack: new Error().stack, useValue: 2, multi: true });
      } catch (e) {
        // the __stack passed at registration must surface in the error trace
        expect(e.stack).toMatch(/---- caused by: ----[\s\S]+Container.spec.ts/);
      }
    });

    it('Ошибки, если мы смешиваем token multi с не мульти проваейдером', () => {
      const container = new Container();
      const token = createToken('tadam', { multi: true });
      try {
        container.register({ provide: token, useValue: { a: 1 } });
        expect(true).toBe(false);
      } catch (e) {
        expect(e.message).toBe('Token tadam require multi providers');
        expect(e.type).toBe('RequireMulti');
      }
      try {
        // @ts-ignore
        container.register({ provide: token, __stack: new Error().stack, useValue: { a: 1 } });
      } catch (e) {
        expect(e.stack).toMatch(/---- caused by: ----[\s\S]+Container.spec.ts/);
      }
    });

    it('не найден провайдер', () => {
      const container = new Container();
      const token = createToken('tadam');
      container.register({
        provide: token,
        // @ts-ignore
        __stack: new Error().stack,
        useClass: class Sdsa {},
        deps: { jj: 'jj' },
      });
      try {
        container.get(token);
        expect(true).toBe(false);
      } catch (e) {
        expect(e.message).toBe('Token not found "jj" at "tadam"');
        expect(e.type).toBe('NotFound');
        expect(e.stack).toMatch(/---- caused by: ----[\s\S]+Container.spec.ts/);
      }
    });

    it('circular зависимости', () => {
      const container = new Container();
      // A depends on B and B depends on A; factories must never run
      container.register({
        provide: 'A',
        useFactory: (a) => {
          throw new Error('never');
        },
        deps: { B: 'B' },
      });
      container.register({
        provide: 'B',
        useFactory: (a) => {
          throw new Error('never');
        },
        deps: { A: 'A' },
      });
      try {
        container.get('B');
        expect(true).toBe(false);
      } catch (e) {
        expect(e.message).toBe('Circular dep for "B" at "A" < B');
        expect(e.type).toBe('CircularDep');
      }
    });

    it('не верный формат provider', () => {
      const container = new Container();
      expect(() => container.register(undefined as any)).toThrowErrorMatchingInlineSnapshot(
        `"Invalid provider. Проверь что отправляется в DI, сейчас нам приходит не верный формат: \\"undefined\\""`
      );
      expect(() =>
        container.register({ provide: undefined, useValue: 1 })
      ).toThrowErrorMatchingInlineSnapshot(
        `"Invalid provider. Проверь что отправляется в DI, сейчас нам приходит не верный формат: {\\"provide\\":\\"undefined\\",\\"useValue\\":1}"`
      );
      expect(() =>
        // @ts-ignore
        container.register({ provide: 'testio' })
      ).toThrowErrorMatchingInlineSnapshot(
        `"Invalid provider. 
Проверь что отправляется в DI, сейчас нам приходит не верный формат: {\\"provide\\":\\"testio\\"}"`
      );
      try {
        // @ts-ignore
        container.register({ provide: undefined, __stack: new Error().stack, useValue: 1 });
      } catch (e) {
        expect(e.stack).toMatch(/---- caused by: ----[\s\S]+Container.spec.ts/);
      }
    });

    it('вложенная неопциональная зависимость не должна ломать получение зависимости после', () => {
      const container = new Container();
      container.register({
        provide: 'A',
        useFactory: () => {
          return 'success';
        },
        deps: {
          b: { token: 'B', optional: true },
          c: { token: 'C', optional: true },
        },
      });
      container.register({
        provide: 'B',
        useFactory: () => {},
        deps: {
          c: 'C',
        },
      });
      container.register({
        provide: 'C',
        useFactory: () => {},
        deps: {
          // this dependency does not exist, so resolving token C will fail with an error
          d: 'D',
        },
      });
      expect(container.get('A')).toBe('success');
    });
  });

  describe('get optional', () => {
    it('Получение отсутсвующих данных', () => {
      const container = new Container();
      expect(container.get({ token: 'Token', optional: true })).toBe(null);
    });

    it('Получение данных с multi провайдером TCORE-2540', () => {
      const container = new Container();
      container.register({
        provide: 'Token multi',
        multi: true,
        useFactory: () => {},
        deps: { aa: 'aa' },
      });
      expect(() =>
        // @ts-ignore
        container.get({ token: 'Token multi', optional: true })
      ).toThrowErrorMatchingInlineSnapshot(`"Token not found \\"aa\\" at \\"Token multi\\""`);
    });

    it('optional with deep multi error deps', () => {
      const container = new Container();
      container.register({
        provide: 'Multi first',
        multi: true,
        useFactory: ({ dep1 }) => {
          return dep1;
        },
        deps: { dep1: 'Multi second' },
      });
      container.register({
        provide: 'Multi second',
        multi: true,
        useFactory: () => {
          return 'Multi second value 1';
        },
        deps: { dep1: 'Multi error' },
      });
      container.register({
        provide: 'Multi error',
        multi: true,
        useFactory: () => {},
        deps: { dep1: { token: 'token not found', optional: false } },
      });
      expect(() =>
        container.get({ token: 'Multi first', optional: true })
      ).toThrowErrorMatchingInlineSnapshot(
        `"Token not found \\"token not found\\" at \\"Multi error\\" < Multi second < Multi first"`
      );
    });

    it('optional with nothing', () => {
      const container = new Container();
      expect(container.get({ token: 'not', optional: true })).toBe(null);
    });
  });

  describe('correct error message on hydration should be returned when', () => {
    it('one level of modules', () => {
      const container = new Container([
        {
          provide: 'test_module_token',
          useFactory: () => {
            throw new Error('Test error');
          },
        },
      ]);
      expect(() => {
        container.get('test_module_token');
      }).toThrow('Test error at "test_module_token"');
    });

    it('two levels of modules', () => {
      const container = new Container([
        {
          provide: 'dependant',
          deps: {
            dep: 'dependency',
          },
          useFactory: () => {},
        },
        {
          provide: 'dependency',
          useFactory: () => {
            throw new Error('Some error');
          },
        },
      ]);
      expect(() => {
        container.get('dependant');
      }).toThrow('Some error at "dependency" < dependant');
    });
  });

  describe('fallback', () => {
    it('base case', () => {
      const mockFactory = jest.fn(() => 'mock');
      const fallback = new Container();
      fallback.register({
        provide: 'test',
        useFactory: mockFactory,
      });
      const container = new Container([], fallback);
      // an unknown token is resolved through the fallback container
      expect(container.get('test')).toBe('mock');
    });
  });

  describe('borrowing tokens', () => {
    it('base', () => {
      const from = new Container([
        {
          provide: 'dep',
          useValue: 'from',
        },
        {
          provide: 'test',
          useFactory: ({ dep }) => {
            return `test-${dep}`;
          },
          deps: {
            dep: 'dep',
          },
        },
      ]);
      const container = new Container(
        [
          {
            provide: 'dep',
            useValue: 'container',
          },
        ],
        from
      );
      // before borrowing, 'test' resolves in `from` with `from`'s own deps
      expect(container.get('test')).toBe('test-from');
      container.borrowToken(from, 'test');
      // after borrowing, the same factory resolves deps in `container`
      expect(container.get('test')).toBe('test-container');
    });
  });
});
/* eslint-enable jest/no-try-expect */
// NOTE(review): the matching eslint-disable is not visible at the top of this
// file — confirm it exists, otherwise this enable comment is a no-op.
the_stack
// Continuation-passing GUI combinators.  Each combinator wraps a React
// component that runs an inner process `p` of type C<A> and forwards its
// results to a continuation (`cont`) supplied through CmdCommon props.
// NOTE(review): relies on legacy lifecycle methods (componentWillMount /
// componentWillReceiveProps), deprecated since React 16.3 — confirm the
// pinned React version before upgrading.
import * as React from "react"
import * as ReactDOM from "react-dom"
import * as Immutable from "immutable"
import * as i18next from 'i18next'
import {C, Mode, Cont, CmdCommon, Context, make_C, unit, bind} from './core'
import {div, a} from './html'
import {bool} from './primitives'

// Props for each combinator component; `kind` is the discriminating tag.
export type RepeatProps<A> = { kind:"repeat", value:A, p:(_:A)=>C<A> } & CmdCommon<A>
export type AllProps<A> = { kind:"all", ps:Array<C<A>> } & CmdCommon<Array<A>>
export type AnyProps<A,B> = { kind:"any", value:A, ps:Array<(_:A)=>C<B>>, className:string } & CmdCommon<B>
export type NeverProps<A,B> = { kind:"never", p:C<A> } & CmdCommon<B>
export type RetractProps<A,B> = { kind:"retract", inb:(_:A)=>B, out:(_:A)=>(_:B)=>A, p:(_:B)=>C<B>, value:A } & CmdCommon<A>
export type DelayProps<A> = { kind:"delay", dt:number, value:A, p:(_:A)=>C<A> } & CmdCommon<A>
export type WaitProps<A> = { kind:"wait", dt:number, value:A, p:(_:A)=>C<A> } & CmdCommon<A>
// How lift_promise reacts to a rejected promise.
export type RetryStrategy<A> = "never" | "semi exponential" | { kind:"retry then show failure", times:number, on_failure: C<A> } | { kind : "never" , on_failure: C<A> }
export type LiftPromiseProps<A,B> = { kind:"lift promise", p:(_:B)=>Promise<A>, retry_strategy:RetryStrategy<A>, value:B } & CmdCommon<A>
export type SimpleMenuType = "side menu" | { kind:"tabs", max_tabs:number }

type RepeatState<A> = { current_value:A, frame_index:number }

// Runs `p` over and over: each value produced by `p` becomes the input of the
// next iteration, and is also forwarded to the outer continuation.
class Repeat<A> extends React.Component<RepeatProps<A>,RepeatState<A>> {
  constructor(props:RepeatProps<A>,context:any) {
    super(props, context)
    this.state = { current_value: props.value, frame_index:1 }
  }
  // set on unmount so late continuation callbacks do not call setState
  stopped:boolean = false
  componentWillUnmount() {
    this.stopped = true
  }
  componentWillMount() {
    this.stopped = false
  }
  render() {
    this.props.debug_info && console.log("Render:", this.props.debug_info(), this.state.current_value)
    return this.props.p(this.state.current_value).comp(this.props.context)(callback => new_value => {
      if (this.stopped) return
      // store the new value (triggering a re-render of p), then notify the outer continuation
      return this.setState({...this.state, frame_index:this.state.frame_index+1, current_value:new_value}, () => this.props.cont(callback)(new_value))
    })
  }
}

// repeat(key?)(p)(v0): the looping combinator as a C<A> value.
export let repeat = function<A>(key?:string, dbg?:() => string) : ((p:(_:A)=>C<A>) => (_:A) => C<A>) {
  return p => initial_value => make_C<A>(ctxt => cont =>
    React.createElement<RepeatProps<A>>(Repeat, ({ kind:"repeat", debug_info:dbg, p:p as (_:A)=>C<A>, value:initial_value, context:ctxt, cont:cont, key:key })))
}

type AnyState<A,B> = { ps:"creating"|Array<JSX.Element> }

// Runs all processes in parallel; whichever yields a value first (or at any
// time) forwards it to the continuation.
class Any<A,B> extends React.Component<AnyProps<A,B>,AnyState<A,B>> {
  constructor(props:AnyProps<A,B>,context:any) {
    super(props, context)
    this.state = { ps:"creating" }
  }
  componentWillReceiveProps(new_props:AnyProps<A,B>) {
    this.setState({...this.state, ps:new_props.ps.map(p => p(new_props.value).comp(new_props.context)(callback => new_value => new_props.cont(callback)(new_value)))})
  }
  componentWillMount() {
    this.setState({...this.state, ps:this.props.ps.map(p => p(this.props.value).comp(this.props.context)(callback => new_value => this.props.cont(callback)(new_value)))})
  }
  render() {
    // kept on one line: the spaces around the braces are significant JSX text nodes
    return <div className={this.props.className}> { this.state.ps != "creating" ? this.state.ps : null } </div>
  }
}

export let any = function<A,B>(key?:string, className?:string, dbg?:() => string) : ((ps:Array<(_:A)=>C<B>>) => (_:A) => C<B>) {
  return ps => initial_value => make_C<B>(ctxt => cont =>
    React.createElement<AnyProps<A,B>>(Any, { kind:"any", debug_info:dbg, ps:ps, value:initial_value, context:ctxt, cont:cont, key:key, className:className }))
}

type NeverState<A,B> = { p:"loading"|JSX.Element }

// Runs `p` but swallows its results: the continuation passed to `p` is a no-op,
// so the outer continuation is never invoked.
class Never<A,B> extends React.Component<NeverProps<A,B>,NeverState<A,B>> {
  constructor(props:NeverProps<A,B>,context:any) {
    super(props, context)
    this.state = { p:"loading" }
  }
  componentWillReceiveProps(new_props:NeverProps<A,B>) {
    this.setState({...this.state, p:new_props.p.comp(new_props.context)(callback => new_value => {})})
  }
  componentWillMount() {
    this.setState({...this.state, p:this.props.p.comp(this.props.context)(callback => new_value => {})})
  }
  render(): JSX.Element | JSX.Element[] {
    return this.state.p != "loading" ?
      this.state.p
    : []
  }
}

export let never = function<A,B>(p:C<A>, key?:string) : C<B> {
  return make_C<B>(ctxt => cont =>
    React.createElement<NeverProps<A,B>>(Never, { kind:"never", p:p, context:ctxt, cont:cont, key:key, debug_info:undefined }))
}

type AllState<A> = { results:Immutable.Map<number,A>, ps:"creating"|Array<JSX.Element> }

// Runs all processes in parallel and calls the continuation with the array of
// all results (in process order) once every process has produced one.
class All<A> extends React.Component<AllProps<A>,AllState<A>> {
  constructor(props:AllProps<A>,context:any) {
    super(props, context)
    this.state = { results:Immutable.Map<number,A>(), ps:"creating" }
  }
  componentWillReceiveProps(new_props:AllProps<A>) {
    this.setState({...this.state, ps:new_props.ps.map((p,p_i) =>
      p.comp(new_props.context)(callback => result =>
        this.setState({...this.state, results:this.state.results.set(p_i, result) }, () => {
          // fire the continuation only when every index 0..n-1 has reported a result
          if (this.state.results.keySeq().toSet().equals(Immutable.Range(0, new_props.ps.length).toSet())) {
            let results = this.state.results.sortBy((r,r_i) => r_i).toArray()
            // reset the accumulator so a new round can start
            this.setState({...this.state, results:Immutable.Map<number,A>()}, () =>
              new_props.cont(callback)(results))
          }
        })
      ))})
  }
  componentWillMount() {
    this.setState({...this.state, ps:this.props.ps.map((p,p_i) =>
      p.comp(this.props.context)(callback => result =>
        this.setState({...this.state, results:this.state.results.set(p_i, result) }, () => {
          if (this.state.results.keySeq().toSet().equals(Immutable.Range(0, this.props.ps.length).toSet())) {
            let results = this.state.results.sortBy((r,r_i) => r_i).toArray()
            this.setState({...this.state, results:Immutable.Map<number,A>()}, () =>
              this.props.cont(callback)(results))
          }
        })
      ))}) 
  }
  render() {
    // kept on one line: the spaces around the braces are significant JSX text nodes
    return <div> { this.state.ps != "creating" ? this.state.ps : null } </div>
  }
}

export let all = function<A>(ps:Array<C<A>>, key?:string, dbg?:() => string) : C<Array<A>> {
  return make_C<A[]>(ctxt => cont =>
    React.createElement<AllProps<A>>(All, { kind:"all", debug_info:dbg, ps:ps, context:ctxt, cont:cont, key:key }))
}

type RetractState<A,B> = { p:"creating"|JSX.Element }

// Lens-like adapter: runs a process over B while the outer world speaks A,
// using `inb` (A -> B) to project in and `out` (A -> B -> A) to merge back.
class Retract<A,B> extends React.Component<RetractProps<A,B>,RetractState<A,B>> {
  constructor(props:RetractProps<A,B>,context:any) {
    super(props, context)
    this.state = { p:"creating" }
  }
  componentWillReceiveProps(new_props:RetractProps<A,B>) {
    this.setState({...this.state, p:new_props.p(new_props.inb(new_props.value)).comp(new_props.context)
      (callback => new_value => new_props.cont(callback)
        (new_props.out(new_props.value)(new_value)))})
  }
  componentWillMount() {
    this.setState({...this.state, p:this.props.p(this.props.inb(this.props.value)).comp(this.props.context)
      (callback => new_value => this.props.cont(callback)
        (this.props.out(this.props.value)(new_value)))})
  }
  render(): JSX.Element | JSX.Element[] {
    return this.state.p != "creating" ?
      this.state.p
    : []
  }
}

export let retract = function<A,B>(key?:string, dbg?:() => string) : ((inb:(_:A)=>B, out:(_:A)=>(_:B)=>A, p:(_:B)=>C<B>) => (_:A) => C<A>) {
  return (inb, out, p) => (initial_value:A) => make_C<A>(ctxt => (cont:Cont<A>) =>
    React.createElement<RetractProps<A,B>>(Retract, { kind:"retract", debug_info:dbg, inb:inb as (_:A)=>any, out:out as (_:A)=>(_:any)=>A, p:p, value:initial_value, context:ctxt, cont:cont, key:key }))
}

type LiftPromiseState<A,B> = { result:"busy"|"error"| { kind:"failing", failure_renderer:JSX.Element } | A, input:any, retry_count: number }

// Lifts a promise-returning function into a C<A>: shows a busy indicator
// while pending, forwards the resolved value to the continuation, and applies
// the configured RetryStrategy on rejection.
class LiftPromise<A,B> extends React.Component<LiftPromiseProps<A,B>,LiftPromiseState<A,B>> {
  constructor(props:LiftPromiseProps<A,B>,context:any) {
    super(props, context)
    this.state = { result:"busy", input:props.value, retry_count: 0 }
  }
  componentWillReceiveProps(new_props:LiftPromiseProps<A,B>) {
    // (an early-exit when a result was already present was tried here and removed)
    this.props.debug_info && console.log("New props:", this.props.debug_info(), this.state.input, new_props.value)
    this.setState({...this.state, input:new_props.value}, () => this.load(new_props))
  }
  // current backoff delay in ms for retrying strategies
  wait_time:number = 500
  stopped:boolean = false
  load(props:LiftPromiseProps<A,B>) {
    if (this.stopped) return
    this.setState({...this.state, result:"busy"}, () =>
      props.p(this.state.input).then(x => {
        this.wait_time = 500
        if (this.props.debug_info) console.log("Promise done:", this.props.debug_info())
        if (this.stopped) return
        this.setState({...this.state, result:x}, () => props.cont(() => null)(x))
      })
      .catch(() => {
        if (props.retry_strategy == "never") {
          if (this.stopped) return
          this.setState({...this.state, result:"error"})
        } else if (props.retry_strategy == "semi exponential") {
          // NOTE(review): Math.max makes the delay jump to at least 2500ms and
          // then grow unboundedly by 1.5x; a cap via Math.min may have been
          // intended — confirm.
          this.wait_time = Math.floor(Math.max(this.wait_time * 1.5, 2500))
          setTimeout(() => this.load(props), this.wait_time)
        } else if (props.retry_strategy.kind == "retry then show failure") {
          if (this.stopped) return
          if (this.state.retry_count < props.retry_strategy.times) {
            this.setState({...this.state, retry_count: this.state.retry_count+1 })
            setTimeout(() => this.load(props), this.wait_time)
          } else {
            // retries exhausted: render the configured failure process instead
            let failedJSX : JSX.Element = props.retry_strategy.on_failure.comp(props.context)(props.cont)
            this.setState({...this.state, retry_count:0, result:{ kind:"failing", failure_renderer:failedJSX } })
          }
        } else if (props.retry_strategy.kind == "never") {
          if (this.stopped) return
          let failedJSX : JSX.Element = props.retry_strategy.on_failure.comp(props.context)(props.cont)
          this.setState({...this.state, result:{ kind:"failing", failure_renderer:failedJSX } })
        }
      }))
  }
  componentWillUnmount() {
    this.stopped = true
  }
  componentWillMount() {
    this.stopped = false
    this.props.debug_info && console.log("Mount:", this.props.debug_info())
    this.load(this.props)
  }
  render(): JSX.Element | JSX.Element[] {
    this.props.debug_info && console.log("Render:", this.props.debug_info())
    return this.state.result == "busy" ?
      <div className="busy">{i18next.t("busy")}</div>
    : this.state.result == "error" ?
      <div className="error">{i18next.t("error")}</div>
    : this.state.result != undefined && this.state.result.hasOwnProperty('kind') && (this.state.result as any).kind === "failing" ?
      (this.state.result as any).failure_renderer
    : []
  }
}

export let lift_promise = function<A,B>(p:(_:A) => Promise<B>, retry_strategy:RetryStrategy<B>, key?:string, dbg?:() => string) : ((_:A)=>C<B>) {
  return x => make_C<B>(ctxt => cont =>
    React.createElement<LiftPromiseProps<B,A>>(LiftPromise, { kind:"lift promise", debug_info:dbg, value:x, retry_strategy:retry_strategy, p:p, context:ctxt, cont:cont, key:key }))
}

type DelayState<A> = { status:"dirty"|"waiting", value:A, last_command:JSX.Element }

// Debounce-like combinator: polls every `dt` ms and only re-runs `p` with the
// latest value when new props marked the state "dirty" since the last tick.
class Delay<A> extends React.Component<DelayProps<A>,DelayState<A>> {
  constructor(props:DelayProps<A>,context:any) {
    super(props, context)
    this.state = { status:"dirty", value:props.value, last_command:props.p(props.value).comp(props.context)(props.cont) }
  }
  running:boolean = false
  componentWillMount() {
    if (this.running) return
    this.running = true
    var self = this
    // self-rescheduling timer; stops when `running` is cleared on unmount
    let process = () =>
      setTimeout(() => {
        if (self.state.status == "dirty") {
          if (!this.running) return
          self.setState({...self.state, status:"waiting", last_command:self.props.p(self.state.value).comp(this.props.context)(callback => new_value => {
            self.props.cont(callback)(new_value)
          })})
          process()
        } else {
          if (self.running)
            process()
        }
      }, self.props.dt)
    process()
  }
  componentWillUnmount() {
    this.running = false
  }
  componentWillReceiveProps(new_props:DelayProps<A>) {
    // new input: mark dirty so the next tick re-runs p
    this.setState({...this.state, value: new_props.value, status:"dirty"})
  }
  render() {
    return this.state.last_command
  }
}

export let delay = function<A>(dt:number, key?:string, dbg?:() => string) : (p:(_:A)=>C<A>) => ((_:A) => C<A>) {
  return p => initial_value => make_C<A>(ctxt => cont =>
    React.createElement<DelayProps<A>>(Delay, { kind:"delay", debug_info:dbg, dt:dt, p:p as (_:A)=>C<A>, value:initial_value, context:ctxt, cont:cont, key:key }))
}

type WaitState<A> = { status:"open"|"closed", last_command:JSX.Element }

// Renders nothing for `dt` ms, then runs `p` once; re-opens (and re-arms the
// timer via componentDidUpdate) whenever new props arrive.
class Wait<A> extends React.Component<WaitProps<A>,WaitState<A>> {
  constructor(props:WaitProps<A>,context:any) {
    super(props, context)
    this.state = { status:"open", last_command: null }
  }
  running:boolean = false
  // close the window: instantiate p and start rendering it
  end_process() {
    if (!this.running) return
    this.setState({...this.state, status: "closed", last_command: this.props.p(this.props.value).comp(this.props.context)(callback => new_value => this.props.cont(callback)(new_value))})
  }
  // NOTE(review): empty and never called from live code — looks like leftover
  // scaffolding; confirm before removing.
  process() {
  }
  componentWillMount() {
    if (this.running) return
    this.running = true
    setTimeout(() => this.end_process(),this.props.dt)
  }
  componentWillUnmount() {
    this.running = false
  }
  componentDidUpdate(prevProps: WaitProps<A>,prevState: WaitState<A>) {
    // re-opened by new props after having closed: start a fresh wait
    if (prevState.status == 'closed' && this.state.status == "open") {
      setTimeout(() => this.end_process(),this.props.dt)
    }
  }
  componentWillReceiveProps(new_props:WaitProps<A>) {
    // (earlier experimental debounce logic removed; kept behavior: just re-open)
    this.setState({...this.state, status: 'open'})
  }
  render() {
    return this.state.last_command
  }
}

export let waiting = function<A>(dt:number, key?:string, dbg?:() => string) : (p:(_:A)=>C<A>) => ((_:A) => C<A>) {
  return p => initial_value => make_C<A>(ctxt => cont =>
    React.createElement<WaitProps<A>>(Wait, { kind:"wait", debug_info:dbg, dt:dt, p:p as (_:A)=>C<A>, value:initial_value, context:ctxt, cont:cont, key:key }))
}

// Helpers for building menu entry values for simple_menu.
export let mk_submenu_entry = function<A>(label:string, children:Array<MenuEntryValue<A>>) : MenuEntrySubMenu<A> {
  return { kind:"sub menu", label:label, children:children }
}
export let mk_menu_entry = function<A>(v:A) : MenuEntryValue<A> {
  return { kind:"item", value:v }
}
export type MenuEntryValue<A> = { kind:"item", value:A }
export type MenuEntrySubMenu<A> = { kind:"sub menu", label:string, children:Array<MenuEntryValue<A>> }
export type MenuEntry<A> = MenuEntryValue<A> | MenuEntrySubMenu<A>

// simple_menu continues past the end of this chunk; body reproduced as-is.
export let simple_menu = function<A,B>(type:SimpleMenuType, to_string:(_:A)=>string, key?:string, dbg?:() => string) : ((items:Array<MenuEntry<A>>, p:(_:A)=>C<B>, selected_item?:A, selected_sub_menu?:string) => C<B>) {
  type ShownRange = { first:number, amount:number }
  type MenuState = { selected:{ kind:"nothing" } | { kind:"item", value:A }
    sub_selected : { kind:"nothing" } | { kind:"sub
menu", label:string } last_action:{kind:"init"|"selection"}|{kind:"p", p_res:B }, shown_range:undefined|ShownRange } let content_menu_class:string, content_class:string, menu_class:string, entries_class:string, entry_class:string, sub_entry_class:string if (type == "side menu") { content_menu_class = "monadic-content-with-menu" content_class = "monadic-content" menu_class = "monadic-content-menu" entries_class = "monadic-content-menu__entries" entry_class = "monadic-content-menu__entry" sub_entry_class = "monadic-content-menu__sub-entry" } else { content_menu_class = "monadic-content-with-tabs" content_class = "monadic-content" menu_class = "monadic-tabs" entries_class = "monadic-tabs__entries" entry_class = "monadic-tabs__entry" sub_entry_class = "monadic-tabs__sub-entry" } return (items_array, p, selected_item:undefined|A, selected_sub_menu:undefined|string) => { let items = Immutable.List<MenuEntry<A>>(items_array) let entries : (s:MenuState) => Array<(_:MenuState) => C<MenuState>> = (s:MenuState) => (type != "side menu" && s.shown_range.first > 0 ? [s => div<MenuState,MenuState>(`${entry_class} monadic-prev-tab`)(a<MenuState>("<"))({...s, shown_range:{...s.shown_range, first:s.shown_range.first-1}})] : []).concat( items.map((item, i) => { return (s:MenuState) => item.kind == "item" ? div<MenuState, MenuState>(`${entry_class} ${s.selected.kind == "item" && item.value == s.selected.value ? 
` ${entry_class}--active` : ""}`, to_string(item.value))( a<MenuState>(to_string(item.value), undefined, undefined, false, undefined) )({...s, sub_selected:{ kind:"nothing" }, selected:item, last_action:{kind:"selection"} }) : any<MenuState, MenuState>(item.label)([ (s:MenuState) => div<MenuState, MenuState>(`${entry_class} `, item.label)( a<MenuState>(item.label, undefined, undefined, false, undefined) )({...s, sub_selected:item, last_action:{kind:"selection"} }) ].concat( (s.sub_selected.kind == "sub menu" && item.label == s.sub_selected.label) || (s.selected.kind == "item" && item.children.some(c => s.selected.kind == "item" && c.value == s.selected.value)) ? item.children.map(c => (s:MenuState) => div<MenuState, MenuState>(`${sub_entry_class} ${s.selected.kind == "item" && c.value == s.selected.value ? ` ${sub_entry_class}--active` : ""}`, to_string(c.value))( a<MenuState>(to_string(c.value), undefined, undefined, false, undefined) )({...s, sub_selected:item, selected:c, last_action:{kind:"selection"} }) ) : [] ))(s) }).filter((i, i_i) => type == "side menu" || i_i >= s.shown_range.first && (i_i - s.shown_range.first) < s.shown_range.amount) .concat( type != "side menu" && s.shown_range.first + s.shown_range.amount < items.count() ? [s => div<MenuState,MenuState>(`${entry_class} monadic-next-tab`)(a<MenuState>(">"))({...s, shown_range:{...s.shown_range, first:s.shown_range.first+1}})] : []) .toArray()) return repeat<MenuState>()( div<MenuState, MenuState>()( any<MenuState, MenuState>(undefined, content_menu_class)( [ div<MenuState, MenuState>(menu_class, menu_class)( s => any<MenuState, MenuState>(undefined, entries_class)(entries(s))(s)), div<MenuState, MenuState>(content_class, content_class)( (s:MenuState) => s.selected.kind == "item" ? p(s.selected.value).then<MenuState>(undefined, (p_res:B) => unit<MenuState>({...s, last_action:{ kind:"p", p_res:p_res }})) : unit<MenuState>(s).never<MenuState>()) ] ) ))({ selected:selected_item == undefined ? 
{ kind:"nothing" } : { kind:"item", value:selected_item }, sub_selected:selected_sub_menu == undefined ? { kind:"nothing" } : { kind:"sub menu", label:selected_sub_menu }, last_action:{ kind:"init" }, shown_range:type=="side menu" ? undefined : { first:0, amount:type.max_tabs } }) .filter(s => s.last_action.kind != "p") .map<B>(s => s.last_action.kind == "p" && s.last_action.p_res) } } export let custom = function<A>(key?:string, dbg?:() => string) : (render:(ctxt:()=>Context) => (_:Cont<A>) => JSX.Element) => C<A> { return (render) => make_C<A>(ctxt => cont => render(ctxt)(cont)) } export let hide = (f_name:string, f:C<void>) => repeat<boolean>()(visible => bool("edit", "plus/minus")(visible))(false).then(`${f_name} toggle`, visible => !visible ? unit<void>(null) : f.then(`visible ${f_name}`, _ => unit<void>(null)))
the_stack
import { PagedAsyncIterableIterator } from "@azure/core-paging"; import { Accounts } from "../operationsInterfaces"; import * as coreClient from "@azure/core-client"; import * as Mappers from "../models/mappers"; import * as Parameters from "../models/parameters"; import { CognitiveServicesManagementClient } from "../cognitiveServicesManagementClient"; import { PollerLike, PollOperationState, LroEngine } from "@azure/core-lro"; import { LroImpl } from "../lroImpl"; import { Account, AccountsListByResourceGroupNextOptionalParams, AccountsListByResourceGroupOptionalParams, AccountsListNextOptionalParams, AccountsListOptionalParams, AccountsCreateOptionalParams, AccountsCreateResponse, AccountsUpdateOptionalParams, AccountsUpdateResponse, AccountsDeleteOptionalParams, AccountsGetOptionalParams, AccountsGetResponse, AccountsListByResourceGroupResponse, AccountsListResponse, AccountsListKeysOptionalParams, AccountsListKeysResponse, KeyName, AccountsRegenerateKeyOptionalParams, AccountsRegenerateKeyResponse, AccountsListSkusOptionalParams, AccountsListSkusResponse, AccountsListUsagesOptionalParams, AccountsListUsagesResponse, AccountsListByResourceGroupNextResponse, AccountsListNextResponse } from "../models"; /// <reference lib="esnext.asynciterable" /> /** Class containing Accounts operations. */ export class AccountsImpl implements Accounts { private readonly client: CognitiveServicesManagementClient; /** * Initialize a new instance of the class Accounts class. * @param client Reference to the service client */ constructor(client: CognitiveServicesManagementClient) { this.client = client; } /** * Returns all the resources of a particular type belonging to a resource group * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param options The options parameters. 
*/ public listByResourceGroup( resourceGroupName: string, options?: AccountsListByResourceGroupOptionalParams ): PagedAsyncIterableIterator<Account> { const iter = this.listByResourceGroupPagingAll(resourceGroupName, options); return { next() { return iter.next(); }, [Symbol.asyncIterator]() { return this; }, byPage: () => { return this.listByResourceGroupPagingPage(resourceGroupName, options); } }; } private async *listByResourceGroupPagingPage( resourceGroupName: string, options?: AccountsListByResourceGroupOptionalParams ): AsyncIterableIterator<Account[]> { let result = await this._listByResourceGroup(resourceGroupName, options); yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { result = await this._listByResourceGroupNext( resourceGroupName, continuationToken, options ); continuationToken = result.nextLink; yield result.value || []; } } private async *listByResourceGroupPagingAll( resourceGroupName: string, options?: AccountsListByResourceGroupOptionalParams ): AsyncIterableIterator<Account> { for await (const page of this.listByResourceGroupPagingPage( resourceGroupName, options )) { yield* page; } } /** * Returns all the resources of a particular type belonging to a subscription. * @param options The options parameters. 
*/ public list( options?: AccountsListOptionalParams ): PagedAsyncIterableIterator<Account> { const iter = this.listPagingAll(options); return { next() { return iter.next(); }, [Symbol.asyncIterator]() { return this; }, byPage: () => { return this.listPagingPage(options); } }; } private async *listPagingPage( options?: AccountsListOptionalParams ): AsyncIterableIterator<Account[]> { let result = await this._list(options); yield result.value || []; let continuationToken = result.nextLink; while (continuationToken) { result = await this._listNext(continuationToken, options); continuationToken = result.nextLink; yield result.value || []; } } private async *listPagingAll( options?: AccountsListOptionalParams ): AsyncIterableIterator<Account> { for await (const page of this.listPagingPage(options)) { yield* page; } } /** * Create Cognitive Services Account. Accounts is a resource group wide resource type. It holds the * keys for developer to access intelligent APIs. It's also the resource type for billing. * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param accountName The name of Cognitive Services account. * @param account The parameters to provide for the created account. * @param options The options parameters. 
*/ async beginCreate( resourceGroupName: string, accountName: string, account: Account, options?: AccountsCreateOptionalParams ): Promise< PollerLike< PollOperationState<AccountsCreateResponse>, AccountsCreateResponse > > { const directSendOperation = async ( args: coreClient.OperationArguments, spec: coreClient.OperationSpec ): Promise<AccountsCreateResponse> => { return this.client.sendOperationRequest(args, spec); }; const sendOperation = async ( args: coreClient.OperationArguments, spec: coreClient.OperationSpec ) => { let currentRawResponse: | coreClient.FullOperationResponse | undefined = undefined; const providedCallback = args.options?.onResponse; const callback: coreClient.RawResponseCallback = ( rawResponse: coreClient.FullOperationResponse, flatResponse: unknown ) => { currentRawResponse = rawResponse; providedCallback?.(rawResponse, flatResponse); }; const updatedArgs = { ...args, options: { ...args.options, onResponse: callback } }; const flatResponse = await directSendOperation(updatedArgs, spec); return { flatResponse, rawResponse: { statusCode: currentRawResponse!.status, body: currentRawResponse!.parsedBody, headers: currentRawResponse!.headers.toJSON() } }; }; const lro = new LroImpl( sendOperation, { resourceGroupName, accountName, account, options }, createOperationSpec ); return new LroEngine(lro, { resumeFrom: options?.resumeFrom, intervalInMs: options?.updateIntervalInMs }); } /** * Create Cognitive Services Account. Accounts is a resource group wide resource type. It holds the * keys for developer to access intelligent APIs. It's also the resource type for billing. * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param accountName The name of Cognitive Services account. * @param account The parameters to provide for the created account. * @param options The options parameters. 
*/ async beginCreateAndWait( resourceGroupName: string, accountName: string, account: Account, options?: AccountsCreateOptionalParams ): Promise<AccountsCreateResponse> { const poller = await this.beginCreate( resourceGroupName, accountName, account, options ); return poller.pollUntilDone(); } /** * Updates a Cognitive Services account * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param accountName The name of Cognitive Services account. * @param account The parameters to provide for the created account. * @param options The options parameters. */ async beginUpdate( resourceGroupName: string, accountName: string, account: Account, options?: AccountsUpdateOptionalParams ): Promise< PollerLike< PollOperationState<AccountsUpdateResponse>, AccountsUpdateResponse > > { const directSendOperation = async ( args: coreClient.OperationArguments, spec: coreClient.OperationSpec ): Promise<AccountsUpdateResponse> => { return this.client.sendOperationRequest(args, spec); }; const sendOperation = async ( args: coreClient.OperationArguments, spec: coreClient.OperationSpec ) => { let currentRawResponse: | coreClient.FullOperationResponse | undefined = undefined; const providedCallback = args.options?.onResponse; const callback: coreClient.RawResponseCallback = ( rawResponse: coreClient.FullOperationResponse, flatResponse: unknown ) => { currentRawResponse = rawResponse; providedCallback?.(rawResponse, flatResponse); }; const updatedArgs = { ...args, options: { ...args.options, onResponse: callback } }; const flatResponse = await directSendOperation(updatedArgs, spec); return { flatResponse, rawResponse: { statusCode: currentRawResponse!.status, body: currentRawResponse!.parsedBody, headers: currentRawResponse!.headers.toJSON() } }; }; const lro = new LroImpl( sendOperation, { resourceGroupName, accountName, account, options }, updateOperationSpec ); return new LroEngine(lro, { resumeFrom: options?.resumeFrom, intervalInMs: 
options?.updateIntervalInMs }); } /** * Updates a Cognitive Services account * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param accountName The name of Cognitive Services account. * @param account The parameters to provide for the created account. * @param options The options parameters. */ async beginUpdateAndWait( resourceGroupName: string, accountName: string, account: Account, options?: AccountsUpdateOptionalParams ): Promise<AccountsUpdateResponse> { const poller = await this.beginUpdate( resourceGroupName, accountName, account, options ); return poller.pollUntilDone(); } /** * Deletes a Cognitive Services account from the resource group. * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param accountName The name of Cognitive Services account. * @param options The options parameters. */ async beginDelete( resourceGroupName: string, accountName: string, options?: AccountsDeleteOptionalParams ): Promise<PollerLike<PollOperationState<void>, void>> { const directSendOperation = async ( args: coreClient.OperationArguments, spec: coreClient.OperationSpec ): Promise<void> => { return this.client.sendOperationRequest(args, spec); }; const sendOperation = async ( args: coreClient.OperationArguments, spec: coreClient.OperationSpec ) => { let currentRawResponse: | coreClient.FullOperationResponse | undefined = undefined; const providedCallback = args.options?.onResponse; const callback: coreClient.RawResponseCallback = ( rawResponse: coreClient.FullOperationResponse, flatResponse: unknown ) => { currentRawResponse = rawResponse; providedCallback?.(rawResponse, flatResponse); }; const updatedArgs = { ...args, options: { ...args.options, onResponse: callback } }; const flatResponse = await directSendOperation(updatedArgs, spec); return { flatResponse, rawResponse: { statusCode: currentRawResponse!.status, body: currentRawResponse!.parsedBody, headers: 
currentRawResponse!.headers.toJSON() } }; }; const lro = new LroImpl( sendOperation, { resourceGroupName, accountName, options }, deleteOperationSpec ); return new LroEngine(lro, { resumeFrom: options?.resumeFrom, intervalInMs: options?.updateIntervalInMs }); } /** * Deletes a Cognitive Services account from the resource group. * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param accountName The name of Cognitive Services account. * @param options The options parameters. */ async beginDeleteAndWait( resourceGroupName: string, accountName: string, options?: AccountsDeleteOptionalParams ): Promise<void> { const poller = await this.beginDelete( resourceGroupName, accountName, options ); return poller.pollUntilDone(); } /** * Returns a Cognitive Services account specified by the parameters. * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param accountName The name of Cognitive Services account. * @param options The options parameters. */ get( resourceGroupName: string, accountName: string, options?: AccountsGetOptionalParams ): Promise<AccountsGetResponse> { return this.client.sendOperationRequest( { resourceGroupName, accountName, options }, getOperationSpec ); } /** * Returns all the resources of a particular type belonging to a resource group * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param options The options parameters. */ private _listByResourceGroup( resourceGroupName: string, options?: AccountsListByResourceGroupOptionalParams ): Promise<AccountsListByResourceGroupResponse> { return this.client.sendOperationRequest( { resourceGroupName, options }, listByResourceGroupOperationSpec ); } /** * Returns all the resources of a particular type belonging to a subscription. * @param options The options parameters. 
*/ private _list( options?: AccountsListOptionalParams ): Promise<AccountsListResponse> { return this.client.sendOperationRequest({ options }, listOperationSpec); } /** * Lists the account keys for the specified Cognitive Services account. * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param accountName The name of Cognitive Services account. * @param options The options parameters. */ listKeys( resourceGroupName: string, accountName: string, options?: AccountsListKeysOptionalParams ): Promise<AccountsListKeysResponse> { return this.client.sendOperationRequest( { resourceGroupName, accountName, options }, listKeysOperationSpec ); } /** * Regenerates the specified account key for the specified Cognitive Services account. * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param accountName The name of Cognitive Services account. * @param keyName key name to generate (Key1|Key2) * @param options The options parameters. */ regenerateKey( resourceGroupName: string, accountName: string, keyName: KeyName, options?: AccountsRegenerateKeyOptionalParams ): Promise<AccountsRegenerateKeyResponse> { return this.client.sendOperationRequest( { resourceGroupName, accountName, keyName, options }, regenerateKeyOperationSpec ); } /** * List available SKUs for the requested Cognitive Services account * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param accountName The name of Cognitive Services account. * @param options The options parameters. */ listSkus( resourceGroupName: string, accountName: string, options?: AccountsListSkusOptionalParams ): Promise<AccountsListSkusResponse> { return this.client.sendOperationRequest( { resourceGroupName, accountName, options }, listSkusOperationSpec ); } /** * Get usages for the requested Cognitive Services account * @param resourceGroupName The name of the resource group. The name is case insensitive. 
* @param accountName The name of Cognitive Services account. * @param options The options parameters. */ listUsages( resourceGroupName: string, accountName: string, options?: AccountsListUsagesOptionalParams ): Promise<AccountsListUsagesResponse> { return this.client.sendOperationRequest( { resourceGroupName, accountName, options }, listUsagesOperationSpec ); } /** * ListByResourceGroupNext * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param nextLink The nextLink from the previous successful call to the ListByResourceGroup method. * @param options The options parameters. */ private _listByResourceGroupNext( resourceGroupName: string, nextLink: string, options?: AccountsListByResourceGroupNextOptionalParams ): Promise<AccountsListByResourceGroupNextResponse> { return this.client.sendOperationRequest( { resourceGroupName, nextLink, options }, listByResourceGroupNextOperationSpec ); } /** * ListNext * @param nextLink The nextLink from the previous successful call to the List method. * @param options The options parameters. 
*/ private _listNext( nextLink: string, options?: AccountsListNextOptionalParams ): Promise<AccountsListNextResponse> { return this.client.sendOperationRequest( { nextLink, options }, listNextOperationSpec ); } } // Operation Specifications const serializer = coreClient.createSerializer(Mappers, /* isXml */ false); const createOperationSpec: coreClient.OperationSpec = { path: "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CognitiveServices/accounts/{accountName}", httpMethod: "PUT", responses: { 200: { bodyMapper: Mappers.Account }, 201: { bodyMapper: Mappers.Account }, 202: { bodyMapper: Mappers.Account }, 204: { bodyMapper: Mappers.Account }, default: { bodyMapper: Mappers.ErrorResponse } }, requestBody: Parameters.account, queryParameters: [Parameters.apiVersion], urlParameters: [ Parameters.$host, Parameters.resourceGroupName, Parameters.accountName, Parameters.subscriptionId ], headerParameters: [Parameters.contentType, Parameters.accept], mediaType: "json", serializer }; const updateOperationSpec: coreClient.OperationSpec = { path: "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CognitiveServices/accounts/{accountName}", httpMethod: "PATCH", responses: { 200: { bodyMapper: Mappers.Account }, 201: { bodyMapper: Mappers.Account }, 202: { bodyMapper: Mappers.Account }, 204: { bodyMapper: Mappers.Account }, default: { bodyMapper: Mappers.ErrorResponse } }, requestBody: Parameters.account, queryParameters: [Parameters.apiVersion], urlParameters: [ Parameters.$host, Parameters.resourceGroupName, Parameters.accountName, Parameters.subscriptionId ], headerParameters: [Parameters.contentType, Parameters.accept], mediaType: "json", serializer }; const deleteOperationSpec: coreClient.OperationSpec = { path: "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CognitiveServices/accounts/{accountName}", httpMethod: "DELETE", responses: { 200: {}, 201: {}, 
202: {}, 204: {}, default: { bodyMapper: Mappers.ErrorResponse } }, queryParameters: [Parameters.apiVersion], urlParameters: [ Parameters.$host, Parameters.resourceGroupName, Parameters.accountName, Parameters.subscriptionId ], headerParameters: [Parameters.accept], serializer }; const getOperationSpec: coreClient.OperationSpec = { path: "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CognitiveServices/accounts/{accountName}", httpMethod: "GET", responses: { 200: { bodyMapper: Mappers.Account }, default: { bodyMapper: Mappers.ErrorResponse } }, queryParameters: [Parameters.apiVersion], urlParameters: [ Parameters.$host, Parameters.resourceGroupName, Parameters.accountName, Parameters.subscriptionId ], headerParameters: [Parameters.accept], serializer }; const listByResourceGroupOperationSpec: coreClient.OperationSpec = { path: "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CognitiveServices/accounts", httpMethod: "GET", responses: { 200: { bodyMapper: Mappers.AccountListResult }, default: { bodyMapper: Mappers.ErrorResponse } }, queryParameters: [Parameters.apiVersion], urlParameters: [ Parameters.$host, Parameters.resourceGroupName, Parameters.subscriptionId ], headerParameters: [Parameters.accept], serializer }; const listOperationSpec: coreClient.OperationSpec = { path: "/subscriptions/{subscriptionId}/providers/Microsoft.CognitiveServices/accounts", httpMethod: "GET", responses: { 200: { bodyMapper: Mappers.AccountListResult }, default: { bodyMapper: Mappers.ErrorResponse } }, queryParameters: [Parameters.apiVersion], urlParameters: [Parameters.$host, Parameters.subscriptionId], headerParameters: [Parameters.accept], serializer }; const listKeysOperationSpec: coreClient.OperationSpec = { path: "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CognitiveServices/accounts/{accountName}/listKeys", httpMethod: "POST", responses: { 200: { bodyMapper: 
Mappers.ApiKeys }, default: { bodyMapper: Mappers.ErrorResponse } }, queryParameters: [Parameters.apiVersion], urlParameters: [ Parameters.$host, Parameters.resourceGroupName, Parameters.accountName, Parameters.subscriptionId ], headerParameters: [Parameters.accept], serializer }; const regenerateKeyOperationSpec: coreClient.OperationSpec = { path: "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CognitiveServices/accounts/{accountName}/regenerateKey", httpMethod: "POST", responses: { 200: { bodyMapper: Mappers.ApiKeys }, default: { bodyMapper: Mappers.ErrorResponse } }, requestBody: { parameterPath: { keyName: ["keyName"] }, mapper: { ...Mappers.RegenerateKeyParameters, required: true } }, queryParameters: [Parameters.apiVersion], urlParameters: [ Parameters.$host, Parameters.resourceGroupName, Parameters.accountName, Parameters.subscriptionId ], headerParameters: [Parameters.contentType, Parameters.accept], mediaType: "json", serializer }; const listSkusOperationSpec: coreClient.OperationSpec = { path: "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CognitiveServices/accounts/{accountName}/skus", httpMethod: "GET", responses: { 200: { bodyMapper: Mappers.AccountSkuListResult }, default: { bodyMapper: Mappers.ErrorResponse } }, queryParameters: [Parameters.apiVersion], urlParameters: [ Parameters.$host, Parameters.resourceGroupName, Parameters.accountName, Parameters.subscriptionId ], headerParameters: [Parameters.accept], serializer }; const listUsagesOperationSpec: coreClient.OperationSpec = { path: "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CognitiveServices/accounts/{accountName}/usages", httpMethod: "GET", responses: { 200: { bodyMapper: Mappers.UsageListResult }, default: { bodyMapper: Mappers.ErrorResponse } }, queryParameters: [Parameters.apiVersion, Parameters.filter], urlParameters: [ Parameters.$host, Parameters.resourceGroupName, 
Parameters.accountName, Parameters.subscriptionId ], headerParameters: [Parameters.accept], serializer }; const listByResourceGroupNextOperationSpec: coreClient.OperationSpec = { path: "{nextLink}", httpMethod: "GET", responses: { 200: { bodyMapper: Mappers.AccountListResult }, default: { bodyMapper: Mappers.ErrorResponse } }, queryParameters: [Parameters.apiVersion], urlParameters: [ Parameters.$host, Parameters.resourceGroupName, Parameters.subscriptionId, Parameters.nextLink ], headerParameters: [Parameters.accept], serializer }; const listNextOperationSpec: coreClient.OperationSpec = { path: "{nextLink}", httpMethod: "GET", responses: { 200: { bodyMapper: Mappers.AccountListResult }, default: { bodyMapper: Mappers.ErrorResponse } }, queryParameters: [Parameters.apiVersion], urlParameters: [ Parameters.$host, Parameters.subscriptionId, Parameters.nextLink ], headerParameters: [Parameters.accept], serializer };
the_stack
import {
    Provider,
    EProviders,
    ISelectEvent,
    IContextMenuEvent,
    EActions,
    IDoubleclickEvent,
} from './provider';
import { Subject, Observable, Subscription } from 'rxjs';
import { Session } from '../../../../controller/session/session';
import { KeyboardListener } from './keyboard.listener';
import { IMenuItem } from 'src/app/environment/services/standalone/service.contextmenu';
import { Entity } from './entity';
import * as Toolkit from 'chipmunk.client.toolkit';

import TabsSessionsService from '../../../../services/service.sessions.tabs';
import EventsSessionService from '../../../../services/standalone/service.events.session';

// List of selected entity GUIDs, as persisted in the session scope.
type TSelectedEntities = string[];

const PROVIDERS_SCOPE_KEY: string = 'SEARCH_MANAGER_PROVIDERS_SCOPE_KEY';

/**
 * Aggregates several entity providers (filters, charts, ranges, disabled) and
 * coordinates cross-provider concerns: keyboard-driven selection, context
 * menus, double-click, change/reload notifications, and persisting the current
 * selection into the active session's scope.
 */
export class Providers {
    // Unique sender id used to tag selection operations initiated by this
    // holder, so _onSelectionEntity can ignore its own echoes.
    private readonly SENDER = Toolkit.guid();

    // Fixed display/iteration order of provider kinds (see list()).
    private readonly PROVIDERS_ORDER: EProviders[] = [
        EProviders.filters,
        EProviders.charts,
        EProviders.ranges,
        EProviders.disabled,
    ];

    private readonly _providers: Map<EProviders, Provider<any>> = new Map();
    // Session-level subscriptions (released in destroy()).
    private readonly _subscriptions: { [key: string]: Subscription } = {};
    // Per-provider subscriptions, keyed `<event>_<provider-name>` (released in destroy()).
    private readonly _selsubs: { [key: string]: Subscription } = {};
    private readonly _keyboard: KeyboardListener = new KeyboardListener();
    private readonly _subjects: {
        select: Subject<ISelectEvent | undefined>;
        context: Subject<IContextMenuEvent>;
        doubleclick: Subject<IDoubleclickEvent>;
        change: Subject<void>;
    } = {
        select: new Subject(),
        context: new Subject(),
        doubleclick: new Subject(),
        change: new Subject(),
    };
    private _session: Session | undefined;

    constructor() {
        this._session = TabsSessionsService.getActive();
        this._subscriptions.onSessionChange = EventsSessionService.getObservable().onSessionChange.subscribe(
            this._onSessionChange.bind(this),
        );
    }

    /**
     * Releases all subscriptions, destroys every provider and the keyboard
     * listener, and drops the persisted selection from the session scope.
     */
    public destroy() {
        Object.keys(this._subscriptions).forEach((key: string) => {
            this._subscriptions[key].unsubscribe();
        });
        Object.keys(this._selsubs).forEach((key: string) => {
            this._selsubs[key].unsubscribe();
        });
        this._providers.forEach((provider: Provider<any>) => {
            provider.destroy();
        });
        this._keyboard.destroy();
        this._store().drop();
    }

    /** Exposes the aggregated event streams as read-only observables. */
    public getObservable(): {
        select: Observable<ISelectEvent | undefined>;
        context: Observable<IContextMenuEvent>;
        doubleclick: Observable<IDoubleclickEvent>;
        change: Observable<void>;
    } {
        return {
            select: this._subjects.select.asObservable(),
            context: this._subjects.context.asObservable(),
            doubleclick: this._subjects.doubleclick.asObservable(),
            change: this._subjects.change.asObservable(),
        };
    }

    /**
     * Registers a provider under the given kind and wires its events into this
     * holder. Returns false if a provider of that kind is already registered.
     */
    public add(name: EProviders, provider: Provider<any>): boolean {
        if (this._providers.has(name)) {
            return false;
        }
        provider.setKeyboardListener(this._keyboard);
        provider.setProvidersGetter(this.list.bind(this));
        this._selsubs[`selection_${name}`] = provider
            .getObservable()
            .selection.subscribe(this._onSelectionEntity.bind(this));
        this._selsubs[`context_${name}`] = provider
            .getObservable()
            .context.subscribe(this._onContextMenuEvent.bind(this));
        this._selsubs[`doubleclick_${name}`] = provider
            .getObservable()
            .doubleclick.subscribe(this._onDoubleclickEvent.bind(this));
        this._selsubs[`change_${name}`] = provider
            .getObservable()
            .change.subscribe(this._onChange.bind(this));
        this._selsubs[`reload_${name}`] = provider
            .getObservable()
            .reload.subscribe(this._onReload.bind(this));
        this._providers.set(name, provider);
        return true;
    }

    /** All entities of all providers, concatenated in PROVIDERS_ORDER. */
    public all(): any[] {
        let entries: any[] = [];
        this.list().forEach((provider: Provider<any>) => {
            entries = entries.concat(provider.get());
        });
        return entries;
    }

    /** Registered providers in the fixed PROVIDERS_ORDER (missing kinds skipped). */
    public list(): Provider<any>[] {
        const list: Provider<any>[] = [];
        this.PROVIDERS_ORDER.forEach((ref: EProviders) => {
            const provider: Provider<any> | undefined = this._providers.get(ref);
            if (provider !== undefined) {
                list.push(provider);
            }
        });
        return list;
    }

    /**
     * Returns a cross-provider selection API. `single()` resolves the one
     * provider currently holding a single selection (optionally only if the
     * given session is the active one) plus its non-empty neighbours, which
     * next()/prev() use to move the cursor across provider boundaries.
     */
    public select(): {
        next: () => void;
        prev: () => void;
        drop: () => void;
        first: () => void;
        last: () => void;
        single: (
            session?: string,
        ) =>
            | { provider: Provider<any>; next?: Provider<any>; prev?: Provider<any>; guid: string }
            | undefined;
        getEntities: () => Array<Entity<any>>;
        getProviders: () => Array<Provider<any>>;
    } {
        const single: (
            session?: string,
        ) =>
            | { provider: Provider<any>; next?: Provider<any>; prev?: Provider<any>; guid: string }
            | undefined = (session?: string) => {
            // When a session id is given, only answer for the active session.
            if (
                session !== undefined &&
                (this._session === undefined ||
                    (this._session !== undefined && this._session.getGuid() !== session))
            ) {
                return undefined;
            }
            if (this._providers.size === 0) {
                return undefined;
            }
            const providers: Array<Provider<any>> = [];
            let next: Provider<any> | undefined;
            let prev: Provider<any> | undefined;
            Array.from(this._providers.values()).forEach(
                (provider: Provider<any>, i: number, all: Array<Provider<any>>) => {
                    if (provider.select().single() !== undefined) {
                        providers.push(provider);
                        // Nearest following / preceding provider that has entities.
                        for (let k = i + 1; k <= all.length - 1; k += 1) {
                            if (next === undefined && all[k].get().length > 0) {
                                next = all[k];
                            }
                        }
                        for (let k = i - 1; k >= 0; k -= 1) {
                            if (prev === undefined && all[k].get().length > 0) {
                                prev = all[k];
                            }
                        }
                    }
                },
            );
            // Exactly one provider may own the single selection.
            if (providers.length !== 1) {
                return undefined;
            }
            const guid: string | undefined =
                (providers[0] as Provider<any>).select().single()?.getGUID();
            return guid === undefined
                ? undefined
                : {
                      provider: providers[0],
                      next: next,
                      prev: prev,
                      guid: guid,
                  };
        };
        const drop: () => void = () => {
            this._providers.forEach((provider: Provider<any>) => {
                provider.select().drop(this.SENDER);
            });
        };
        const first: () => void = () => {
            if (this._providers.size === 0) {
                return;
            }
            (Array.from(this._providers.values())[0] as Provider<any>).select().first();
        };
        const last: () => void = () => {
            if (this._providers.size === 0) {
                return;
            }
            const entities = Array.from(this._providers.values());
            (entities[entities.length - 1] as Provider<any>).select().last();
        };
        return {
            next: () => {
                if (this._providers.size === 0) {
                    return;
                }
                const sel = single();
                if (sel === undefined) {
                    // No single selection: restart from the very first entity.
                    drop();
                    first();
                } else {
                    // Advance within the provider; on overflow, hop to the next
                    // non-empty provider (or wrap to the first).
                    if (!sel.provider.select().next()) {
                        sel.provider.select().drop();
                        if (sel.next !== undefined) {
                            sel.next.select().first();
                        } else {
                            first();
                        }
                    }
                }
            },
            prev: () => {
                if (this._providers.size === 0) {
                    return;
                }
                const sel = single();
                if (sel === undefined) {
                    drop();
                    last();
                } else {
                    if (!sel.provider.select().prev()) {
                        sel.provider.select().drop();
                        if (sel.prev !== undefined) {
                            sel.prev.select().last();
                        } else {
                            last();
                        }
                    }
                }
            },
            drop: drop,
            first: first,
            last: last,
            single: single,
            getEntities: () => {
                let entities: Entity<any>[] = [];
                this._providers.forEach((provider: Provider<any>) => {
                    entities = entities.concat(provider.select().getEntities());
                });
                return entities;
            },
            getProviders: () => {
                // Providers (in fixed order) that currently have selected entities.
                const list: Provider<any>[] = [];
                this.PROVIDERS_ORDER.forEach((ref: EProviders) => {
                    const provider: Provider<any> | undefined = this._providers.get(ref);
                    if (provider !== undefined && provider.select().getEntities().length !== 0) {
                        list.push(provider);
                    }
                });
                return list;
            },
        };
    }

    /**
     * Edit-mode API: `in()` enters edit mode only when exactly one entity is
     * selected across all providers; `out()` leaves edit mode everywhere.
     */
    public edit(): { in: () => void; out: () => void; } {
        return {
            in: () => {
                let count: number = 0;
                this._providers.forEach((provider: Provider<any>) => {
                    count += provider.select().get().length;
                });
                if (count !== 1) {
                    return;
                }
                this._providers.forEach((provider: Provider<any>) => {
                    if (provider.select().get().length === 1) {
                        provider.edit().in();
                    }
                });
            },
            out: () => {
                this._providers.forEach((provider: Provider<any>) => {
                    provider.edit().out();
                });
            },
        };
    }

    /**
     * Persistence helpers for the current selection, stored in the active
     * session's scope under PROVIDERS_SCOPE_KEY.
     */
    private _store(): {
        load(): TSelectedEntities;
        save(entities: TSelectedEntities): void;
        restore(provider: string): void;
        drop(): void;
    } {
        const self = this;
        return {
            load: () => {
                if (self._session === undefined) {
                    return [];
                }
                const stored: TSelectedEntities | undefined = self._session
                    .getScope()
                    .get<TSelectedEntities>(PROVIDERS_SCOPE_KEY);
                return stored === undefined ? [] : stored.slice();
            },
            save: (entities: TSelectedEntities) => {
                if (self._session === undefined) {
                    return;
                }
                self._session
                    .getScope()
                    .set<TSelectedEntities>(PROVIDERS_SCOPE_KEY, entities.slice());
            },
            restore: (provider: string) => {
                // Re-applies the stored selection to the provider identified by
                // its guid (used after a provider reloads its entities).
                const stored = self._store().load();
                this._providers.forEach((target: Provider<any>) => {
                    if (provider !== target.getGuid()) {
                        return;
                    }
                    target.select().drop(self.SENDER);
                    target.select().apply(self.SENDER, stored);
                    if (stored.length === 1) {
                        const entity = target.get().find((e) => e.getGUID() === stored[0]);
                        entity !== undefined &&
                            this._subjects.select.next({
                                entity: entity,
                                provider: target,
                                guids: stored,
                            });
                    }
                });
                if (stored.length === 0) {
                    this._subjects.select.next(undefined);
                }
            },
            drop: () => {
                if (self._session === undefined) {
                    return;
                }
                self._session.getScope().delete(PROVIDERS_SCOPE_KEY);
            },
        };
    }

    /**
     * Reacts to a selection made inside one provider: without modifier keys the
     * other providers' selections are dropped (single-select); with shift the
     * selection is force-applied everywhere. Emits `select` with the entity
     * when exactly one entity remains selected, otherwise undefined, then
     * persists the resulting guid list.
     */
    private _onSelectionEntity(event: ISelectEvent) {
        if (event.sender === this.SENDER) {
            // Ignore events triggered by holder
            return;
        }
        if (!this._keyboard.ctrl() && !this._keyboard.shift()) {
            this._providers.forEach((provider: Provider<any>) => {
                // Drop selection on all others providers
                if (provider.getGuid() !== event.provider.getGuid()) {
                    provider.select().drop(this.SENDER);
                }
            });
        } else if (this._keyboard.shift()) {
            this._providers.forEach((provider: Provider<any>) => {
                // Force selection
                provider.select().apply(this.SENDER, event.guids);
            });
        }
        let guids: string[] = [];
        this._providers.forEach((provider: Provider<any>) => {
            guids = guids.concat(provider.select().get());
        });
        if (guids.length === 1) {
            this._providers.forEach((provider: Provider<any>) => {
                if (provider.select().get().length === 1) {
                    this._subjects.select.next({
                        entity: event.entity,
                        provider: provider,
                        guids: provider.select().get(),
                    });
                }
            });
        } else {
            this._subjects.select.next(undefined);
        }
        this._providers.forEach((provider: Provider<any>) => {
            provider.setLastSelection(guids.length > 0 ? event.entity : undefined);
        });
        this._store().save(guids);
    }

    /**
     * Builds the context-menu items for the clicked entity. Normalizes the
     * selection first (clicking outside the current selection re-selects the
     * target), then offers Edit / (De)Activate / Remove and their "All"
     * variants only when every involved provider supports the action, and
     * finally appends provider-specific custom items.
     */
    private _onContextMenuEvent(event: IContextMenuEvent) {
        // An action is offered only if every selected provider exposes it.
        const isActionAvailable = (action: EActions, insel: Array<Provider<any>>, _entities: Entity<any>[]) => {
            let count: number = 0;
            insel.forEach((provider: Provider<any>) => {
                (provider.actions(event.entity, _entities) as any)[action] !== undefined && (count += 1);
            });
            return count === insel.length;
        };
        let entities = this.select().getEntities();
        if (entities.length === 0) {
            // Context menu is called without active selection
            // Set selection to target element
            event.provider.select().set({ guid: event.entity.getGUID() });
            entities = [event.entity];
        } else if (entities.length === 1) {
            if (entities[0].getGUID() !== event.entity.getGUID()) {
                this.select().drop();
                event.provider.select().set({ guid: event.entity.getGUID() });
                entities = [event.entity];
            }
        } else if (entities.length > 1) {
            if (entities.map((entity) => entity.getGUID()).indexOf(event.entity.getGUID()) === -1) {
                // Context menu is called out of selection
                this.select().drop();
                event.provider.select().set({ guid: event.entity.getGUID() });
                entities = [event.entity];
            }
        }
        const providers = this.select().getProviders();
        const actions: {
            activate: boolean;
            deactivate: boolean;
            remove: boolean;
            edit: boolean;
        } = {
            activate: isActionAvailable(EActions.activate, providers, entities),
            deactivate: isActionAvailable(EActions.deactivate, providers, entities),
            remove: isActionAvailable(EActions.remove, providers, entities),
            edit: isActionAvailable(EActions.edit, providers, entities),
        };
        event.items = [];
        // Edit only makes sense for a single entity in a single provider.
        if (providers.length === 1 && entities.length === 1 && actions.edit) {
            event.items.push({
                caption: 'Edit',
                handler: () => {
                    const actions = providers[0].actions(event.entity, entities);
                    actions.edit !== undefined && actions.edit();
                },
                shortcut: 'Enter',
            });
        }
        event.items.length > 0 && event.items.push({ /* Delimiter */ });
        actions.activate &&
            event.items.push({
                caption: 'Activate',
                handler: () => {
                    providers.forEach((provider: Provider<any>) => {
                        const actions = provider.actions(event.entity, entities);
                        actions.activate !== undefined && actions.activate();
                    });
                },
            });
        actions.deactivate &&
            event.items.push({
                caption: 'Deactivate',
                handler: () => {
                    providers.forEach((provider: Provider<any>) => {
                        const actions = provider.actions(event.entity, entities);
                        actions.deactivate !== undefined && actions.deactivate();
                    });
                },
            });
        actions.remove &&
            event.items.push({
                caption: 'Remove',
                handler: () => {
                    providers.forEach((provider: Provider<any>) => {
                        const actions = provider.actions(event.entity, entities);
                        actions.remove !== undefined && actions.remove();
                    });
                },
            });
        event.items.length > 0 && event.items.push({ /* Delimiter */ });
        // "All" variants act on every entity of each provider, not just the selection.
        actions.activate &&
            event.items.push({
                caption: 'Activate All',
                handler: () => {
                    providers.forEach((provider: Provider<any>) => {
                        const actions = provider.actions(event.entity, provider.get());
                        actions.activate !== undefined && actions.activate();
                    });
                },
            });
        actions.deactivate &&
            event.items.push({
                caption: 'Deactivate All',
                handler: () => {
                    providers.forEach((provider: Provider<any>) => {
                        const actions = provider.actions(event.entity, provider.get());
                        actions.deactivate !== undefined && actions.deactivate();
                    });
                },
            });
        actions.remove &&
            event.items.push({
                caption: 'Remove All',
                handler: () => {
                    providers.forEach((provider: Provider<any>) => {
                        const actions = provider.actions(event.entity, provider.get());
                        actions.remove !== undefined && actions.remove();
                    });
                },
            });
        this._providers.forEach((provider: Provider<any>) => {
            const custom: IMenuItem[] = provider.getContextMenuItems(
                event.entity,
                this.select().getEntities(),
            );
            if (custom.length > 0 && event.items !== undefined) {
                event.items.push({ /* Delimiter */ });
                event.items = event.items.concat(custom);
            }
        });
        this._subjects.context.next(event);
    }

    // Double-click triggers the provider's search for the clicked entity.
    private _onDoubleclickEvent(event: IDoubleclickEvent) {
        event.provider.search(event.entity);
    }

    private _onChange() {
        this._subjects.change.next();
    }

    // Provider reloaded: re-apply the persisted selection for it.
    private _onReload(provider: string) {
        this._store().restore(provider);
    }

    private _onSessionChange(session: Session | undefined) {
        this._session = session;
    }
}
the_stack
import * as Cmd from '../cmd'
import { Init, normalize, Context, Component, CmdType } from '../index'
export * from './hash'
export * from './memoize'
import {
  set,
  merge,
  setDeep,
  setDeepMutable,
  get,
  isFn,
  noop,
  isPojo,
  clone,
  OverrideLength,
  weakVal,
} from '../utils'
import {
  ActionReturn,
  ActionState,
  ActionCmdReturn,
  StandardActionReturn,
  ActionType,
  ActionsType,
  InitObjReturn,
  GeneralActionType,
} from '../types'
import { dispatcher } from '../dispatcher'

// Tagged-union member carrying a tag literal and a payload; the phantom
// `__tsTag` field keeps it nominally distinct from plain `{ tag, data }`.
export type Dt<T extends string, D = null> = {
  tag: T
  data: D
} & { __tsTag: 'DateType' }

/**
 * ADT Helper for TS
 * e.g.
 * ```ts
 * type Msg =
 * | Dt<'fetchBook', number>
 * | Dt<'updateBook', Book>
 *
 * let msg = dt('fetchBook', 1)
 * switch(msg.tag) {
 *   case 'fetchBook':
 *     //...
 *     break
 *   case 'updateBook':
 *     //...
 *     break
 *   default:
 *     never(msg) // incomplete check from TS
 *     break
 * }
 * ```
 */
export function dt<T extends string, D = null>(tag: T, data: D = null as any) {
  return { tag, data } as Dt<T, D>
}

/** @public Exhaustiveness helper: only compiles when `f` is narrowed to `never`. */
export const never = (f: never) => f

/** @public Builds a normalized `{ state, cmd }` init result (cmd defaults to none). */
export function mkInit<S, A>(state: S, cmd: Cmd.CmdType<A> = Cmd.none): ActionCmdReturn<S, A> {
  return { state, cmd }
}

// Unary function type used by `compose`.
export type Fn1<T1, R> = (a1: T1) => R

/** @public Left-to-right function composition (overloads for 2..9 functions). */
export function compose<T1, T2, R>(fn1: Fn1<T1, T2>, fn2: Fn1<T2, R>): Fn1<T1, R>
export function compose<T1, T2, T3, R>(
  fn1: Fn1<T1, T2>,
  fn2: Fn1<T2, T3>,
  fn3: Fn1<T3, R>,
): Fn1<T1, R>
export function compose<T1, T2, T3, T4, R>(
  fn1: Fn1<T1, T2>,
  fn2: Fn1<T2, T3>,
  fn3: Fn1<T3, T4>,
  fn4: Fn1<T4, R>,
): Fn1<T1, R>
export function compose<T1, T2, T3, T4, T5, R>(
  fn1: Fn1<T1, T2>,
  fn2: Fn1<T2, T3>,
  fn3: Fn1<T3, T4>,
  fn4: Fn1<T4, T5>,
  fn5: Fn1<T5, R>,
): Fn1<T1, R>
export function compose<T1, T2, T3, T4, T5, T6, R>(
  fn1: Fn1<T1, T2>,
  fn2: Fn1<T2, T3>,
  fn3: Fn1<T3, T4>,
  fn4: Fn1<T4, T5>,
  fn5: Fn1<T5, T6>,
  fn6: Fn1<T6, R>,
): Fn1<T1, R>
export function compose<T1, T2, T3, T4, T5, T6, T7, R>(
  fn1: Fn1<T1, T2>,
  fn2: Fn1<T2, T3>,
  fn3: Fn1<T3, T4>,
  fn4: Fn1<T4, T5>,
  fn5: Fn1<T5, T6>,
  fn6: Fn1<T6, T7>,
  fn7: Fn1<T7, R>,
): Fn1<T1, R>
export function compose<T1, T2, T3, T4, T5, T6, T7, T8, R>(
  fn1: Fn1<T1, T2>,
  fn2: Fn1<T2, T3>,
  fn3: Fn1<T3, T4>,
  fn4: Fn1<T4, T5>,
  fn5: Fn1<T5, T6>,
  fn6: Fn1<T6, T7>,
  fn7: Fn1<T7, T8>,
  fn8: Fn1<T8, R>,
): Fn1<T1, R>
export function compose<T1, T2, T3, T4, T5, T6, T7, T8, T9, R>(
  fn1: Fn1<T1, T2>,
  fn2: Fn1<T2, T3>,
  fn3: Fn1<T3, T4>,
  fn4: Fn1<T4, T5>,
  fn5: Fn1<T5, T6>,
  fn6: Fn1<T6, T7>,
  fn7: Fn1<T7, T8>,
  fn8: Fn1<T8, T9>,
  fn9: Fn1<T9, R>,
): Fn1<T1, R>
export function compose<R>(...fns: Function[]): Fn1<any, R> {
  // Pipe the argument through each function left to right.
  return arg => fns.reduce((arg, fn) => fn(arg), arg)
}

/** @public Returns `defaults` when `value` is null or undefined, else `value`. */
export function defaults<T>(value: T | null | undefined, defaults: T): T {
  return value == null ? defaults : value
}

export interface CombinedComps<
  T extends { [k: string]: [Component, InitObjReturn<any, any>] },
  A extends { [k: string]: any }
> {
  /** Combined state object for child components */
  state: { [k in keyof T]: T[k][1]['state'] }
  /** Combined and mapped cmd object for child components */
  cmd: Cmd.Sub<A>[]
  /**
   * Splited mapped cmd object for child components, useful for router config, so you can call init cmd when each page routing.
   * */
  cmds: { [k in keyof T]: Cmd.Sub<A>[] }
  /** Combined action object for child components */
  actions: { [k in keyof T]: T[k][0]['actions'] }
  /** Combined view function for child components */
  views: { [k in keyof T]: T[k][0]['view'] }
  /**
   * helper function for render routes views, so you can do
   * `subComps.render('somePage', state, actions)` instead of
   * `SomePage.views(state.somePage, actions.somePage)` or
   * `<SomePage.views
   *    state={state.somePage}
   *    actions={actions.somePage}
   * />`
   */
  render: <K extends Extract<keyof T, keyof S>, S>(
    k: K,
    state: S,
    actions: ActionsType<S, any>,
  ) => any
}

/**
 * Combines child components keyed by name into a single `{ state, cmd, cmds,
 * actions, views, render }` aggregate. Each child's init cmd is mapped into
 * its slice (`Cmd.map`) and batched into the combined `cmd`.
 */
export function combine<
  T extends { [k: string]: [Component, InitObjReturn<any, any>] },
  A extends { [k: string]: any }
>(arg: T, _acts: A = null as any): CombinedComps<T, A> {
  let state = {} as { [k in keyof T]: T[k][1]['state'] }
  let cmd = Cmd.none as Cmd.CmdType<A>
  let cmds = {} as { [k in keyof T]: Cmd.CmdType<A> }
  let actions = {} as { [k in keyof T]: T[k][0]['actions'] }
  let views = {} as { [k in keyof T]: T[k][0]['view'] }
  for (const key in arg) {
    let comp = arg[key][0]
    let init = normalize<any, any>(arg[key][1])
    state[key] = init.state
    actions[key] = comp.actions
    views[key] = comp.view
    // Scope the child's cmd to its slice of the combined actions.
    cmds[key] = Cmd.map(_ => _[key], init.cmd)
    cmd = Cmd.batch(cmd, cmds[key])
  }
  return {
    state,
    cmd,
    cmds,
    views,
    actions,
    render(k, state, actions) {
      let view = views[k]
      // Single-parameter views take a `{ state, actions }` props object.
      if (view.length === 1) {
        return (view as any)({ state: state[k], actions: actions[k] })
      } else {
        return view(state[k], actions[k])
      }
    },
  }
}

/**
 * @internal
 * run action and return a normalized result ([State, CmdType<>]),
 * this is useful to write High-Order-Action, which take an action and return a wrapped action.
* @param result result of `action(msg: Data)` * @param state * @param actions */ export function runActionResult<S, A>( result: ActionReturn<S, A> | ((state: S, actions: A) => ActionReturn<S, A>), ): StandardActionReturn<S, A> { let res: any = result let { state, actions, parentState, parentActions, } = dispatcher.getContext() isFn(res) && (res = res.call(actions, state, actions, parentState, parentActions)) && isFn(res) && (res = res.call(actions, actions)) // action can be a function that return a promise or undefined(callback) return normalize<S, A>(res, state) } /** @internal */ export function withParents<S, A, PS, PA, A1>( action: (a1: A1) => (s: S, a: A) => any, wrapper?: ( action: (a1: A1) => StandardActionReturn<S, A>, parentState: PS, parentActions: PA, state: S, actions: A, ) => ActionReturn<S, A>, ) export function withParents<S, A, PS, PA, A1, A2>( action: (a1: A1, a2: A2) => (s: S, a: A) => any, wrapper?: ( action: (a1: A1, a2: A2) => StandardActionReturn<S, A>, parentState: PS, parentActions: PA, state: S, actions: A, ) => ActionReturn<S, A>, ) export function withParents<S, A, PS, PA, A1, A2, A3>( action: (a1: A1, a2: A2, a3: A3) => (s: S, a: A) => any, wrapper?: ( action: (a1: A1, a2: A2, a3: A3) => StandardActionReturn<S, A>, parentState: PS, parentActions: PA, state: S, actions: A, ) => ActionReturn<S, A>, ) export function withParents<S, A, PS, PA, A1, A2, A3, A4>( action: (a1: A1, a2: A2, a3: A3, a4: A4) => (s: S, a: A) => any, wrapper?: ( action: (a1: A1, a2: A2, a3: A3, a4: A4) => StandardActionReturn<S, A>, parentState: PS, parentActions: PA, state: S, actions: A, ) => ActionReturn<S, A>, ) export function withParents<S, A, PS, PA, A1, A2, A3, A4, A5>( action: (a1: A1, a2: A2, a3: A3, a4: A4, a5: A5) => (s: S, a: A) => any, wrapper?: ( action: (a1: A1, a2: A2, a3: A3, a4: A4, a5: A5) => StandardActionReturn<S, A>, parentState: PS, parentActions: PA, state: S, actions: A, ) => ActionReturn<S, A>, ) /** * Wrap a child action with parentState, 
parentActions. * @internal * @deprecated Deprecated for `overrideAction` * @param action The action to be wrapped * @param wrapper * @param parentState * @param parentActions */ export function withParents<S, A, PS, PA>( action: GeneralActionType<S, A>, wrapper?: ( action: (...args) => StandardActionReturn<S, A>, parentState: PS, parentActions: PA, state: S, actions: A, ) => ActionReturn<S, A>, ): any { if (!wrapper) { return action } const wrapped = (state: S, actions: A, parentState: PS, parentActions: PA) => { const nactions = (...args) => runActionResult(action(...args)) return wrapper(nactions, parentState, parentActions, state, actions) } return wrapped } /** * @deprecated Deprecated for `overrideAction` * @internal */ export const wrapActions = withParents export function overrideAction<PS, PA, S, A, A1>( parentActions: PA, getter: (_: PA) => (a1: A1) => any, wrapper?: ( a1: A1, ) => ( action: <S = any, A = any>(a1: A1) => StandardActionReturn<S, A>, parentState: PS, parentActions: PA, state: S, actions: A, ) => ActionReturn<S, A>, ) export function overrideAction<S, A, PS, PA, A1, A2>( parentActions: PA, getter: (_: PA) => (a1: A1, a2: A2) => any, wrapper?: ( a1: A1, a2: A2, ) => ( action: <S = any, A = any>(a1: A1, a2: A2) => StandardActionReturn<S, A>, parentState: PS, parentActions: PA, state: S, actions: A, ) => ActionReturn<S, A>, ) export function overrideAction<S, A, PS, PA, A1, A2, A3>( parentActions: PA, getter: (_: PA) => (a1: A1, a2: A2, a3: A3) => any, wrapper?: ( a1: A1, a2: A2, a3: A3, ) => ( action: <S = any, A = any>(a1: A1, a2: A2, a3: A3) => StandardActionReturn<S, A>, parentState: PS, parentActions: PA, state: S, actions: A, ) => ActionReturn<S, A>, ) export function overrideAction<S, A, PS, PA, A1, A2, A3, A4>( parentActions: PA, getter: (_: PA) => (a1: A1, a2: A2, a3: A3, a4: A4) => any, wrapper?: ( a1: A1, a2: A2, a3: A3, a4: A4, ) => ( action: <S = any, A = any>(a1: A1, a2: A2, a3: A3, a4: A4) => StandardActionReturn<S, A>, 
parentState: PS, parentActions: PA, state: S, actions: A, ) => ActionReturn<S, A>, ) /** * Wrap a child action with parentState, parentActions. * @internal * @param action The action to be wrapped * @param wrapper * @param parentState * @param parentActions */ export function overrideAction<S, A, PS, PA>( parentActions: PA, getter: (_: PA) => GeneralActionType<S, A>, wrapper?: ( ...args ) => ( action: (...args) => StandardActionReturn<S, A>, parentState: PS, parentActions: PA, state: S, actions: A, ) => ActionReturn<S, A>, ): any { if (!wrapper) { return parentActions } let action = getter(parentActions) const wrapped = (...args) => { const normalAction = (...args) => { let ret = runActionResult(action(...args)) return ret } let ctx = dispatcher.getContext() return wrapper(...args)(normalAction, ctx.parentState, ctx.parentActions, ctx.state, ctx.actions) } let keys = (getter.toString().match(/((?:[\w_$]+\.)+[\w_$]+)/) || [])[1].split('.').slice(1) let cursor = parentActions let replaced = false for (let i = 0; i < keys.length; i++) { const key = keys[i] if (cursor[key] === action) { cursor[key] = wrapped replaced = true break } cursor = cursor[key] = { ...cursor[key] } } weakVal(wrapped, OverrideLength, keys.length) if (!replaced) { console.error(new Error(`Cannot find action in parentActions`), parentActions, getter) } return parentActions }
the_stack
import Vue, { Component, CreateElement, VNode, VNodeData } from 'vue';
import { ViewActionType } from '../interface/common';
import { RouteInfo, Router, RouteEventType } from '../interface/router';
import invokeHook from '../utils/invokeHook';

// Reactive state of the RouterView component (returned from data()).
interface Data {
    routeInfo?: RouteInfo<Component>;
    preRouteInfo?: RouteInfo<Component>;
    nextRouteInfo?: RouteInfo<Component>;
    // route.id -> cached vnode, used to keep component instances alive across renders.
    vnodeCache: Map<string, VNode>;
    transitionType?: string;
    customTransition?: Transition;
    // Route id whose component must be destroyed after the leave transition finishes.
    needDestroyedRouteId?: string;
    preRenderMode: PreRenderMode;
}

type Transition = TransitionOptions | string;

interface TransitionDurationConfig {
    enter: number;
    leave: number;
}

interface TransitionOptions {
    name: string;
    appear?: boolean;
    duration: number | TransitionDurationConfig;
    mode: string;
    tag: string;
}

type PropsTypes<T> = () => T;

// Props passed down to the rendered page component.
interface PageViewProps {
    path: string;
    query: { [key: string]: string };
    params: { [key: string]: string };
    state?: unknown;
}

const enum PreRenderMode {
    PRE_RENDERING = 'preRendering',
    RENDERING_CANCELED = 'renderingCanceled',
    NONE = 'none'
}

// Sentinel transition name used to effectively disable CSS transitions.
const NO_TRANSITION = '__NO_TRANSITION';

/**
 * Vue 2 view component that renders the current route's component (and, when
 * `supportPreRender` is on, also pre-renders the upcoming route) inside a
 * transition / transition-group, translating router events into component
 * lifecycle hooks (WILL/DID APPEAR/DISAPPEAR via invokeHook).
 */
export default Vue.extend({
    name: 'RouterView',
    data() {
        return {} as Data;
    },
    props: {
        router: Object as PropsTypes<Router<Component> | undefined>,
        supportPreRender: Boolean as PropsTypes<boolean>,
        transition: ([Object, String] as unknown) as PropsTypes<TransitionOptions | string | undefined>
    },
    render(h: CreateElement): VNode {
        if (!this.routeInfo) {
            return h();
        }
        const vnode = this.renderRoute(h, this.routeInfo);
        let vNodes = [vnode];
        // While pre-rendering, both the upcoming and the current route are mounted.
        if (this.nextRouteInfo && this.supportPreRender) {
            const nextVNode = this.renderRoute(h, this.nextRouteInfo);
            vNodes = [nextVNode, vnode];
        }
        return this.renderTransition(h, vNodes);
    },
    created() {
        this.vnodeCache = new Map();
        this.transitionType = undefined;
        const router = this.getRouter();
        this.routeInfo = router.currentRouteInfo;
        this.preRenderMode = PreRenderMode.NONE;
        router.on(RouteEventType.CHANGE, this.handleRouteChange);
        router.on(RouteEventType.DESTROY, this.handleRouteDestroy);
        if (this.supportPreRender) {
            router.on(RouteEventType.WILL_CHANGE, this.handleRouteWillChange);
            router.on(RouteEventType.CANCEL_CHANGE, this.handleRouteChangeCancel);
        }
    },
    destroyed() {
        const event = this.getRouter();
        event.off(RouteEventType.CHANGE, this.handleRouteChange);
        event.off(RouteEventType.WILL_CHANGE, this.handleRouteWillChange);
        event.off(RouteEventType.CANCEL_CHANGE, this.handleRouteChangeCancel);
        event.off(RouteEventType.DESTROY, this.handleRouteDestroy);
    },
    methods: {
        /**
         * Renders one route's component, reusing the cached component instance
         * for that route id (manual keep-alive) and tagging the vnode with a
         * route-scoped tag/key so sibling routes never collide.
         */
        renderRoute(h: CreateElement, routeInfo: RouteInfo<Component>): VNode {
            const { config, route } = routeInfo;
            const cachedVNode = this.vnodeCache.get(route.id);
            const vNodeData: VNodeData = {
                props: this.getPageViewProps(routeInfo)
            };
            // const transition = this.customTransition === undefined ? this.transition : this.customTransition;
            // let transitionName = '';
            // if (typeof transition === 'string') {
            //     transitionName = `${transition}-${this.transitionType}`;
            // } else if (transition && transition.name) {
            //     transitionName = `${transition.name}-${this.transitionType}`;
            // }
            // if (vNodeData.class === undefined) {
            //     vNodeData.class = {};
            // }
            // const transitionStage = isNext ? 'enter' : 'leave';
            // if (this.isPreRendering()) {
            //     vNodeData.class[`${transitionName}-${transitionStage}`] = this.isPreRendering();
            //     vNodeData.class[`${transitionName}-${transitionStage}-active`] = this.isPreRendering();
            // }
            const vnode = h(config.component, vNodeData);
            if (cachedVNode !== undefined) {
                // Reuse the live instance so component state survives re-renders.
                vnode.componentInstance = cachedVNode.componentInstance;
            }
            this.vnodeCache.set(route.id, vnode);
            if (vnode.data !== undefined) {
                vnode.data.keepAlive = true;
            }
            if (vnode.tag !== undefined) {
                vnode.tag = `${vnode.tag}-${route.id}`;
                vnode.key = `__route-${route.id}`;
            }
            return vnode;
        },
        // transition-group is required when two routes may be mounted at once.
        renderTransition(h: CreateElement, vNodes: VNode[]): VNode {
            const vnodeData = {
                props: this.getTransitionProps(),
                on: this.getTransitionListener()
            };
            const transitionVnode = h(
                this.supportPreRender ? 'transition-group' : 'transition',
                vnodeData,
                vNodes
            );
            return transitionVnode;
        },
        getRouter(): Router<Component> {
            // eslint-disable-next-line @typescript-eslint/no-explicit-any
            return this.router || (this as any).$router;
        },
        /** Commits a route change: swaps routeInfo, clears pre-render state, re-renders. */
        handleRouteChange(type: string, routeInfo?: RouteInfo<Component>, transition?: unknown): void {
            if (routeInfo === undefined) {
                return;
            }
            if (this.routeInfo && this.routeInfo.route.id === routeInfo.route.id) {
                return;
            }
            this.preRouteInfo = this.routeInfo;
            this.routeInfo = routeInfo;
            this.nextRouteInfo = undefined;
            this.transitionType = type;
            this.setCustomTransition(routeInfo, transition);
            this.$forceUpdate();
        },
        /** Starts pre-rendering the upcoming route before the change is committed. */
        handleRouteWillChange(type: string, routeInfo?: RouteInfo<Component>, transition?: unknown): void {
            if (routeInfo === undefined) {
                return;
            }
            this.nextRouteInfo = routeInfo;
            this.transitionType = type;
            this.preRenderMode = PreRenderMode.PRE_RENDERING;
            this.setCustomTransition(routeInfo, transition);
            this.$forceUpdate();
        },
        /** Aborts a pending pre-render if it matches the cancelled route. */
        handleRouteChangeCancel(routeInfo: RouteInfo<Component>): void {
            if (
                routeInfo === undefined ||
                this.nextRouteInfo === undefined ||
                (routeInfo && this.nextRouteInfo && routeInfo.route.id !== this.nextRouteInfo.route.id)
            ) {
                return;
            }
            this.nextRouteInfo = undefined;
            this.transitionType = undefined;
            this.customTransition = undefined;
            this.preRenderMode = PreRenderMode.RENDERING_CANCELED;
            this.$forceUpdate();
        },
        handleRouteDestroy(ids: string[]): void {
            ids.forEach(id => {
                if (this.preRouteInfo && this.preRouteInfo.route.id !== id) {
                    this.destroyComponent(id);
                } else {
                    // Pre vnode will be deleted after transition leave
                    this.needDestroyedRouteId = id;
                }
            });
        },
        /** Destroys the cached component instance for a route id and evicts its vnode. */
        destroyComponent(id: string): void {
            const instance = this.getRouteComponentInstance(id);
            if (instance) {
                instance.$destroy();
            }
            this.vnodeCache.delete(id);
        },
        getRouteComponentInstance(id: string): Vue | undefined {
            const vnode = this.vnodeCache.get(id);
            return vnode && vnode.componentInstance;
        },
        /**
         * Builds the transition props. During pre-rendering (or without a
         * transition type) the NO_TRANSITION name disables CSS transitions;
         * otherwise the name is `<base>-<transitionType>`.
         */
        getTransitionProps(): Partial<TransitionOptions> {
            const props: Partial<TransitionOptions> = {
                appear: true,
                tag: 'div'
            };
            if (!this.transitionType || this.isPreRendering()) {
                props.name = NO_TRANSITION;
                return props;
            }
            const transition = this.customTransition === undefined ? this.transition : this.customTransition;
            if (transition && transition !== NO_TRANSITION) {
                if (typeof transition === 'string') {
                    props.name = `${transition}-${this.transitionType}`;
                } else {
                    if (transition.name) {
                        props.name = `${transition.name}-${this.transitionType}`;
                    }
                    props.duration = transition.duration;
                    props.mode = transition.mode;
                }
            }
            return props;
        },
        getTransitionListener() {
            return {
                beforeEnter: this.handleTransitionBeforeEnter,
                afterEnter: this.handleTransitionAfterEnter,
                beforeLeave: this.handleTransitionBeforeLeave,
                afterLeave: this.handleTransitionAfterLeave
            };
        },
        getPageViewProps(routeInfo: RouteInfo<Component>): PageViewProps {
            const props: PageViewProps = {
                path: '/',
                params: {},
                query: {},
                state: undefined
            };
            const { path, params, query, state } = routeInfo.route;
            Object.assign(props, { params, query, state, path });
            return props;
        },
        /** Fires WILL_APPEAR (and, while pre-rendering, WILL_DISAPPEAR on the current view). */
        handleTransitionBeforeEnter(): void {
            if (this.routeInfo === undefined) return;
            if (this.preRenderMode === PreRenderMode.NONE) {
                const component = this.getRouteComponentInstance(this.routeInfo.route.id);
                if (component !== undefined) {
                    invokeHook(component, ViewActionType.WILL_APPEAR);
                }
            } else if (this.preRenderMode === PreRenderMode.PRE_RENDERING) {
                const currentComponent = this.getRouteComponentInstance(this.routeInfo.route.id);
                if (currentComponent === undefined) return;
                invokeHook(currentComponent, ViewActionType.WILL_DISAPPEAR);
                const routeId = (this.nextRouteInfo && this.nextRouteInfo.route.id) || '';
                const nextComponent = this.getRouteComponentInstance(routeId);
                if (nextComponent === undefined) return;
                invokeHook(nextComponent, ViewActionType.WILL_APPEAR);
            }
        },
        handleTransitionAfterEnter(): void {
            if (this.routeInfo === undefined || this.isPreRendering()) return;
            const routeId = this.routeInfo.route.id;
            const component = this.getRouteComponentInstance(routeId);
            if (component === undefined) return;
            invokeHook(component, ViewActionType.DID_APPEAR);
        },
        handleTransitionBeforeLeave(): void {
            if (this.preRenderMode === PreRenderMode.NONE && this.preRouteInfo !== undefined) {
                const component = this.getRouteComponentInstance(this.preRouteInfo.route.id);
                if (component !== undefined) {
                    invokeHook(component, ViewActionType.WILL_DISAPPEAR);
                }
            }
        },
        /**
         * Finalizes a transition: fires DID_DISAPPEAR for the outgoing view,
         * DID_APPEAR for the settled view when a pre-render completed or was
         * canceled, and performs the deferred component destruction.
         */
        handleTransitionAfterLeave(): void {
            if (this.preRouteInfo !== undefined) {
                const component = this.getRouteComponentInstance(this.preRouteInfo.route.id);
                if (component !== undefined) {
                    invokeHook(component, ViewActionType.DID_DISAPPEAR);
                }
            }
            if (this.preRenderMode === PreRenderMode.PRE_RENDERING && this.routeInfo !== undefined) {
                const component = this.getRouteComponentInstance(this.routeInfo.route.id);
                if (component !== undefined) {
                    invokeHook(component, ViewActionType.DID_APPEAR);
                }
                this.preRenderMode = PreRenderMode.NONE;
            }
            if (this.preRenderMode === PreRenderMode.RENDERING_CANCELED && this.routeInfo !== undefined) {
                const component = this.getRouteComponentInstance(this.routeInfo.route.id);
                if (component !== undefined) {
                    invokeHook(component, ViewActionType.DID_APPEAR);
                }
                this.preRenderMode = PreRenderMode.NONE;
            }
            if (this.needDestroyedRouteId) {
                this.destroyComponent(this.needDestroyedRouteId);
                this.needDestroyedRouteId = undefined;
            }
        },
        // Event-supplied transition wins over the route-config one.
        setCustomTransition(routeInfo: RouteInfo<Component>, transition: unknown): void {
            this.customTransition = undefined;
            if (this.isTransition(routeInfo.config.transition)) {
                this.customTransition = routeInfo.config.transition;
            }
            if (this.isTransition(transition)) {
                this.customTransition = transition;
            }
        },
        // NOTE(review): only checks for undefined — any defined value passes
        // this guard, so the `transition is Transition` narrowing is a claim
        // the runtime does not fully verify.
        isTransition(transition: unknown): transition is Transition {
            return transition !== undefined;
        },
        isPreRendering(): boolean {
            return this.nextRouteInfo !== undefined;
        }
    }
});
the_stack
import { Component } from '@angular/core';
import { AppConstants } from 'app.constants';
import { CreatorDashboardBackendApiService } from 'domain/creator_dashboard/creator-dashboard-backend-api.service';
import { CreatorDashboardConstants } from './creator-dashboard-page.constants';
import { RatingComputationService } from 'components/ratings/rating-computation/rating-computation.service';
import { UrlInterpolationService } from 'domain/utilities/url-interpolation.service';
import { LoaderService } from 'services/loader.service';
import { UserService } from 'services/user.service';
import { AlertsService } from 'services/alerts.service';
import { DateTimeFormatService } from 'services/date-time-format.service';
import { ThreadStatusDisplayService } from 'pages/exploration-editor-page/feedback-tab/services/thread-status-display.service';
import { ExplorationCreationService } from 'components/entity-creation-services/exploration-creation.service';
import { downgradeComponent } from '@angular/upgrade/static';
import { forkJoin } from 'rxjs';
import { WindowRef } from 'services/contextual/window-ref.service';
import { CreatorDashboardData } from 'domain/creator_dashboard/creator-dashboard-backend-api.service';
import { ProfileSummary } from 'domain/user/profile-summary.model';
import { CreatorExplorationSummary } from 'domain/summary/creator-exploration-summary.model';
import { CollectionSummary } from 'domain/collection/collection-summary.model';
import { ExplorationRatings } from 'domain/summary/learner-exploration-summary.model';
import { CreatorDashboardStats } from 'domain/creator_dashboard/creator-dashboard-stats.model';

/**
 * Component for the creator dashboard page. Loads the creator's explorations,
 * collections, subscribers and stats, and exposes sorting / view-mode helpers
 * consumed by the template.
 */
@Component({
  selector: 'oppia-creator-dashboard-page',
  templateUrl: './creator-dashboard-page.component.html'
})
export class CreatorDashboardPageComponent {
  // Currently selected tab ('myExplorations' or 'myCollections'; set in
  // ngOnInit and via setActiveTab).
  activeTab: string;
  // 'card' or 'list' display mode for the explorations listing.
  myExplorationsView: string;
  // Hint shown next to unpublished explorations; varies with screen width.
  publishText: string;
  // Current sort key for the explorations listing.
  currentSortType: string;
  isCurrentSortDescending: boolean;
  // Current sort key for the subscribers listing.
  currentSubscribersSortType: string;
  isCurrentSubscriptionSortDescending: boolean;
  canReviewActiveThread: boolean;
  // NOTE(review): declared without a type or initializer and never assigned
  // in this file — presumably bound by the template; confirm before removing.
  EXPLORATION_DROPDOWN_STATS;
  canCreateCollections: boolean;
  explorationsList: CreatorExplorationSummary[];
  collectionsList: CollectionSummary[];
  subscribersList: ProfileSummary[];
  lastWeekStats: { totalPlays: number};
  dashboardStats: CreatorDashboardStats;
  // dashboardStats.totalPlays minus lastWeekStats.totalPlays (see ngOnInit).
  relativeChangeInTotalPlays: number;
  // Function references assigned in ngOnInit so the template can call the
  // underlying services directly.
  getLocaleAbbreviatedDatetimeString: (millisSinceEpoch: number) => string;
  getHumanReadableStatus: (status: string) => string;
  emptyDashboardImgUrl: string;
  getAverageRating: ( (ratingFrequencies: ExplorationRatings) => number | null);
  // Constants re-exported onto the component for template access.
  SUBSCRIPTION_SORT_BY_KEYS = CreatorDashboardConstants.SUBSCRIPTION_SORT_BY_KEYS;
  EXPLORATIONS_SORT_BY_KEYS = CreatorDashboardConstants.EXPLORATIONS_SORT_BY_KEYS;
  DEFAULT_EMPTY_TITLE = 'Untitled';
  HUMAN_READABLE_EXPLORATIONS_SORT_BY_KEYS = CreatorDashboardConstants.HUMAN_READABLE_EXPLORATIONS_SORT_BY_KEYS;
  HUMAN_READABLE_SUBSCRIPTION_SORT_BY_KEYS = CreatorDashboardConstants.HUMAN_READABLE_SUBSCRIPTION_SORT_BY_KEYS;
  DEFAULT_TWITTER_SHARE_MESSAGE_DASHBOARD = AppConstants.DEFAULT_TWITTER_SHARE_MESSAGE_EDITOR;

  constructor(
    private creatorDashboardBackendApiService: CreatorDashboardBackendApiService,
    private ratingComputationService: RatingComputationService,
    private urlInterpolationService: UrlInterpolationService,
    private loaderService: LoaderService,
    private userService: UserService,
    private alertsService: AlertsService,
    private dateTimeFormatService: DateTimeFormatService,
    private threadStatusDisplayService: ThreadStatusDisplayService,
    private explorationCreationService: ExplorationCreationService,
    private windowRef: WindowRef,
  ) {}

  // Publish hints: the long default text, and a shorter variant for small
  // (mobile) screens — see updatesGivenScreenWidth.
  EXP_PUBLISH_TEXTS = {
    defaultText: ( 'This exploration is private. Publish it to receive statistics.'),
    smText: 'Publish the exploration to receive statistics.'
  };
  // The user's preferred explorations display mode (card by default).
  userDashboardDisplayPreference = ( AppConstants.ALLOWED_CREATOR_DASHBOARD_DISPLAY_PREFS.CARD);

  /** Switches the visible dashboard tab. */
  setActiveTab(newActiveTabName: string): void {
    this.activeTab = newActiveTabName;
  }

  /** Editor URL for the given exploration. */
  getExplorationUrl(explorationId: string): string {
    return '/create/' + explorationId;
  }

  /** Editor URL for the given collection. */
  getCollectionUrl(collectionId: string): string {
    return '/collection_editor/create/' + collectionId;
  }

  /** Sets the listing view mode and persists the preference server-side. */
  setMyExplorationsView(newViewType: string): void {
    this.myExplorationsView = newViewType;
    this.creatorDashboardBackendApiService.postExplorationViewAsync( newViewType).then(() => {});
  }

  /** True when the window is narrower than 500px (mobile layout). */
  checkMobileView(): boolean {
    return (this.windowRef.nativeWindow.innerWidth < 500);
  }

  /** Popover trigger for a subscriber card ('mouseenter' or 'none'). */
  showUsernamePopover(subscriberUsername: string | string[]): string {
    // The popover on the subscription card is only shown if the length
    // of the subscriber username is greater than 10 and the user hovers
    // over the truncated username.
    if (subscriberUsername.length > 10) {
      return 'mouseenter';
    } else {
      return 'none';
    }
  }

  /** Decodes a percent-encoded image file name into a usable URL. */
  getTrustedResourceUrl(imageFileName: string): string {
    return decodeURIComponent(imageFileName);
  }

  /** Adjusts view mode and publish hint text to the current screen width. */
  updatesGivenScreenWidth(): void {
    if (this.checkMobileView()) {
      // For mobile users, the view of the creators
      // exploration list is shown only in
      // the card view and can't be switched to list view.
      this.myExplorationsView = ( AppConstants.ALLOWED_CREATOR_DASHBOARD_DISPLAY_PREFS.CARD);
      this.publishText = this.EXP_PUBLISH_TEXTS.smText;
    } else {
      // For computer users or users operating in larger screen size
      // the creator exploration list will come back to its previously
      // selected view (card or list) when resized from mobile view.
      this.myExplorationsView = this.userDashboardDisplayPreference;
      this.publishText = this.EXP_PUBLISH_TEXTS.defaultText;
    }
  }

  /**
   * Selects the explorations sort key; re-selecting the current key toggles
   * the sort direction instead.
   */
  setExplorationsSortingOptions(sortType: string): void {
    if (sortType === this.currentSortType) {
      this.isCurrentSortDescending = !this.isCurrentSortDescending;
    } else {
      this.currentSortType = sortType;
    }
  }

  /**
   * Selects the subscribers sort key; re-selecting the current key toggles
   * the sort direction instead.
   */
  setSubscriptionSortingOptions(sortType: string): void {
    if (sortType === this.currentSubscribersSortType) {
      this.isCurrentSubscriptionSortDescending = ( !this.isCurrentSubscriptionSortDescending);
    } else {
      this.currentSubscribersSortType = sortType;
    }
  }

  /** Sort key used by the template for the subscribers listing. */
  sortSubscriptionFunction(): string {
    return this.currentSubscribersSortType;
  }

  /**
   * Sort key used by the template for the explorations listing; rating is
   * not a sortable field on the summaries, so it falls back to 'default'.
   */
  sortByFunction(): string {
    if ( this.currentSortType === CreatorDashboardConstants.EXPLORATIONS_SORT_BY_KEYS.RATING) {
      // TODO(sll): Find a better way to sort explorations according to
      // average ratings. Currently there is no parameter as such
      // average ratings in entities received by SortByPipe.
      return 'default';
    } else {
      return this.currentSortType;
    }
  }

  /** Resolves a thumbnail icon path to a full static-asset URL. */
  getCompleteThumbnailIconUrl(iconUrl: string): string {
    return this.urlInterpolationService.getStaticImageUrl(iconUrl);
  }

  /**
   * Loads user info and dashboard data in parallel, initializes sorting and
   * listing state from the response, and hides the loading screen once both
   * requests settle. Also wires a window resize listener.
   */
  ngOnInit(): void {
    this.loaderService.showLoadingScreen('Loading');
    let userInfoPromise = this.userService.getUserInfoAsync();
    userInfoPromise.then((userInfo) => {
      this.canCreateCollections = userInfo.canCreateCollections();
    });
    let dashboardDataPromise = ( this.creatorDashboardBackendApiService.fetchDashboardDataAsync());
    dashboardDataPromise.then(
      (response: CreatorDashboardData) => {
        // The following condition is required for Karma testing. The
        // Angular HttpClient returns an Observable which when converted
        // to a promise does not have the 'data' key but the AngularJS
        // mocks of services using HttpClient use $http which return
        // promise and the content is contained in the 'data' key.
        // Therefore the following condition checks for presence of
        // 'response.data' which would be the case in AngularJS testing
        // but assigns 'response' if the former is not present which is
        // the case with HttpClient.
        // NOTE(review): the check described above is no longer present —
        // 'response' is used directly; the comment looks stale. Confirm
        // against the AngularJS tests before deleting it.
        let responseData = response;
        this.currentSortType = ( CreatorDashboardConstants. EXPLORATIONS_SORT_BY_KEYS.OPEN_FEEDBACK);
        this.currentSubscribersSortType = CreatorDashboardConstants.SUBSCRIPTION_SORT_BY_KEYS.USERNAME;
        this.isCurrentSortDescending = true;
        this.isCurrentSubscriptionSortDescending = true;
        this.explorationsList = responseData.explorationsList;
        this.collectionsList = responseData.collectionsList;
        this.subscribersList = responseData.subscribersList;
        this.dashboardStats = responseData.dashboardStats;
        this.lastWeekStats = responseData.lastWeekStats;
        this.myExplorationsView = responseData.displayPreference;
        if (this.dashboardStats && this.lastWeekStats) {
          this.relativeChangeInTotalPlays = ( this.dashboardStats.totalPlays - ( this.lastWeekStats.totalPlays) );
        }
        // Default to the collections tab only when there are collections
        // but no explorations.
        if (this.explorationsList.length === 0 && this.collectionsList.length > 0) {
          this.activeTab = 'myCollections';
        } else {
          this.activeTab = 'myExplorations';
        }
      }
    );
    forkJoin([userInfoPromise, dashboardDataPromise]).subscribe(() => {
      this.loaderService.hideLoadingScreen();
    });
    // Expose service methods directly to the template.
    this.getAverageRating = this.ratingComputationService.computeAverageRating;
    this.getLocaleAbbreviatedDatetimeString = ( this.dateTimeFormatService.getLocaleAbbreviatedDatetimeString);
    this.getHumanReadableStatus = ( this.threadStatusDisplayService.getHumanReadableStatus);
    this.emptyDashboardImgUrl = this.urlInterpolationService.getStaticImageUrl('/general/empty_dashboard.svg');
    this.canReviewActiveThread = false;
    this.updatesGivenScreenWidth();
    angular.element(this.windowRef.nativeWindow).on('resize', () => {
      this.updatesGivenScreenWidth();
    });
  }

  /** Starts the new-exploration creation flow. */
  createNewExploration(): void {
    this.explorationCreationService.createNewExploration();
  }

  returnZero(): number {
    // This function is used as a custom function to
    // sort heading in the list view. Directly assigning
    // keyvalue : 0 gives error "TypeError: The comparison function
    // must be either a function or undefined" .
    return 0;
  }
}

// Register the component with the legacy AngularJS module so hybrid pages
// can use it as a directive.
angular.module('oppia').directive('oppiaCreatorDashboardPage', downgradeComponent({ component: CreatorDashboardPageComponent }) as angular.IDirectiveFactory);
the_stack
import * as util from 'util' import { HttpKit } from '@tnwx/kits' import { AccessToken, QyAccessTokenApi } from '@tnwx/accesstoken' /** * @author Javen * @copyright javendev@126.com * @description 外部联系人管理 */ export class QyExContact { private static getFollowUserListUrl = 'https://qyapi.weixin.qq.com/cgi-bin/externalcontact/get_follow_user_list?access_token=%s' /** * 获取配置了客户联系功能的成员列表 * @param accessToken {AccessToken} */ public static async getFollowUserList(accessToken?: AccessToken) { if (!accessToken) { accessToken = await QyAccessTokenApi.getAccessToken() } let url = util.format(this.getFollowUserListUrl, accessToken.getAccessToken) return HttpKit.getHttpDelegate.httpGet(url) } private static addContactWayUrl = 'https://qyapi.weixin.qq.com/cgi-bin/externalcontact/add_contact_way?access_token=%s' /** * 配置客户联系「联系我」方式 * @param type 联系方式类型,1-单人, 2-多人 * @param scene 场景,1-在小程序中联系,2-通过二维码联系,3-在线问诊 * @param style 在小程序中联系时使用的控件样式 * @param remark 联系方式的备注信息,用于助记,不超过30个字符 * @param skipVerify 外部客户添加时是否无需验证,默认为true * @param state 自定义的state参数 * @param user 使用该联系方式的用户userID列表,在type为1时为必填,且只能有一个 * @param party 使用该联系方式的部门id列表,只在type为2时有效 * @param accessToken {AccessToken} */ public static async addContactWay( type: number, scene: number, style?: number, remark?: string, skipVerify?: boolean, state?: string, user?: Array<string>, party?: Array<string>, accessToken?: AccessToken ) { if (!accessToken) { accessToken = await QyAccessTokenApi.getAccessToken() } let url = util.format(this.addContactWayUrl, accessToken.getAccessToken) return HttpKit.getHttpDelegate.httpPost( url, JSON.stringify({ type: type, scene: scene, style: style, remark: remark, skip_verify: skipVerify, state: state, user: user, party: party }) ) } private static updateContactWayUrl = 'https://qyapi.weixin.qq.com/cgi-bin/externalcontact/update_contact_way?access_token=%s' /** * 更新企业已配置的「联系我」方式 * @param configId * @param style * @param remark * @param skipVerify * @param state * @param user * @param party * @param 
accessToken {AccessToken} */ public static async updateContactWay( configId: string, style?: number, remark?: string, skipVerify?: boolean, state?: string, user?: Array<string>, party?: Array<string>, accessToken?: AccessToken ) { if (!accessToken) { accessToken = await QyAccessTokenApi.getAccessToken() } let url = util.format(this.updateContactWayUrl, accessToken.getAccessToken) return HttpKit.getHttpDelegate.httpPost( url, JSON.stringify({ config_id: configId, style: style, remark: remark, skip_verify: skipVerify, state: state, user: user, party: party }) ) } private static getContactWayUrl = 'https://qyapi.weixin.qq.com/cgi-bin/externalcontact/get_contact_way?access_token=%s' /** * 获取企业已配置的「联系我」方式 * @param configId 联系方式的配置id * @param accessToken {AccessToken} */ public static async getContactWay(configId: string, accessToken?: AccessToken) { if (!accessToken) { accessToken = await QyAccessTokenApi.getAccessToken() } let url = util.format(this.getContactWayUrl, accessToken.getAccessToken) return HttpKit.getHttpDelegate.httpPost( url, JSON.stringify({ config_id: configId }) ) } private static delContactWayUrl = 'https://qyapi.weixin.qq.com/cgi-bin/externalcontact/del_contact_way?access_token=%s' /** * 删除企业已配置的「联系我」方式 * @param configId 联系方式的配置id * @param accessToken {AccessToken} */ public static async delContactWay(configId: string, accessToken?: AccessToken) { if (!accessToken) { accessToken = await QyAccessTokenApi.getAccessToken() } let url = util.format(this.delContactWayUrl, accessToken.getAccessToken) return HttpKit.getHttpDelegate.httpPost( url, JSON.stringify({ config_id: configId }) ) } private static getUserListUrl = 'https://qyapi.weixin.qq.com/cgi-bin/externalcontact/list?access_token=%s&userid=%s' /** * 获取客户列表 * @param userId 企业成员的userid * @param accessToken {AccessToken} */ public static async getUserList(userId: string, accessToken?: AccessToken) { if (!accessToken) { accessToken = await QyAccessTokenApi.getAccessToken() } let url = 
util.format(this.getUserListUrl, accessToken.getAccessToken, userId) return HttpKit.getHttpDelegate.httpGet(url) } private static getUserInfoUrl = 'https://qyapi.weixin.qq.com/cgi-bin/externalcontact/get?access_token=%s&external_userid=%s' /** * 获取客户列表 * @param externalUserId 外部联系人的userid * @param accessToken {AccessToken} */ public static async getUserInfo(externalUserId: string, accessToken?: AccessToken) { if (!accessToken) { accessToken = await QyAccessTokenApi.getAccessToken() } let url = util.format(this.getUserInfoUrl, accessToken.getAccessToken, externalUserId) return HttpKit.getHttpDelegate.httpGet(url) } private static updateRemarkUrl = 'https://qyapi.weixin.qq.com/cgi-bin/externalcontact/remark?access_token=%s' /** * 修改客户备注信息 * @param userId * @param externalUserId * @param remark * @param description * @param remarkCompany * @param remarkMobiles * @param remarkPicMediaid * @param accessToken */ public static async updateRemark( userId: string, externalUserId: string, remark?: string, description?: string, remarkCompany?: string, remarkMobiles?: Array<string>, remarkPicMediaid?: string, accessToken?: AccessToken ) { if (!accessToken) { accessToken = await QyAccessTokenApi.getAccessToken() } let url = util.format(this.updateRemarkUrl, accessToken.getAccessToken, externalUserId) return HttpKit.getHttpDelegate.httpPost( url, JSON.stringify({ userid: userId, external_userid: externalUserId, remark: remark, description: description, remark_company: remarkCompany, remark_mobiles: remarkMobiles, remark_mediaid: remarkPicMediaid }) ) } private static getCorpTagListUrl = 'https://qyapi.weixin.qq.com/cgi-bin/externalcontact/get_corp_tag_list?access_token=%s' /** * 获取企业标签库 * @param tagId * @param accessToken */ public static async getCorpTagList(tagId?: Array<string>, accessToken?: AccessToken) { if (!accessToken) { accessToken = await QyAccessTokenApi.getAccessToken() } let url = util.format(this.getCorpTagListUrl, accessToken.getAccessToken) return 
HttpKit.getHttpDelegate.httpPost( url, JSON.stringify({ tag_id: tagId }) ) } private static addCorpTagUrl = 'https://qyapi.weixin.qq.com/cgi-bin/externalcontact/add_corp_tag?access_token=%s' /** * 添加企业客户标签 * @param groupId 标签组id * @param groupName 标签组名称 * @param order 标签组次序值 * @param tag 标签列表 * @param accessToken {AccessToken} */ public static async addCorpTag(groupId?: string, groupName?: string, order?: number, tag?: Array<{ name: string; order?: number }>, accessToken?: AccessToken) { if (!accessToken) { accessToken = await QyAccessTokenApi.getAccessToken() } let url = util.format(this.addCorpTagUrl, accessToken.getAccessToken) return HttpKit.getHttpDelegate.httpPost( url, JSON.stringify({ group_id: groupId, group_name: groupName, order: order, tag: tag }) ) } private static editCorpTagUrl = 'https://qyapi.weixin.qq.com/cgi-bin/externalcontact/edit_corp_tag?access_token=%s' /** * 编辑企业客户标签 * @param id 标签或标签组的id列表 * @param name 新的标签或标签组名称 * @param order 标签/标签组的次序值 * @param accessToken {AccessToken} */ public static async editCorpTag(id: string, name?: string, order?: number, accessToken?: AccessToken) { if (!accessToken) { accessToken = await QyAccessTokenApi.getAccessToken() } let url = util.format(this.editCorpTagUrl, accessToken.getAccessToken) return HttpKit.getHttpDelegate.httpPost( url, JSON.stringify({ id: id, name: name, order: order }) ) } private static delCorpTagUrl = 'https://qyapi.weixin.qq.com/cgi-bin/externalcontact/del_corp_tag?access_token=%s' /** * 删除企业客户标签 * @param tagId 标签的id列表 * @param groupId 标签组的id列表 * @param accessToken {AccessToken} */ public static async delCorpTag(tagId: Array<string>, groupId?: Array<string>, accessToken?: AccessToken) { if (!accessToken) { accessToken = await QyAccessTokenApi.getAccessToken() } let url = util.format(this.delCorpTagUrl, accessToken.getAccessToken) return HttpKit.getHttpDelegate.httpPost( url, JSON.stringify({ tag_id: tagId, group_id: groupId }) ) } private static markTagUrl = 
'https://qyapi.weixin.qq.com/cgi-bin/externalcontact/mark_tag?access_token=%s' /** * 编辑客户企业标签 * @param userId 添加外部联系人的userid * @param externalUserId 外部联系人userid * @param addTag 要标记的标签列表 * @param removeTag 要移除的标签列表 * @param accessToken */ public static async markTag(userId: string, externalUserId: string, addTag?: Array<string>, removeTag?: Array<string>, accessToken?: AccessToken) { if (!accessToken) { accessToken = await QyAccessTokenApi.getAccessToken() } let url = util.format(this.markTagUrl, accessToken.getAccessToken) return HttpKit.getHttpDelegate.httpPost( url, JSON.stringify({ userid: userId, external_userid: externalUserId, add_tag: addTag, remove_tag: removeTag }) ) } private static getGroupChatListUrl = 'https://qyapi.weixin.qq.com/cgi-bin/externalcontact/groupchat/list?access_token=%s' /** * 获取客户群列表 * @param offset 分页,偏移量 * @param limit 分页,预期请求的数据量,取值范围 1 ~ 1000 * @param statusFilter 群状态过滤。0 - 普通列表 1 - 离职待继承 2 - 离职继承中 3 - 离职继承完成 * @param ownerFilter 群主过滤。如果不填,表示获取全部群主的数据 * @param accessToken {AccessToken} */ public static async getGroupChatList( offset: number, limit: number, statusFilter = 0, ownerFilter?: { userid_list: Array<string>; partyid_list: Array<string> }, accessToken?: AccessToken ) { if (!accessToken) { accessToken = await QyAccessTokenApi.getAccessToken() } let url = util.format(this.getGroupChatListUrl, accessToken.getAccessToken) return HttpKit.getHttpDelegate.httpPost( url, JSON.stringify({ status_filter: statusFilter, owner_filter: ownerFilter, offset: offset, limit: limit }) ) } private static getGroupChatUrl = 'https://qyapi.weixin.qq.com/cgi-bin/externalcontact/groupchat/get?access_token=%s' /** * 获取客户群详情 * @param chatId 客户群ID * @param accessToken {AccessToken} */ public static async getGroupChat(chatId: string, accessToken?: AccessToken) { if (!accessToken) { accessToken = await QyAccessTokenApi.getAccessToken() } let url = util.format(this.getGroupChatUrl, accessToken.getAccessToken) return HttpKit.getHttpDelegate.httpPost( url, 
JSON.stringify({ chat_id: chatId }) ) } private static addMsgTemplateUrl = 'https://qyapi.weixin.qq.com/cgi-bin/externalcontact/add_msg_template?access_token=%s' /** * 添加企业群发消息任务 * @param externalUserId 客户的外部联系人id列表,不可与sender同时为空,最多可传入1万个客户 * @param sender 发送企业群发消息的成员userid,不可与external_userid同时为空 * @param text 文本消息 * @param image 图片消息 * @param link 链接消息 * @param miniprogram 小程序消息 * @param accessToken {AccessToken} */ public static async addMsgTemplate( externalUserId?: Array<string>, sender?: string, text?: { content: string }, image?: { media_id: string }, link?: { title: string url: string picurl?: string desc?: string }, miniprogram?: { title: string pic_media_id: string appid: string page: string }, accessToken?: AccessToken ) { if (!accessToken) { accessToken = await QyAccessTokenApi.getAccessToken() } let url = util.format(this.addMsgTemplateUrl, accessToken.getAccessToken) return HttpKit.getHttpDelegate.httpPost( url, JSON.stringify({ external_userid: externalUserId, sender: sender, text: text, image: image, link: link, miniprogram: miniprogram }) ) } private static getGroupMsgResultUrl = 'https://qyapi.weixin.qq.com/cgi-bin/externalcontact/get_group_msg_result?access_token=%s' /** * 获取企业群发消息发送结果 * @param msgId 群发消息的id * @param accessToken {AccessToken} */ public static async getGroupMsgResult(msgId: string, accessToken?: AccessToken) { if (!accessToken) { accessToken = await QyAccessTokenApi.getAccessToken() } let url = util.format(this.getGroupMsgResultUrl, accessToken.getAccessToken) return HttpKit.getHttpDelegate.httpPost( url, JSON.stringify({ msgid: msgId }) ) } private static sendWelcomeMsgUrl = 'https://qyapi.weixin.qq.com/cgi-bin/externalcontact/send_welcome_msg?access_token=%s' /** * 发送新客户欢迎语 * @param welcomeCode 通过添加外部联系人事件推送给企业的发送欢迎语的凭证,有效期为20秒 * @param text 文本消息 * @param image 图片消息 * @param link 链接消息 * @param miniprogram 小程序消息 * @param accessToken {AccessToken} */ public static async sendWelcomeMsg( welcomeCode: string, text?: { content: string 
}, image?: { media_id: string }, link?: { title: string url: string picurl?: string desc?: string }, miniprogram?: { title: string pic_media_id: string appid: string page: string }, accessToken?: AccessToken ) { if (!accessToken) { accessToken = await QyAccessTokenApi.getAccessToken() } let url = util.format(this.sendWelcomeMsgUrl, accessToken.getAccessToken) return HttpKit.getHttpDelegate.httpPost( url, JSON.stringify({ welcome_code: welcomeCode, text: text, image: image, link: link, miniprogram: miniprogram }) ) } private static addGroupWelcomeTemplateUrl = 'https://qyapi.weixin.qq.com/cgi-bin/externalcontact/group_welcome_template/add?access_token=%s' /** * 添加群欢迎语素材 * @param text 文本消息 * @param image 图片消息 * @param link 链接消息 * @param miniprogram 小程序消息 * @param accessToken {AccessToken} */ public static async addGroupWelcomeTemplate( text?: { content: string }, image?: { media_id: string }, link?: { title: string url: string picurl?: string desc?: string }, miniprogram?: { title: string pic_media_id: string appid: string page: string }, accessToken?: AccessToken ) { if (!accessToken) { accessToken = await QyAccessTokenApi.getAccessToken() } let url = util.format(this.addGroupWelcomeTemplateUrl, accessToken.getAccessToken) return HttpKit.getHttpDelegate.httpPost( url, JSON.stringify({ text: text, image: image, link: link, miniprogram: miniprogram }) ) } private static editGroupWelcomeTemplateUrl = 'https://qyapi.weixin.qq.com/cgi-bin/externalcontact/group_welcome_template/edit?access_token=%s' /** * 编辑群欢迎语素材 * @param templateId 群欢迎语的素材id * @param text 文本消息 * @param image 图片消息 * @param link 链接消息 * @param miniprogram 小程序消息 * @param accessToken {AccessToken} */ public static async editGroupWelcomeTemplate( templateId: string, text?: { content: string }, image?: { media_id: string }, link?: { title: string url: string picurl?: string desc?: string }, miniprogram?: { title: string pic_media_id: string appid: string page: string }, accessToken?: AccessToken ) { if 
(!accessToken) { accessToken = await QyAccessTokenApi.getAccessToken() } let url = util.format(this.editGroupWelcomeTemplateUrl, accessToken.getAccessToken) return HttpKit.getHttpDelegate.httpPost( url, JSON.stringify({ template_id: templateId, text: text, image: image, link: link, miniprogram: miniprogram }) ) } private static getGroupWelcomeTemplateUrl = 'https://qyapi.weixin.qq.com/cgi-bin/externalcontact/group_welcome_template/get?access_token=%s' /** * 获取群欢迎语素材 * @param templateId 群欢迎语的素材id * @param accessToken {AccessToken} */ public static async getGroupWelcomeTemplate(templateId: string, accessToken?: AccessToken) { if (!accessToken) { accessToken = await QyAccessTokenApi.getAccessToken() } let url = util.format(this.getGroupWelcomeTemplateUrl, accessToken.getAccessToken) return HttpKit.getHttpDelegate.httpPost( url, JSON.stringify({ template_id: templateId }) ) } private static delGroupWelcomeTemplateUrl = 'https://qyapi.weixin.qq.com/cgi-bin/externalcontact/group_welcome_template/del?access_token=%s' /** * 获取群欢迎语素材 * @param templateId 群欢迎语的素材id * @param accessToken {AccessToken} */ public static async delGroupWelcomeTemplate(templateId: string, accessToken?: AccessToken) { if (!accessToken) { accessToken = await QyAccessTokenApi.getAccessToken() } let url = util.format(this.delGroupWelcomeTemplateUrl, accessToken.getAccessToken) return HttpKit.getHttpDelegate.httpPost( url, JSON.stringify({ template_id: templateId }) ) } private static getUnAssignedListUrl = 'https://qyapi.weixin.qq.com/cgi-bin/externalcontact/get_unassigned_list?access_token=%s' /** * 获取离职成员的客户列表 * @param pageId * @param pageSize * @param accessToken {AccessToken} */ public static async getUnAssignedList(pageId: number, pageSize: number, accessToken?: AccessToken) { if (!accessToken) { accessToken = await QyAccessTokenApi.getAccessToken() } let url = util.format(this.getUnAssignedListUrl, accessToken.getAccessToken) return HttpKit.getHttpDelegate.httpPost( url, JSON.stringify({ page_id: 
pageId, page_size: pageSize }) ) } private static transferContactUrl = 'https://qyapi.weixin.qq.com/cgi-bin/externalcontact/transfer?access_token=%s' /** * 离职成员的外部联系人再分配 * @param externalUserId 外部联系人的userid,注意不是企业成员的帐号 * @param handOverUserId 离职成员的userid * @param takeOverUserId 接替成员的userid * @param accessToken {AccessToken} */ public static async transferContact(externalUserId: string, handOverUserId: string, takeOverUserId: string, accessToken?: AccessToken) { if (!accessToken) { accessToken = await QyAccessTokenApi.getAccessToken() } let url = util.format(this.transferContactUrl, accessToken.getAccessToken) return HttpKit.getHttpDelegate.httpPost( url, JSON.stringify({ external_userid: externalUserId, handover_userid: handOverUserId, takeover_userid: takeOverUserId }) ) } private static transferGroupChatUrl = 'https://qyapi.weixin.qq.com/cgi-bin/externalcontact/groupchat/transfer?access_token=%s' /** * 离职成员的群再分配 * @param chatIdList 需要转群主的客户群ID列表 * @param newOwner 新群主ID * @param accessToken {AccessToken} */ public static async transferGroupChat(chatIdList: Array<string>, newOwner: string, accessToken?: AccessToken) { if (!accessToken) { accessToken = await QyAccessTokenApi.getAccessToken() } let url = util.format(this.transferGroupChatUrl, accessToken.getAccessToken) return HttpKit.getHttpDelegate.httpPost( url, JSON.stringify({ chat_id_list: chatIdList, new_owner: newOwner }) ) } private static getUserBehaviorDataUrl = 'https://qyapi.weixin.qq.com/cgi-bin/externalcontact/get_user_behavior_data?access_token=%s' /** * 获取联系客户统计数据 * @param startTime 数据起始时间 * @param endTime 数据结束时间 * @param userId 用户ID列表 * @param partyId 部门ID列表 * @param accessToken {AccessToken} */ public static async getUserBehaviorData(startTime: number, endTime: number, userId?: Array<string>, partyId?: Array<number>, accessToken?: AccessToken) { if (!accessToken) { accessToken = await QyAccessTokenApi.getAccessToken() } let url = util.format(this.getUserBehaviorDataUrl, accessToken.getAccessToken) 
return HttpKit.getHttpDelegate.httpPost( url, JSON.stringify({ userid: userId, partyid: partyId, start_time: startTime, end_time: endTime }) ) } private static getGroupChatStatisticUrl = 'https://qyapi.weixin.qq.com/cgi-bin/externalcontact/groupchat/statistic?access_token=%s' /** * 获取客户群统计数据 * @param dayBeginTime 开始时间,填当天开始的0分0秒(否则系统自动处理为当天的0分0秒)。取值范围:昨天至前60天 * @param ownerFilter 群主过滤,如果不填,表示获取全部群主的数据 * @param orderBy 排序方式。1 - 新增群的数量 2 - 群总数 3 - 新增群人数 4 - 群总人数 * @param orderAsc 是否升序。0-否;1-是。默认降序 * @param offset 分页,偏移量, 默认为0 * @param limit 分页,预期请求的数据量,默认为500,取值范围 1 ~ 1000 * @param accessToken {AccessToken} */ public static async getGroupChatStatistic( dayBeginTime: number, ownerFilter?: { userid_list?: Array<string> partyid_list?: Array<number> }, orderBy?: number, orderAsc?: number, offset?: number, limit?: number, accessToken?: AccessToken ) { if (!accessToken) { accessToken = await QyAccessTokenApi.getAccessToken() } let url = util.format(this.getGroupChatStatisticUrl, accessToken.getAccessToken) return HttpKit.getHttpDelegate.httpPost( url, JSON.stringify({ day_begin_time: dayBeginTime, owner_filter: ownerFilter, order_by: orderBy, order_asc: orderAsc, offset: offset, limit: limit }) ) } }
the_stack
import {
    PropertyFactory,
    BaseProperty,
    ArrayProperty,
    MapProperty,
    SetProperty,
    NodeProperty,
} from "@fluid-experimental/property-properties";

import { PropertyProxy } from "..";

import {
    vector2DTemplate,
    vector3DTemplate,
    enumUnoDosTresTemplate,
    bookDataTemplate,
    collectionConstants,
    genericTemplate,
} from "./testSchemas";

// --------------------- unit testing ----------------------------------
describe("JS-Object-like property accessing ", function() {
    // Shared fixtures for the whole suite (populated once in prerequisite/prepareRootNode).
    let rootNode;
    let testProperty;
    // ---------- toJs()-----------------------------
    // `state` is the proxied view of `rootNode` that the tests exercise.
    let state;

    /**
     * Builds the property tree used by every test under `rootNode` and creates the
     * proxied `state` view of it: a custom generic test property, a book property,
     * constant collections, and generic array/map containers holding nested
     * Int32 arrays, Int32 maps, and NamedProperty sets.
     */
    function prepareRootNode() {
        // Creating custom properties
        testProperty = PropertyFactory.create("autodesk.appframework.tests:myGenericTemplate-1.0.0");

        // Naming the custom properties (i.e. inserting them into the root node)
        rootNode.insert("myTestProperty", testProperty);
        rootNode.insert("myBook", PropertyFactory.create(bookDataTemplate.typeid, "single"));
        rootNode.insert("constantCollections", PropertyFactory.create(collectionConstants.typeid));

        // Create an Array of NodeProperties, we should be able to create arrays of collections
        rootNode.insert("myGenericArray", PropertyFactory.create(vector2DTemplate.typeid, "array"));
        rootNode.get("myGenericArray").push(PropertyFactory.create("Int32", "array", [0, 1, 2, 3]));
        rootNode.get("myGenericArray").push(PropertyFactory.create("Int32", "map", { a: 0, b: 1, c: 2 }));
        rootNode.get("myGenericArray").push(PropertyFactory.create("NamedProperty", "set"));
        // Two distinct NamedProperty instances are added to the set (sets key by GUID).
        rootNode.get("myGenericArray").get(2).set(PropertyFactory.create("NamedProperty", "single"));
        rootNode.get("myGenericArray").get(2).set(PropertyFactory.create("NamedProperty", "single"));

        rootNode.insert("myGenericMap", PropertyFactory.create("NodeProperty", "map"));
        rootNode.get("myGenericMap").insert("array", PropertyFactory.create("Int32", "array", [0, 1, 2, 3]));
        rootNode.get("myGenericMap").insert("map", PropertyFactory.create("Int32", "map", { a: 0, b: 1, c: 2 }));
rootNode.get("myGenericMap").insert("set", PropertyFactory.create("NamedProperty", "set")); rootNode.get("myGenericMap").get("set").set(PropertyFactory.create("NamedProperty", "single")); rootNode.get("myGenericMap").get("set").set(PropertyFactory.create("NamedProperty", "single")); // Calling things from PropertyProxy state = PropertyProxy.proxify(rootNode); } /** * @inheritdoc */ function prerequisite() { PropertyFactory.register(bookDataTemplate); PropertyFactory.register(vector2DTemplate); PropertyFactory.register(vector3DTemplate); PropertyFactory.register(genericTemplate); PropertyFactory.register(collectionConstants); PropertyFactory.register(enumUnoDosTresTemplate); rootNode = PropertyFactory.create("NodeProperty"); prepareRootNode(); } beforeAll(prerequisite); describe("Proxy properties directly", function() { it("should throw if something other than a property is supplied", function() { const someNonProperty = { a: 1, name: "property" }; expect(() => { PropertyProxy.proxify(someNonProperty as any); }).toThrow("PropertyProxy-000"); }); it("should return the value if a value property is supplied", function() { const value = PropertyProxy.proxify(rootNode.get("myTestProperty").get("myF32Number")); expect(typeof value).toEqual("number"); expect(value).toEqual(3); }); it("should be able to proxy Array/Map/SetProperties directly", function() { const proxiedI32Array = PropertyProxy.proxify(rootNode.resolvePath("myTestProperty.myI32Array") as ArrayProperty); expect(proxiedI32Array.length).toEqual(5); const proxiedI32Map = PropertyProxy.proxify(rootNode.resolvePath("myTestProperty.myMap") as MapProperty); expect(proxiedI32Map.size).toEqual(3); const proxiedSet = PropertyProxy.proxify(rootNode.resolvePath("myTestProperty.myBookSet") as SetProperty); expect(proxiedSet.size).toEqual(3); }); }); describe("JSON.stringify", function() { it("should not throw if called on state", function() { expect(() => { JSON.stringify(state); }).not.toThrow(); }); it("should give 
return {} if called on proxied Map/SetProperties", function() { expect(JSON.stringify(state.myTestProperty.myMap)).toEqual("{}"); expect(JSON.stringify(state.myTestProperty.myBookSet)).toEqual("{}"); }); }); describe("The following work as JS object: ", function() { beforeEach(function() { rootNode.remove("myTestProperty"); rootNode.insert("myTestProperty", PropertyFactory.create(genericTemplate.typeid)); }); it("The property that is registered", function() { expect(typeof (testProperty)).toEqual("object"); }); it("should be able to obtain the proxied property and its direct children via getProperty()", function() { expect(state.getProperty()).toEqual(rootNode.getRoot()); expect(state.getProperty("myTestProperty")).toEqual(rootNode.get("myTestProperty")); expect(state.getProperty(["myTestProperty"])).toEqual(rootNode.get("myTestProperty")); expect(state.getProperty("myTestProperty.myVector")).toBeUndefined(); expect(() => { state.getProperty(["myTestProperty", "myVector"]); }).toThrow("PropertyProxy-010"); }); it("should be possible to use the `in` operator", function() { expect("myF32Number" in state.myTestProperty).toEqual(true); expect(0 in state.myTestProperty.myI32Array).toEqual(true); expect("someThingThatIsNoChild" in state.myTestProperty).toEqual(false); expect( rootNode.resolvePath("myTestProperty.myI32Array").getLength() in state.myTestProperty.myI32Array).toEqual( false); }); describe("NodeProperty", function() { it("should be able to insert primitive and non-primitive properties", function() { state.myFirstPrimitivePropertyInsertedViaProxy = PropertyFactory.create("Int32", "single", 42); expect(rootNode.get("myFirstPrimitivePropertyInsertedViaProxy").getValue()).toEqual(42); state.myFirstNonPrimitivePropertyInsertedViaProxy = PropertyFactory.create( vector2DTemplate.typeid, "single", { x: 1, y: 2 }, ); expect(rootNode.get("myFirstNonPrimitivePropertyInsertedViaProxy").get("x").getValue()).toEqual(1); 
expect(rootNode.get("myFirstNonPrimitivePropertyInsertedViaProxy").get("y").getValue()).toEqual(2); // add a proxied property that has a parent and already is in the tree should throw expect(() => { state.mySecondNonPrimitivePropertyInsertedViaProxy = state.myFirstNonPrimitivePropertyInsertedViaProxy; }).toThrow(); // setting non-properties should not work expect(() => { state.shouldNotWork = 1; }).toThrow(); expect(() => { state.shouldNotWork2 = { a: 1 }; }).toThrow(); expect(() => { state.shouldNotWork3 = undefined; }).toThrow(); // Trying to set on non-dynamic property should not work expect(() => { state.myBook.year = PropertyFactory.create("Int32", "single", 1977); }).toThrow( "PropertyProxy-001"); }); it("should be able to delete primitive and non-primitive properties", function() { let removed = delete state.myFirstPrimitivePropertyInsertedViaProxy; expect(removed).toEqual(true); expect(rootNode.get("myFirstPrimitivePropertyInsertedViaProxy")).toBeUndefined(); removed = delete state.myFirstNonPrimitivePropertyInsertedViaProxy; expect(removed).toEqual(true); expect(rootNode.get("myFirstNonPrimitivePropertyInsertedViaProxy")).toBeUndefined(); // Trying to delete something that is not a child of a NodeProperty should throw expect(() => { delete state.myTestProperty.myVector; }).toThrow("PropertyProxy-006"); }); }); describe("ReferenceProperties", function() { beforeEach(function() { rootNode.resolvePath("myTestProperty.myReference*").setValue("myVector"); }); describe("single", function() { it("should access the referenced property", function() { expect(state.myTestProperty.myReference.getProperty()).toEqual( state.myTestProperty.myVector.getProperty()); rootNode.resolvePath("myTestProperty.myReference*").setValue("myI32Array[0]"); expect(state.myTestProperty.myReference).toEqual(0); rootNode.resolvePath("myTestProperty.myReference*").setValue("/myTestProperty.myI32Array[0]"); expect(state.myTestProperty.myReference).toEqual(0); 
rootNode.resolvePath("myTestProperty.myReference*").setValue("myComplexArray[0]"); expect(state.myTestProperty.myReference.getProperty()).toEqual( state.myTestProperty.myComplexArray[0].getProperty()); rootNode.resolvePath("myTestProperty.myReference*").setValue("/myTestProperty.myComplexArray[0]"); expect(state.myTestProperty.myReference.getProperty()).toEqual( state.myTestProperty.myComplexArray[0].getProperty()); }); it("should be able to resolve multi-hop references", function() { expect(state.myTestProperty.myMultiHopReference.getProperty()).toEqual( state.myTestProperty.myVector.getProperty()); }); it("should be able to change the referenced property", function() { let oldValue = rootNode.resolvePath("myTestProperty.myVector").getValues(); state.myTestProperty.myReference = { x: 7, y: 8 }; expect(rootNode.resolvePath("myTestProperty.myVector.x").getValue()).toEqual(7); expect(rootNode.resolvePath("myTestProperty.myVector.y").getValue()).toEqual(8); rootNode.resolvePath("myTestProperty.myVector").setValues(oldValue); oldValue = rootNode.resolvePath("myTestProperty.myI32Array[0]"); rootNode.resolvePath("myTestProperty.myReference*").setValue("myI32Array[0]"); state.myTestProperty.myReference = 10; expect(state.myTestProperty.myReference).toEqual(10); rootNode.resolvePath("myTestProperty.myI32Array").set(0, oldValue); rootNode.resolvePath("myTestProperty.myReference*").setValue("/myTestProperty.myI32Array[0]"); state.myTestProperty.myReference = 10; expect(state.myTestProperty.myReference).toEqual(10); rootNode.resolvePath("myTestProperty.myI32Array").set(0, oldValue); oldValue = rootNode.resolvePath("myTestProperty.myComplexArray[0]").getValues(); rootNode.resolvePath("myTestProperty.myReference*").setValue("myComplexArray[0]"); state.myTestProperty.myReference = { x: 7, y: 8 }; expect(rootNode.resolvePath("myTestProperty.myComplexArray[0].x").getValue()).toEqual(7); expect(rootNode.resolvePath("myTestProperty.myComplexArray[0].y").getValue()).toEqual(8); 
rootNode.resolvePath("myTestProperty.myComplexArray[0]").setValues(oldValue); rootNode.resolvePath("myTestProperty.myReference*").setValue("/myTestProperty.myComplexArray[0]"); state.myTestProperty.myReference = { x: 7, y: 8 }; expect(rootNode.resolvePath("myTestProperty.myComplexArray[0].x").getValue()).toEqual(7); expect(rootNode.resolvePath("myTestProperty.myComplexArray[0].y").getValue()).toEqual(8); rootNode.resolvePath("myTestProperty.myComplexArray[0]").setValues(oldValue); }); it("should be able to access the stored path via *", function() { expect(state.myTestProperty["myReference*"]).toEqual("myVector"); expect(state.myTestProperty["myMultiHopReference*"]).toEqual("myReference"); }); it("should be able to obtain the reference property via getProperty() from the parent", function() { expect( state.myTestProperty.getProperty(["myReference", BaseProperty.PATH_TOKENS.REF]), ).toEqual(rootNode.resolvePath("myTestProperty.myReference*")); expect( state.myTestProperty.getProperty("myReference", { referenceResolutionMode: BaseProperty.REFERENCE_RESOLUTION.NEVER }, ), ).toEqual(rootNode.resolvePath("myTestProperty.myReference*")); }); it("should be able to assign another path/property to reference another property", function() { // Relative Path state.myTestProperty["myReference*"] = "myF32Number"; expect( state.myTestProperty.getProperty("myReference"), ).toEqual(state.myTestProperty.getProperty("myF32Number")); // Complicated Relative Path state.myTestProperty["myReference*"] = "../myBook"; expect(state.myTestProperty.getProperty("myReference")).toEqual(state.getProperty("myBook")); // Property state.myTestProperty["myReference*"] = state.myTestProperty.myVector; expect(state.myTestProperty.myReference.getProperty()).toEqual( state.myTestProperty.myVector.getProperty()); // Absolute Path state.myTestProperty["myReference*"] = "/myTestProperty.myF32Number"; expect( state.myTestProperty.getProperty("myReference"), 
).toEqual(state.myTestProperty.getProperty("myF32Number")); }); it("should throw if setting via * is used on a non reference property", function() { expect(() => { state.myTestProperty["myF32Number*"] = "something"; }).toThrow("PropertyProxy-008"); expect(() => { state.myTestProperty.myMap.set("firstNumber*", "something"); }).toThrow("PropertyProxy-008"); expect(() => { state.myTestProperty.myI32Array["0*"] = "something"; }).toThrow("PropertyProxy-008"); }); it("should throw if not in in the same tree and referenced via absolute path", function() { const prop = PropertyFactory.create<NodeProperty>("NodeProperty", "single"); prop.insert("ref", PropertyFactory.create("Reference", "single", "/myTestProperty.myF32Number")); const proxiedProp = PropertyProxy.proxify(prop); expect(() => { proxiedProp.ref = 5; }).toThrow("PropertyProxy-009"); rootNode.insert("prop", prop); expect(() => { proxiedProp.ref = 3; }).not.toThrow(); rootNode.remove("prop"); }); it("should throw if trying to set a non valid reference", function() { rootNode.resolvePath("myTestProperty.myReference*").setValue("relativeInvalid"); expect(() => { state.myTestProperty.myReference = 10; }).toThrow("PropertyProxy-009"); rootNode.resolvePath("myTestProperty.myReference*").setValue("/absoluteInvalid"); expect(() => { state.myTestProperty.myReference = 10; }).toThrow("PropertyProxy-009"); rootNode.resolvePath("myTestProperty.myReference*").setValue("myVector"); }); }); describe.skip("RepositoryReference", function() { it("should return property if accessed via * syntax", function() { expect(state["repoRef*"].getProperty()).toEqual(rootNode.resolvePath("repoRef*")); expect(state.repoRefArray["0*"].getProperty()).toEqual(rootNode.resolvePath("repoRefArray[0]*")); expect(state.repoRefMap.get("a*").getProperty()).toEqual(rootNode.resolvePath("repoRefMap[a]*")); }); it("should be able to access properties in the repository reference", function() { expect(state.repoRef.myTestProperty.myF32Number).toEqual(3); 
expect(state.repoRefArray[0].myTestProperty.myF32Number).toEqual(3); expect(state.repoRefMap.get("a").myTestProperty.myF32Number).toEqual(3); }); it("should not be able to assign something via the proxy", function() { expect(() => { state.repoRef = state.repoRef = { myTestProperty: { myVector: { x: 8, y: 7 } } }; }).toThrow(); }); }); describe("array", function() { let refArraySum = 0; let refArray; const refArrayEntriesToString: string[] = []; beforeAll(function() { refArray = rootNode.resolvePath("myTestProperty.myReferenceArray"); for (let i = 0; i < refArray.getLength(); ++i) { const entry = refArray.get(i); if (PropertyFactory.instanceOf(entry, "BaseProperty")) { if (PropertyFactory.instanceOf(entry, "ContainerProperty") && entry.has("x")) { refArraySum += entry.get("x").getValue(); } else { refArraySum += refArray.get(i).getValue(); } } else { refArraySum += entry; } } // Get ref entries value(s).toString() for (let i = 0; i < refArray.getLength(); ++i) { const entry = refArray.get(i); if (PropertyFactory.instanceOf(entry, "BaseProperty")) { if (entry.isPrimitiveType()) { refArrayEntriesToString.push(entry.getValue().toString()); } else { refArrayEntriesToString.push(entry.getValues().toString()); } } else { refArrayEntriesToString.push(entry.toString()); } } }); it("should access the referenced property", function() { // myF32Number expect(state.myTestProperty.myReferenceArray[0]).toEqual(3); expect(state.myTestProperty.myReferenceArray[1]).toEqual(3); expect(state.myTestProperty.myReferenceArray[2]).toEqual(3); // myVector expect(state.myTestProperty.myReferenceArray[3].x).toEqual(1); expect(state.myTestProperty.myReferenceArray[3].y).toEqual(2); expect(state.myTestProperty.myReferenceArray[4].x).toEqual(1); expect(state.myTestProperty.myReferenceArray[4].y).toEqual(2); expect(state.myTestProperty.myReferenceArray[5].x).toEqual(1); expect(state.myTestProperty.myReferenceArray[5].y).toEqual(2); // myI32Array[0] 
expect(state.myTestProperty.myReferenceArray[6]).toEqual(0); expect(state.myTestProperty.myReferenceArray[7]).toEqual(0); expect(state.myTestProperty.myReferenceArray[8]).toEqual(0); // myComplexArray[0] expect(state.myTestProperty.myReferenceArray[9].x).toEqual(1); expect(state.myTestProperty.myReferenceArray[9].y).toEqual(2); expect(state.myTestProperty.myReferenceArray[10].x).toEqual(1); expect(state.myTestProperty.myReferenceArray[10].y).toEqual(2); expect(state.myTestProperty.myReferenceArray[11].x).toEqual(1); expect(state.myTestProperty.myReferenceArray[11].y).toEqual(2); // myMap[0] expect(state.myTestProperty.myReferenceArray[12]).toEqual(1111); expect(state.myTestProperty.myReferenceArray[13]).toEqual(1111); expect(state.myTestProperty.myReferenceArray[14]).toEqual(1111); // myComplexMap[0] expect(state.myTestProperty.myReferenceArray[15].x).toEqual(10); expect(state.myTestProperty.myReferenceArray[15].y).toEqual(20); expect(state.myTestProperty.myReferenceArray[16].x).toEqual(10); expect(state.myTestProperty.myReferenceArray[16].y).toEqual(20); expect(state.myTestProperty.myReferenceArray[17].x).toEqual(10); expect(state.myTestProperty.myReferenceArray[17].y).toEqual(20); }); it("should access the referenced property in the presence of multi-hops", function() { // myF32Number expect(state.myTestProperty.myReferenceArray[18]).toEqual(3); expect(state.myTestProperty.myReferenceArray[19]).toEqual(3); expect(state.myTestProperty.myReferenceArray[20]).toEqual(3); // myVector expect(state.myTestProperty.myReferenceArray[21].x).toEqual(1); expect(state.myTestProperty.myReferenceArray[21].y).toEqual(2); expect(state.myTestProperty.myReferenceArray[22].x).toEqual(1); expect(state.myTestProperty.myReferenceArray[22].y).toEqual(2); expect(state.myTestProperty.myReferenceArray[23].x).toEqual(1); expect(state.myTestProperty.myReferenceArray[23].y).toEqual(2); // myI32Array[0] expect(state.myTestProperty.myReferenceArray[24]).toEqual(0); 
expect(state.myTestProperty.myReferenceArray[25]).toEqual(0); expect(state.myTestProperty.myReferenceArray[26]).toEqual(0); // myComplexArray[0] expect(state.myTestProperty.myReferenceArray[27].x).toEqual(1); expect(state.myTestProperty.myReferenceArray[27].y).toEqual(2); expect(state.myTestProperty.myReferenceArray[28].x).toEqual(1); expect(state.myTestProperty.myReferenceArray[28].y).toEqual(2); expect(state.myTestProperty.myReferenceArray[29].x).toEqual(1); expect(state.myTestProperty.myReferenceArray[29].y).toEqual(2); // myMap[0] expect(state.myTestProperty.myReferenceArray[30]).toEqual(1111); expect(state.myTestProperty.myReferenceArray[31]).toEqual(1111); expect(state.myTestProperty.myReferenceArray[32]).toEqual(1111); // myComplexMap[0] expect(state.myTestProperty.myReferenceArray[33].x).toEqual(10); expect(state.myTestProperty.myReferenceArray[33].y).toEqual(20); expect(state.myTestProperty.myReferenceArray[34].x).toEqual(10); expect(state.myTestProperty.myReferenceArray[34].y).toEqual(20); expect(state.myTestProperty.myReferenceArray[35].x).toEqual(10); expect(state.myTestProperty.myReferenceArray[35].y).toEqual(20); }); it("should be able to access stored reference path strings via *", function() { for (let i = 0; i < refArray.getLength(); ++i) { expect(state.myTestProperty.myReferenceArray[`${i}*`]).toEqual(refArray.getValue(i)); } }); it("for loop", function() { let sum = 0; for (const element of state.myTestProperty.myReferenceArray) { if (element.x) { sum += element.x; } else { sum += element; } } expect(refArraySum).toEqual(sum); }); it("for-of loop", function() { let sum = 0; for (const entry of state.myTestProperty.myReferenceArray) { if (entry.x) { sum += entry.x; } else { sum += entry; } } expect(refArraySum).toEqual(sum); }); it("check .concat() functionality", function() { const concat = state.myTestProperty.myReferenceArray.concat(state.myTestProperty.myI32Array); expect(concat.length).toEqual( refArray.getLength() + 
rootNode.resolvePath("myTestProperty.myI32Array").getLength()); const synthToString = `${refArrayEntriesToString.join(",") },${rootNode.resolvePath("myTestProperty.myI32Array").getEntriesReadOnly().toString()}`; expect(concat.toString()).toEqual(synthToString); }); it("check .entries() functionality", function() { const iterator = state.myTestProperty.myReferenceArray.entries(); // myFloat32Number expect(iterator.next().value[1]).toEqual(3); expect(iterator.next().value[1]).toEqual(3); expect(iterator.next().value[1]).toEqual(3); // myVector expect(iterator.next().value[1].x).toEqual(1); expect(iterator.next().value[1].x).toEqual(1); expect(iterator.next().value[1].x).toEqual(1); // myI32Array[0] expect(iterator.next().value[1]).toEqual(0); expect(iterator.next().value[1]).toEqual(0); expect(iterator.next().value[1]).toEqual(0); // myComplexArray[0] expect(iterator.next().value[1].x).toEqual(1); expect(iterator.next().value[1].x).toEqual(1); expect(iterator.next().value[1].x).toEqual(1); // myMap[firstNumber] expect(iterator.next().value[1]).toEqual(1111); expect(iterator.next().value[1]).toEqual(1111); expect(iterator.next().value[1]).toEqual(1111); // myComplexMap[firstEntry] expect(iterator.next().value[1].x).toEqual(10); expect(iterator.next().value[1].x).toEqual(10); expect(iterator.next().value[1].x).toEqual(10); }); it("check .every() functionality", function() { expect(state.myTestProperty.myReferenceArray.every((element) => { if (element.x) { return element.x <= 10; } else { return element <= 1111; } })).toEqual(true); expect(state.myTestProperty.myReferenceArray.every((element) => { if (element.x) { return element.x < 10; } else { return element < 1111; } })).toEqual(false); }); it("check .filter() functionality", function() { const filtered = state.myTestProperty.myReferenceArray.filter((element) => (element === 1111)); expect(filtered.length).toEqual(6); expect(filtered[0]).toEqual(1111); expect(filtered[1]).toEqual(1111); 
expect(filtered[2]).toEqual(1111); }); it("check .find() functionality", function() { expect(state.myTestProperty.myReferenceArray.find((element) => (element === 3))).toEqual(3); }); it("check .findIndex() functionality", function() { expect(state.myTestProperty.myReferenceArray.findIndex((element) => (element === 3))).toEqual(0); }); it("check .foreach() functionality", function() { rootNode.resolvePath("myTestProperty.myReferenceArray").push("myI32Array"); let referenceArraySum = 0; let numNonPrimitiveProps = 0; state.myTestProperty.myReferenceArray.forEach((el) => { if (el.getProperty && !el.getProperty().isPrimitiveType()) { numNonPrimitiveProps += 1; } if (el.getProperty && el.getProperty().getContext() === "array") { el.forEach((anotherEl) => { referenceArraySum += anotherEl; }); } }); expect(numNonPrimitiveProps).toEqual(18); expect(referenceArraySum).toEqual(100); rootNode.resolvePath("myTestProperty.myReferenceArray").pop(); }); it("check .includes() functionality", function() { expect(state.myTestProperty.myReferenceArray.includes(3)).toEqual(true); expect( state.myTestProperty.myReferenceArray.includes(rootNode.resolvePath("myTestProperty.myVector")), ).toEqual(true); expect(state.myTestProperty.myReferenceArray.includes({ x: 1, y: 2 })).toEqual(false); }); it("check .indexOf() functionality", function() { const rA = state.myTestProperty.myReferenceArray; expect(rA.indexOf(3)).toEqual(0); expect(rA.indexOf(state.myTestProperty.myVector)).toEqual(3); expect(rA.indexOf(state.myTestProperty.myComplexArray[0])).toEqual(9); }); it("check .join() functionality", function() { const joined = state.myTestProperty.myReferenceArray.join(" "); expect(joined).toEqual(refArrayEntriesToString.join(" ")); }); it("check .lastIndexOf() functionality", function() { const rA = state.myTestProperty.myReferenceArray; expect(rA.lastIndexOf(3)).toEqual(20); expect(rA.lastIndexOf(state.myTestProperty.myVector)).toEqual(23); 
expect(rA.lastIndexOf(state.myTestProperty.myComplexArray[0])).toEqual(29); }); it("check .map() functionality", function() { const result = state.myTestProperty.myReferenceArray.map((el) => { return (el < 1111 || el.x < 10); }); expect(result.toString()).toEqual( "true,true,true," + "true,true,true," + "true,true,true," + "true,true,true," + "false,false,false," + "false,false,false," + "true,true,true," + "true,true,true," + "true,true,true," + "true,true,true," + "false,false,false," + "false,false,false"); }); it("check .reduce() functionality", function() { expect(state.myTestProperty.myReferenceArray.reduce((accumulator, currentValue) => { return accumulator + (currentValue.x ? currentValue.x : currentValue); }, 0)).toEqual(refArraySum); }); it("check .reduceRight() functionality", function() { expect(state.myTestProperty.myReferenceArray.reduceRight((previousValue, currentValue) => { return previousValue + (currentValue.x ? currentValue.x : currentValue); }, 0)).toEqual(refArraySum); }); it("check .some() functionality", function() { expect(state.myTestProperty.myReferenceArray.some((element) => (element === 3))).toEqual(true); expect(state.myTestProperty.myReferenceArray.some((element) => (element === 4))).toEqual(false); }); it("check .toString() functionality", function() { const synthToString = refArrayEntriesToString.join(","); expect(state.myTestProperty.myReferenceArray.toString()).toEqual(synthToString); }); describe("Setting", function() { const reset = () => { rootNode.resolvePath("myTestProperty.myF32Number").setValue(3); rootNode.resolvePath("myTestProperty.myVector").setValues({ x: 1, y: 2 }); rootNode.resolvePath("myTestProperty.myI32Array").set(0, 0); rootNode.resolvePath("myTestProperty.myComplexArray").set(0, { x: 1, y: 2 }); rootNode.resolvePath("myTestProperty.myMap").set("firstNumber", 1111); rootNode.resolvePath("myTestProperty.myComplexMap[firstEntry]").setValues({ x: 10, y: 20 }); 
rootNode.resolvePath("myTestProperty.myReferenceArray").setValues([ "myF32Number", "../myTestProperty.myF32Number", "/myTestProperty.myF32Number", "myVector", "../myTestProperty.myVector", "/myTestProperty.myVector", "myI32Array[0]", "../myTestProperty.myI32Array[0]", "/myTestProperty.myI32Array[0]", "myComplexArray[0]", "/myTestProperty.myComplexArray[0]", "../myTestProperty.myComplexArray[0]", "myMap[firstNumber]", "../myTestProperty.myMap[firstNumber]", "/myTestProperty.myMap[firstNumber]", "myComplexMap[firstEntry]", "../myTestProperty.myComplexMap[firstEntry]", "/myTestProperty.myComplexMap[firstEntry]", "myReferenceArray[0]", "myReferenceArray[1]", "myReferenceArray[2]", "myReferenceArray[3]", "myReferenceArray[4]", "myReferenceArray[5]", "myReferenceArray[6]", "myReferenceArray[7]", "myReferenceArray[8]", "myReferenceArray[9]", "myReferenceArray[10]", "myReferenceArray[11]", "myReferenceArray[12]", "myReferenceArray[13]", "myReferenceArray[14]", "myReferenceArray[15]", "myReferenceArray[16]", "myReferenceArray[17]", ]); }; beforeEach(function() { reset(); }); it("should be able to change the referenced properties", function() { state.myTestProperty.myReferenceArray[0] = 4; expect(rootNode.resolvePath("myTestProperty.myF32Number").getValue()).toEqual(4); state.myTestProperty.myReferenceArray[1] = 5; expect(rootNode.resolvePath("myTestProperty.myF32Number").getValue()).toEqual(5); state.myTestProperty.myReferenceArray[2] = 6; expect(rootNode.resolvePath("myTestProperty.myF32Number").getValue()).toEqual(6); state.myTestProperty.myReferenceArray[3] = { x: 3, y: 4 }; expect(rootNode.resolvePath("myTestProperty.myVector.x").getValue()).toEqual(3); expect(rootNode.resolvePath("myTestProperty.myVector.y").getValue()).toEqual(4); state.myTestProperty.myReferenceArray[4] = { x: 5, y: 6 }; expect(rootNode.resolvePath("myTestProperty.myVector.x").getValue()).toEqual(5); expect(rootNode.resolvePath("myTestProperty.myVector.y").getValue()).toEqual(6); 
state.myTestProperty.myReferenceArray[5] = { x: 7, y: 8 }; expect(rootNode.resolvePath("myTestProperty.myVector.x").getValue()).toEqual(7); expect(rootNode.resolvePath("myTestProperty.myVector.y").getValue()).toEqual(8); state.myTestProperty.myReferenceArray[6] = 1; expect(rootNode.resolvePath("myTestProperty.myI32Array[0]")).toEqual(1); state.myTestProperty.myReferenceArray[7] = 2; expect(rootNode.resolvePath("myTestProperty.myI32Array[0]")).toEqual(2); state.myTestProperty.myReferenceArray[8] = 3; expect(rootNode.resolvePath("myTestProperty.myI32Array[0]")).toEqual(3); state.myTestProperty.myReferenceArray[9] = { x: 3, y: 4 }; expect(rootNode.resolvePath("myTestProperty.myComplexArray[0].x").getValue()).toEqual(3); expect(rootNode.resolvePath("myTestProperty.myComplexArray[0].y").getValue()).toEqual(4); state.myTestProperty.myReferenceArray[10] = { x: 5, y: 6 }; expect(rootNode.resolvePath("myTestProperty.myComplexArray[0].x").getValue()).toEqual(5); expect(rootNode.resolvePath("myTestProperty.myComplexArray[0].y").getValue()).toEqual(6); state.myTestProperty.myReferenceArray[11] = { x: 7, y: 8 }; expect(rootNode.resolvePath("myTestProperty.myComplexArray[0].x").getValue()).toEqual(7); expect(rootNode.resolvePath("myTestProperty.myComplexArray[0].y").getValue()).toEqual(8); state.myTestProperty.myReferenceArray[12] = 1; expect(rootNode.resolvePath("myTestProperty.myMap[firstNumber]")).toEqual(1); state.myTestProperty.myReferenceArray[13] = 2; expect(rootNode.resolvePath("myTestProperty.myMap[firstNumber]")).toEqual(2); state.myTestProperty.myReferenceArray[14] = 3; expect(rootNode.resolvePath("myTestProperty.myMap[firstNumber]")).toEqual(3); state.myTestProperty.myReferenceArray[15] = { x: 3, y: 4 }; expect(rootNode.resolvePath("myTestProperty.myComplexMap[firstEntry].x").getValue()).toEqual(3); expect(rootNode.resolvePath("myTestProperty.myComplexMap[firstEntry].y").getValue()).toEqual(4); state.myTestProperty.myReferenceArray[16] = { x: 5, y: 6 }; 
expect(rootNode.resolvePath("myTestProperty.myComplexMap[firstEntry].x").getValue()).toEqual(5); expect(rootNode.resolvePath("myTestProperty.myComplexMap[firstEntry].y").getValue()).toEqual(6); state.myTestProperty.myReferenceArray[17] = { x: 7, y: 8 }; expect(rootNode.resolvePath("myTestProperty.myComplexMap[firstEntry].x").getValue()).toEqual(7); expect(rootNode.resolvePath("myTestProperty.myComplexMap[firstEntry].y").getValue()).toEqual(8); }); it("should be able to change the referenced properties in the presence of multi-hops", function() { state.myTestProperty.myReferenceArray[18] = 4; expect(rootNode.resolvePath("myTestProperty.myF32Number").getValue()).toEqual(4); state.myTestProperty.myReferenceArray[19] = 5; expect(rootNode.resolvePath("myTestProperty.myF32Number").getValue()).toEqual(5); state.myTestProperty.myReferenceArray[20] = 6; expect(rootNode.resolvePath("myTestProperty.myF32Number").getValue()).toEqual(6); state.myTestProperty.myReferenceArray[21] = { x: 3, y: 4 }; expect(rootNode.resolvePath("myTestProperty.myVector.x").getValue()).toEqual(3); expect(rootNode.resolvePath("myTestProperty.myVector.y").getValue()).toEqual(4); state.myTestProperty.myReferenceArray[22] = { x: 5, y: 6 }; expect(rootNode.resolvePath("myTestProperty.myVector.x").getValue()).toEqual(5); expect(rootNode.resolvePath("myTestProperty.myVector.y").getValue()).toEqual(6); state.myTestProperty.myReferenceArray[23] = { x: 7, y: 8 }; expect(rootNode.resolvePath("myTestProperty.myVector.x").getValue()).toEqual(7); expect(rootNode.resolvePath("myTestProperty.myVector.y").getValue()).toEqual(8); state.myTestProperty.myReferenceArray[24] = 1; expect(rootNode.resolvePath("myTestProperty.myI32Array[0]")).toEqual(1); state.myTestProperty.myReferenceArray[25] = 2; expect(rootNode.resolvePath("myTestProperty.myI32Array[0]")).toEqual(2); state.myTestProperty.myReferenceArray[26] = 3; expect(rootNode.resolvePath("myTestProperty.myI32Array[0]")).toEqual(3); 
state.myTestProperty.myReferenceArray[27] = { x: 3, y: 4 }; expect(rootNode.resolvePath("myTestProperty.myComplexArray[0].x").getValue()).toEqual(3); expect(rootNode.resolvePath("myTestProperty.myComplexArray[0].y").getValue()).toEqual(4); state.myTestProperty.myReferenceArray[28] = { x: 5, y: 6 }; expect(rootNode.resolvePath("myTestProperty.myComplexArray[0].x").getValue()).toEqual(5); expect(rootNode.resolvePath("myTestProperty.myComplexArray[0].y").getValue()).toEqual(6); state.myTestProperty.myReferenceArray[29] = { x: 7, y: 8 }; expect(rootNode.resolvePath("myTestProperty.myComplexArray[0].x").getValue()).toEqual(7); expect(rootNode.resolvePath("myTestProperty.myComplexArray[0].y").getValue()).toEqual(8); state.myTestProperty.myReferenceArray[30] = 1; expect(rootNode.resolvePath("myTestProperty.myMap[firstNumber]")).toEqual(1); state.myTestProperty.myReferenceArray[31] = 2; expect(rootNode.resolvePath("myTestProperty.myMap[firstNumber]")).toEqual(2); state.myTestProperty.myReferenceArray[32] = 3; expect(rootNode.resolvePath("myTestProperty.myMap[firstNumber]")).toEqual(3); state.myTestProperty.myReferenceArray[33] = { x: 3, y: 4 }; expect(rootNode.resolvePath("myTestProperty.myComplexMap[firstEntry].x").getValue()).toEqual(3); expect(rootNode.resolvePath("myTestProperty.myComplexMap[firstEntry].y").getValue()).toEqual(4); state.myTestProperty.myReferenceArray[34] = { x: 5, y: 6 }; expect(rootNode.resolvePath("myTestProperty.myComplexMap[firstEntry].x").getValue()).toEqual(5); expect(rootNode.resolvePath("myTestProperty.myComplexMap[firstEntry].y").getValue()).toEqual(6); state.myTestProperty.myReferenceArray[35] = { x: 7, y: 8 }; expect(rootNode.resolvePath("myTestProperty.myComplexMap[firstEntry].x").getValue()).toEqual(7); expect(rootNode.resolvePath("myTestProperty.myComplexMap[firstEntry].y").getValue()).toEqual(8); }); it("should be able to assign another path/property to reference another property", function() { // Relative 
state.myTestProperty.myReferenceArray["0*"] = "myVector";
expect(rootNode.resolvePath("myTestProperty.myReferenceArray[0]")).toEqual(
    rootNode.resolvePath("myTestProperty.myVector"));
// Complex Relative
state.myTestProperty.myReferenceArray["0*"] = "../myBook";
expect(rootNode.resolvePath("myTestProperty.myReferenceArray[0]")).toEqual(
    rootNode.resolvePath("myBook"));
// Property
state.myTestProperty.myReferenceArray["0*"] = rootNode.resolvePath("myTestProperty.myF32Number");
expect(rootNode.resolvePath("myTestProperty.myReferenceArray[0]")).toEqual(
    rootNode.resolvePath("myTestProperty.myF32Number"));
// Absolute Path
state.myTestProperty.myReferenceArray["0*"] = "/myTestProperty.myVector";
expect(rootNode.resolvePath("myTestProperty.myReferenceArray[0]")).toEqual(
    rootNode.resolvePath("myTestProperty.myVector"));
});
// Assigning an iterable replaces the whole reference array; entries may be
// relative paths, absolute paths, or (proxied) properties.
it("should be able to assign a new iterable", function() {
    state.myTestProperty.myReferenceArray = [
        "myVector",
        "../myTestProperty",
        rootNode.resolvePath("myTestProperty.myF32Number"),
        "/myBook",
    ];
    expect(rootNode.resolvePath("myTestProperty.myReferenceArray[0]")).toEqual(
        rootNode.resolvePath("myTestProperty.myVector"));
    expect(rootNode.resolvePath("myTestProperty.myReferenceArray[1]")).toEqual(
        rootNode.resolvePath("myTestProperty"));
    expect(rootNode.resolvePath("myTestProperty.myReferenceArray[2]")).toEqual(
        rootNode.resolvePath("myTestProperty.myF32Number"));
    expect(rootNode.resolvePath("myTestProperty.myReferenceArray[3]")).toEqual(
        rootNode.resolvePath("myBook"));
});
it("check .copyWithin() functionality", function() {
    state.myTestProperty.myReferenceArray.copyWithin(0, 3, 4);
    expect(rootNode.resolvePath("myTestProperty.myReferenceArray[0]")).toEqual(
        rootNode.resolvePath("myTestProperty.myReferenceArray[3]"));
});
it("check .fill() functionality", function() {
    state.myTestProperty.myReferenceArray.fill("myVector");
    // NOTE(review): 'state.myTestProperty.length' looks like it was meant to be
    // 'state.myTestProperty.myReferenceArray.length' — if it is undefined the
    // loop body never runs and this test asserts nothing. Confirm.
    for (let i = 0; i < state.myTestProperty.length; ++i) {
        expect(rootNode.resolvePath("myTestProperty.myReferenceArray").get(i)).toEqual(
            rootNode.resolvePath("myTestProperty.myVector"));
    }
});
it("check .pop() functionality", function() {
    const proxiedRefArray = state.myTestProperty.myReferenceArray;
    const popped = proxiedRefArray.pop();
    // pop() returns the proxied referenced property, not the path string.
    expect(popped.getProperty())
        .toEqual(rootNode.resolvePath("myTestProperty.myComplexMap[firstEntry]"));
});
it("check .push() functionality", function() {
    rootNode.resolvePath("myTestProperty.myReferenceArray").clear();
    state.myTestProperty.myReferenceArray.push("myVector",
        rootNode.resolvePath("myTestProperty.myF32Number"), "/myBook");
    expect(rootNode.resolvePath("myTestProperty.myReferenceArray[0]")).toEqual(
        rootNode.resolvePath("myTestProperty.myVector"));
    expect(rootNode.resolvePath("myTestProperty.myReferenceArray[1]")).toEqual(
        rootNode.resolvePath("myTestProperty.myF32Number"));
    expect(rootNode.resolvePath("myTestProperty.myReferenceArray[2]")).toEqual(
        rootNode.resolvePath("myBook"));
});
it("check .reverse() functionality", function() {
    const values = state.myTestProperty.myReferenceArray.getProperty().getValues();
    state.myTestProperty.myReferenceArray.reverse();
    expect(state.myTestProperty.myReferenceArray.getProperty().getValues().toString()).toEqual(
        values.reverse().toString());
});
it("check .sort() functionality", function() {
    // Primitive
    rootNode.resolvePath("myTestProperty.myReferenceArray").setValues([
        "myI32Array[0]",
        "myI32Array[1]",
        "myI32Array[2]",
        "myI32Array[3]",
        "myI32Array[4]",
    ]);
    // The comparator sees the *referenced* values; sorting reorders the
    // references themselves.
    state.myTestProperty.myReferenceArray.sort((a, b) => (b - a));
    for (let i = 0; i < state.myTestProperty.myReferenceArray.length; ++i) {
        expect(rootNode.resolvePath("myTestProperty.myReferenceArray").get(i)).toEqual(
            rootNode.resolvePath("myTestProperty.myI32Array").get(
                state.myTestProperty.myReferenceArray.length - 1 - i));
    }
    // Non-Primitive
    rootNode.resolvePath("myTestProperty.myReferenceArray").setValues([
        "myComplexArray[0]",
        "myComplexArray[1]",
    ]);
state.myTestProperty.myReferenceArray.sort((a, b) => (b.x - a.x));
for (let i = 0; i < state.myTestProperty.myReferenceArray.length; ++i) {
    expect(rootNode.resolvePath("myTestProperty.myReferenceArray")
        .get(i).get("x").getValue()).toEqual(
            rootNode.resolvePath("myTestProperty.myComplexArray").get(
                state.myTestProperty.myReferenceArray.length - 1 - i).get("x").getValue());
}
// Mix and multi-hops
rootNode.resolvePath("myTestProperty.myReferenceArray").setValues([
    "myComplexArray[1]",
    "myMultiHopReference",
    "myI32Array[0]",
]);
// Comparator normalizes complex entries to their 'x' component so mixed
// primitive/complex referenced values can be compared numerically.
state.myTestProperty.myReferenceArray.sort((a, b) => {
    if (a.x) { a = a.x; }
    if (b.x) { b = b.x; }
    return (a - b);
});
expect(rootNode.resolvePath("myTestProperty.myReferenceArray")
    .getValue(0)).toEqual("myI32Array[0]");
expect(rootNode.resolvePath("myTestProperty.myReferenceArray")
    .getValue(1)).toEqual("myMultiHopReference");
expect(rootNode.resolvePath("myTestProperty.myReferenceArray")
    .getValue(2)).toEqual("myComplexArray[1]");
});
it("check .splice() functionality", function() {
    const removed = state.myTestProperty.myReferenceArray
        .splice(0, 6, "myI32Array[0]", "myI32Array[1]");
    expect(removed.length).toEqual(6);
    // Removed primitive-referencing entries come back as plain values,
    // complex-referencing entries as proxies.
    expect(removed[0]).toEqual(rootNode.resolvePath("myTestProperty.myF32Number").getValue());
    expect(removed[1]).toEqual(rootNode.resolvePath("myTestProperty.myF32Number").getValue());
    expect(removed[2]).toEqual(rootNode.resolvePath("myTestProperty.myF32Number").getValue());
    expect(removed[3].getProperty()).toEqual(rootNode.resolvePath("myTestProperty.myVector"));
    expect(removed[4].getProperty()).toEqual(rootNode.resolvePath("myTestProperty.myVector"));
    expect(removed[5].getProperty()).toEqual(rootNode.resolvePath("myTestProperty.myVector"));
    expect(rootNode.resolvePath("myTestProperty.myReferenceArray").getLength()).toEqual(32);
    expect(rootNode.resolvePath("myTestProperty.myReferenceArray[0]")).toEqual(0);
    expect(rootNode.resolvePath("myTestProperty.myReferenceArray[1]")).toEqual(10);
});
it("check .shift() functionality",
    function() {
        const first = state.myTestProperty.myReferenceArray.shift();
        expect(first).toEqual(rootNode.resolvePath("myTestProperty.myF32Number").getValue());
    });
it("check .swap() functionality", function() {
    state.myTestProperty.myReferenceArray.swap(0, 3);
    expect(rootNode.resolvePath("myTestProperty.myReferenceArray[0]")).toEqual(
        rootNode.resolvePath("myTestProperty.myVector"));
    expect(rootNode.resolvePath("myTestProperty.myReferenceArray[3]")).toEqual(
        rootNode.resolvePath("myTestProperty.myF32Number"));
});
it("check .unshift() functionality", function() {
    rootNode.resolvePath("myTestProperty.myReferenceArray").clear();
    state.myTestProperty.myReferenceArray.unshift("myVector",
        rootNode.resolvePath("myTestProperty.myF32Number"), "/myBook", "../myTestProperty");
    expect(rootNode.resolvePath("myTestProperty.myReferenceArray[0]")).toEqual(
        rootNode.resolvePath("myTestProperty.myVector"));
    expect(rootNode.resolvePath("myTestProperty.myReferenceArray[1]")).toEqual(
        rootNode.resolvePath("myTestProperty.myF32Number"));
    expect(rootNode.resolvePath("myTestProperty.myReferenceArray[2]")).toEqual(
        rootNode.resolvePath("myBook"));
    expect(rootNode.resolvePath("myTestProperty.myReferenceArray[3]")).toEqual(
        rootNode.resolvePath("myTestProperty"));
});
it("should throw if trying to set a property referenced via an absolute path " +
    "if the ReferenceArray is not yet in the property tree", function() {
        // Property not in property tree
        const tempRefArray = PropertyProxy.proxify(PropertyFactory.create("Reference", "array", [
            "/myTestProperty.myF32Number",
        ]));
        expect(() => { tempRefArray[0] = 100; }).toThrow();
    });
it("should throw if trying to set invalid references", function() {
    rootNode.resolvePath("myTestProperty.myReferenceArray").clear();
    rootNode.resolvePath("myTestProperty.myReferenceArray").insert(0, "relativeInvalid");
    rootNode.resolvePath("myTestProperty.myReferenceArray").insert(1, "/absoluteInvalid");
    expect(() => {
        state.myTestProperty.myReferenceArray[0] = 100;
}).toThrow("PropertyProxy-009");
expect(() => {
    state.myTestProperty.myReferenceArray[1] = 100;
}).toThrow("PropertyProxy-009");
});
});
});
describe("Map", function() {
    // Sum of all values reachable through myReferenceMap, computed once from
    // the raw (non-proxied) property; used as the expected total by the
    // iteration tests below. Complex entries contribute their 'x' component.
    let refMapSum = 0;
    let refMap;
    beforeAll(function() {
        refMap = rootNode.resolvePath("myTestProperty.myReferenceMap");
        const refMapIds = refMap.getIds();
        for (const id of refMapIds) {
            const entry = refMap.get(id);
            if (PropertyFactory.instanceOf(entry, "BaseProperty")) {
                if (PropertyFactory.instanceOf(entry, "ContainerProperty") && entry.has("x")) {
                    refMapSum += entry.get("x").getValue();
                } else {
                    refMapSum += entry.getValue();
                }
            } else {
                refMapSum += entry;
            }
        }
    });
    it("should be able to access the referenced properties", function() {
        const rM = state.myTestProperty.myReferenceMap;
        // myF32Number
        expect(rM.get("a")).toEqual(3);
        expect(rM.get("b")).toEqual(3);
        expect(rM.get("c")).toEqual(3);
        // myVector
        expect(rM.get("d").x).toEqual(1);
        expect(rM.get("d").y).toEqual(2);
        expect(rM.get("e").x).toEqual(1);
        expect(rM.get("e").y).toEqual(2);
        expect(rM.get("f").x).toEqual(1);
        expect(rM.get("f").y).toEqual(2);
        // myI32Array[0]
        expect(rM.get("g")).toEqual(0);
        expect(rM.get("h")).toEqual(0);
        expect(rM.get("i")).toEqual(0);
        // myComplexArray[0]
        expect(rM.get("j").x).toEqual(1);
        expect(rM.get("j").y).toEqual(2);
        expect(rM.get("k").x).toEqual(1);
        expect(rM.get("k").y).toEqual(2);
        expect(rM.get("l").x).toEqual(1);
        expect(rM.get("l").y).toEqual(2);
        // myMap[0]
        expect(rM.get("m")).toEqual(1111);
        expect(rM.get("n")).toEqual(1111);
        expect(rM.get("o")).toEqual(1111);
        // myComplexMap[0]
        expect(rM.get("p").x).toEqual(10);
        expect(rM.get("p").y).toEqual(20);
        expect(rM.get("q").x).toEqual(10);
        expect(rM.get("q").y).toEqual(20);
        expect(rM.get("r").x).toEqual(10);
        expect(rM.get("r").y).toEqual(20);
    });
    it("should be able to access the referenced properties in the presence of multi-hops", function() {
        const rM = state.myTestProperty.myReferenceMap;
        // myF32Number
        expect(rM.get("aa")).toEqual(3);
        expect(rM.get("bb")).toEqual(3);
        expect(rM.get("cc")).toEqual(3);
        // myVector
        expect(rM.get("dd").x).toEqual(1);
        expect(rM.get("dd").y).toEqual(2);
        expect(rM.get("ee").x).toEqual(1);
        expect(rM.get("ee").y).toEqual(2);
        expect(rM.get("ff").x).toEqual(1);
        expect(rM.get("ff").y).toEqual(2);
        // myI32Array[0]
        expect(rM.get("gg")).toEqual(0);
        expect(rM.get("hh")).toEqual(0);
        expect(rM.get("ii")).toEqual(0);
        // myComplexArray[0]
        expect(rM.get("jj").x).toEqual(1);
        expect(rM.get("jj").y).toEqual(2);
        expect(rM.get("kk").x).toEqual(1);
        expect(rM.get("kk").y).toEqual(2);
        expect(rM.get("ll").x).toEqual(1);
        expect(rM.get("ll").y).toEqual(2);
        // myMap[0]
        expect(rM.get("mm")).toEqual(1111);
        expect(rM.get("nn")).toEqual(1111);
        expect(rM.get("oo")).toEqual(1111);
        // myComplexMap[0]
        expect(rM.get("pp").x).toEqual(10);
        expect(rM.get("pp").y).toEqual(20);
        expect(rM.get("qq").x).toEqual(10);
        expect(rM.get("qq").y).toEqual(20);
        expect(rM.get("rr").x).toEqual(10);
        expect(rM.get("rr").y).toEqual(20);
    });
    it("should be able to access stored reference path strings via *", function() {
        const rM = state.myTestProperty.myReferenceMap;
        const refMapIds = refMap.getIds();
        for (const id of refMapIds) {
            expect(rM.get(`${id}*`)).toEqual(refMap.getValue(id));
        }
    });
    it("check .entries() functionality", function() {
        const entriesIterator = state.myTestProperty.myReferenceMap.entries();
        let next;
        // myF32Number
        next = entriesIterator.next().value;
        expect(next[0]).toEqual("a");
        next = entriesIterator.next().value;
        expect(next[0]).toEqual("b");
        next = entriesIterator.next().value;
        expect(next[0]).toEqual("c");
        // myVector
        next = entriesIterator.next().value;
        expect(next[0]).toEqual("d");
        next = entriesIterator.next().value;
        expect(next[0]).toEqual("e");
        next = entriesIterator.next().value;
        expect(next[0]).toEqual("f");
        // myI32Array[0]
        next = entriesIterator.next().value;
        expect(next[0]).toEqual("g");
        next = entriesIterator.next().value;
        expect(next[0]).toEqual("h");
        next = entriesIterator.next().value;
        expect(next[0]).toEqual("i");
        // myComplexArray[0]
        next = entriesIterator.next().value;
        expect(next[0]).toEqual("j");
        next = entriesIterator.next().value;
        expect(next[0]).toEqual("k");
        next = entriesIterator.next().value;
        expect(next[0]).toEqual("l");
        // myMap[0]
        next = entriesIterator.next().value;
        expect(next[0]).toEqual("m");
        next = entriesIterator.next().value;
        expect(next[0]).toEqual("n");
        next = entriesIterator.next().value;
        expect(next[0]).toEqual("o");
        // myComplexMap[0]
        next = entriesIterator.next().value;
        expect(next[0]).toEqual("p");
        next = entriesIterator.next().value;
        expect(next[0]).toEqual("q");
        next = entriesIterator.next().value;
        expect(next[0]).toEqual("r");
        next = entriesIterator.next();
        let sum = 0;
        while (next.done !== true) {
            if (next.value[1].x) {
                sum += next.value[1].x;
            } else {
                sum += next.value[1];
            }
            next = entriesIterator.next();
        }
        // Presumably the remaining (multi-hop aa..rr) entries mirror the a..r
        // half, so doubling the partial sum reproduces the full-map total.
        sum += sum;
        expect(sum).toEqual(refMapSum);
    });
    it("check .forEach() functionality", function() {
        let sum = 0;
        state.myTestProperty.myReferenceMap.forEach((el) => {
            if (el.x) {
                sum += el.x;
            } else {
                sum += el;
            }
        });
        expect(sum).toEqual(refMapSum);
    });
    it("check .values() functionality", function() {
        const valuesIterator = state.myTestProperty.myReferenceMap.values();
        let sum = 0;
        let next = valuesIterator.next();
        while (next.done !== true) {
            if (next.value.x) {
                sum += next.value.x;
            } else {
                sum += next.value;
            }
            next = valuesIterator.next();
        }
        expect(sum).toEqual(refMapSum);
    });
    describe("Setting", function() {
        // Restores every referenced target and the reference map itself to a
        // known baseline; run before each test in this describe.
        const reset = () => {
            rootNode.resolvePath("myTestProperty.myF32Number").setValue(3);
            rootNode.resolvePath("myTestProperty.myVector").setValues({ x: 1, y: 2 });
            rootNode.resolvePath("myTestProperty.myI32Array").set(0, 0);
            rootNode.resolvePath("myTestProperty.myComplexArray").set(0, { x: 1, y: 2 });
            rootNode.resolvePath("myTestProperty.myMap").set("firstNumber", 1111);
            rootNode.resolvePath("myTestProperty.myComplexMap[firstEntry]").setValues({ x: 10, y: 20 });
// Keys a..r reference targets directly (relative, '../'-relative, absolute);
// keys aa..rr are multi-hop: they reference the corresponding a..r entry.
rootNode.resolvePath("myTestProperty.myReferenceMap").setValues({
    a: "myF32Number",
    b: "../myTestProperty.myF32Number",
    c: "/myTestProperty.myF32Number",
    d: "myVector",
    e: "../myTestProperty.myVector",
    f: "/myTestProperty.myVector",
    g: "myI32Array[0]",
    h: "../myTestProperty.myI32Array[0]",
    i: "/myTestProperty.myI32Array[0]",
    j: "myComplexArray[0]",
    // NOTE(review): k/l use absolute-then-relative order, unlike the other
    // triplets (relative, '../', absolute) — confirm this is intentional.
    k: "/myTestProperty.myComplexArray[0]",
    l: "../myTestProperty.myComplexArray[0]",
    m: "myMap[firstNumber]",
    n: "../myTestProperty.myMap[firstNumber]",
    o: "/myTestProperty.myMap[firstNumber]",
    p: "myComplexMap[firstEntry]",
    q: "../myTestProperty.myComplexMap[firstEntry]",
    r: "/myTestProperty.myComplexMap[firstEntry]",
    aa: "myReferenceMap[a]",
    bb: "myReferenceMap[b]",
    cc: "myReferenceMap[c]",
    dd: "myReferenceMap[d]",
    ee: "myReferenceMap[e]",
    ff: "myReferenceMap[f]",
    gg: "myReferenceMap[g]",
    hh: "myReferenceMap[h]",
    ii: "myReferenceMap[i]",
    jj: "myReferenceMap[j]",
    kk: "myReferenceMap[k]",
    ll: "myReferenceMap[l]",
    mm: "myReferenceMap[m]",
    nn: "myReferenceMap[n]",
    oo: "myReferenceMap[o]",
    pp: "myReferenceMap[p]",
    qq: "myReferenceMap[q]",
    rr: "myReferenceMap[r]",
});
};
beforeEach(function() {
    reset();
});
// set(key, value) writes through the reference to the referenced property.
it("should be able to change the referenced properties", function() {
    const rM = state.myTestProperty.myReferenceMap;
    rM.set("a", 4);
    expect(rootNode.resolvePath("myTestProperty.myF32Number").getValue()).toEqual(4);
    rM.set("b", 5);
    expect(rootNode.resolvePath("myTestProperty.myF32Number").getValue()).toEqual(5);
    rM.set("c", 6);
    expect(rootNode.resolvePath("myTestProperty.myF32Number").getValue()).toEqual(6);
    rM.set("d", { x: 3, y: 4 });
    expect(rootNode.resolvePath("myTestProperty.myVector.x").getValue()).toEqual(3);
    expect(rootNode.resolvePath("myTestProperty.myVector.y").getValue()).toEqual(4);
    rM.set("e", { x: 5, y: 6 });
    expect(rootNode.resolvePath("myTestProperty.myVector.x").getValue()).toEqual(5);
    expect(rootNode.resolvePath("myTestProperty.myVector.y").getValue()).toEqual(6);
    rM.set("f", { x: 7, y: 8 });
    expect(rootNode.resolvePath("myTestProperty.myVector.x").getValue()).toEqual(7);
    expect(rootNode.resolvePath("myTestProperty.myVector.y").getValue()).toEqual(8);
    rM.set("g", 1);
    expect(rootNode.resolvePath("myTestProperty.myI32Array[0]")).toEqual(1);
    rM.set("h", 2);
    expect(rootNode.resolvePath("myTestProperty.myI32Array[0]")).toEqual(2);
    rM.set("i", 3);
    expect(rootNode.resolvePath("myTestProperty.myI32Array[0]")).toEqual(3);
    rM.set("j", { x: 3, y: 4 });
    expect(rootNode.resolvePath("myTestProperty.myComplexArray[0].x").getValue()).toEqual(3);
    expect(rootNode.resolvePath("myTestProperty.myComplexArray[0].y").getValue()).toEqual(4);
    rM.set("k", { x: 5, y: 6 });
    expect(rootNode.resolvePath("myTestProperty.myComplexArray[0].x").getValue()).toEqual(5);
    expect(rootNode.resolvePath("myTestProperty.myComplexArray[0].y").getValue()).toEqual(6);
    rM.set("l", { x: 7, y: 8 });
    expect(rootNode.resolvePath("myTestProperty.myComplexArray[0].x").getValue()).toEqual(7);
    expect(rootNode.resolvePath("myTestProperty.myComplexArray[0].y").getValue()).toEqual(8);
    rM.set("m", 1);
    expect(rootNode.resolvePath("myTestProperty.myMap[firstNumber]")).toEqual(1);
    rM.set("n", 2);
    expect(rootNode.resolvePath("myTestProperty.myMap[firstNumber]")).toEqual(2);
    rM.set("o", 3);
    expect(rootNode.resolvePath("myTestProperty.myMap[firstNumber]")).toEqual(3);
    rM.set("p", { x: 3, y: 4 });
    expect(rootNode.resolvePath("myTestProperty.myComplexMap[firstEntry].x").getValue()).toEqual(3);
    expect(rootNode.resolvePath("myTestProperty.myComplexMap[firstEntry].y").getValue()).toEqual(4);
    rM.set("q", { x: 5, y: 6 });
    expect(rootNode.resolvePath("myTestProperty.myComplexMap[firstEntry].x").getValue()).toEqual(5);
    expect(rootNode.resolvePath("myTestProperty.myComplexMap[firstEntry].y").getValue()).toEqual(6);
    rM.set("r", { x: 7, y: 8 });
    expect(rootNode.resolvePath("myTestProperty.myComplexMap[firstEntry].x").getValue()).toEqual(7);
    expect(rootNode.resolvePath("myTestProperty.myComplexMap[firstEntry].y").getValue()).toEqual(8);
});
it("should be able to change the referenced properties in the presence of multi-hops", function() {
    const rM = state.myTestProperty.myReferenceMap;
    rM.set("aa", 4);
    expect(rootNode.resolvePath("myTestProperty.myF32Number").getValue()).toEqual(4);
    rM.set("bb", 5);
    expect(rootNode.resolvePath("myTestProperty.myF32Number").getValue()).toEqual(5);
    rM.set("cc", 6);
    expect(rootNode.resolvePath("myTestProperty.myF32Number").getValue()).toEqual(6);
    rM.set("dd", { x: 3, y: 4 });
    expect(rootNode.resolvePath("myTestProperty.myVector.x").getValue()).toEqual(3);
    expect(rootNode.resolvePath("myTestProperty.myVector.y").getValue()).toEqual(4);
    rM.set("ee", { x: 5, y: 6 });
    expect(rootNode.resolvePath("myTestProperty.myVector.x").getValue()).toEqual(5);
    expect(rootNode.resolvePath("myTestProperty.myVector.y").getValue()).toEqual(6);
    rM.set("ff", { x: 7, y: 8 });
    expect(rootNode.resolvePath("myTestProperty.myVector.x").getValue()).toEqual(7);
    expect(rootNode.resolvePath("myTestProperty.myVector.y").getValue()).toEqual(8);
    rM.set("gg", 1);
    expect(rootNode.resolvePath("myTestProperty.myI32Array[0]")).toEqual(1);
    rM.set("hh", 2);
    expect(rootNode.resolvePath("myTestProperty.myI32Array[0]")).toEqual(2);
    rM.set("ii", 3);
    expect(rootNode.resolvePath("myTestProperty.myI32Array[0]")).toEqual(3);
    rM.set("jj", { x: 3, y: 4 });
    expect(rootNode.resolvePath("myTestProperty.myComplexArray[0].x").getValue()).toEqual(3);
    expect(rootNode.resolvePath("myTestProperty.myComplexArray[0].y").getValue()).toEqual(4);
    rM.set("kk", { x: 5, y: 6 });
    expect(rootNode.resolvePath("myTestProperty.myComplexArray[0].x").getValue()).toEqual(5);
    expect(rootNode.resolvePath("myTestProperty.myComplexArray[0].y").getValue()).toEqual(6);
    rM.set("ll", { x: 7, y: 8 });
    expect(rootNode.resolvePath("myTestProperty.myComplexArray[0].x").getValue()).toEqual(7);
    expect(rootNode.resolvePath("myTestProperty.myComplexArray[0].y").getValue()).toEqual(8);
    rM.set("mm", 1);
    expect(rootNode.resolvePath("myTestProperty.myMap[firstNumber]")).toEqual(1);
    rM.set("nn", 2);
    expect(rootNode.resolvePath("myTestProperty.myMap[firstNumber]")).toEqual(2);
    rM.set("oo", 3);
    expect(rootNode.resolvePath("myTestProperty.myMap[firstNumber]")).toEqual(3);
    rM.set("pp", { x: 3, y: 4 });
    expect(rootNode.resolvePath("myTestProperty.myComplexMap[firstEntry].x").getValue()).toEqual(3);
    expect(rootNode.resolvePath("myTestProperty.myComplexMap[firstEntry].y").getValue()).toEqual(4);
    rM.set("qq", { x: 5, y: 6 });
    expect(rootNode.resolvePath("myTestProperty.myComplexMap[firstEntry].x").getValue()).toEqual(5);
    expect(rootNode.resolvePath("myTestProperty.myComplexMap[firstEntry].y").getValue()).toEqual(6);
    rM.set("rr", { x: 7, y: 8 });
    expect(rootNode.resolvePath("myTestProperty.myComplexMap[firstEntry].x").getValue()).toEqual(7);
    expect(rootNode.resolvePath("myTestProperty.myComplexMap[firstEntry].y").getValue()).toEqual(8);
});
// set("key*", …) rebinds the reference itself (stored path or property).
it("should be able to assign another path/property to reference another property", function() {
    const rM = state.myTestProperty.myReferenceMap;
    // Relative
    rM.set("a*", "myVector");
    expect(rootNode.resolvePath("myTestProperty.myReferenceMap[a]")).toEqual(
        rootNode.resolvePath("myTestProperty.myVector"));
    // Property
    rM.set("a*", rootNode.resolvePath("myTestProperty.myF32Number"));
    expect(rootNode.resolvePath("myTestProperty.myReferenceMap[a]")).toEqual(
        rootNode.resolvePath("myTestProperty.myF32Number"));
    // Absolute Path
    rM.set("a*", "/myTestProperty.myVector");
    expect(rootNode.resolvePath("myTestProperty.myReferenceMap[a]")).toEqual(
        rootNode.resolvePath("myTestProperty.myVector"));
});
it("should be able to assign a new iterable", function() {
    state.myTestProperty.myReferenceMap = [
        ["a", "myVector"],
        ["b", rootNode.resolvePath("myTestProperty.myF32Number")],
        ["c", "/myBook"],
    ];
expect(rootNode.resolvePath("myTestProperty.myReferenceMap[a]")).toEqual(
    rootNode.resolvePath("myTestProperty.myVector"));
expect(rootNode.resolvePath("myTestProperty.myReferenceMap[b]")).toEqual(
    rootNode.resolvePath("myTestProperty.myF32Number"));
expect(rootNode.resolvePath("myTestProperty.myReferenceMap[c]")).toEqual(
    rootNode.resolvePath("myBook"));
});
});
});
});
describe("Float32", function() {
    it("Reading Float32 number", function() {
        expect(state.myTestProperty.myF32Number).toEqual(3);
    });
    it("Setting Float32 number", function() {
        state.myTestProperty.myF32Number = 5;
        expect(state.myTestProperty.myF32Number).toEqual(5);
        expect(rootNode.get("myTestProperty").get("myF32Number").getValue()).toEqual(5);
        // Property
        state.myTestProperty.myF32Number = PropertyFactory.create("Float32", "single", 10);
        expect(rootNode.resolvePath("myTestProperty.myF32Number").getValue()).toEqual(10);
        state.myTestProperty.myF32Number = PropertyProxy.proxify(
            PropertyFactory.create("Float32", "single", 11));
        expect(rootNode.resolvePath("myTestProperty.myF32Number").getValue()).toEqual(11);
        // Assigning an array to a single Float32 must be rejected.
        expect(() => { state.myTestProperty.myF32Number = [1, 2, 3]; }).toThrow("PropertyProxy-007");
    });
    it("Obtain the property via the parent", function() {
        expect(state.myTestProperty.getProperty("myF32Number")).toEqual(
            rootNode.resolvePath("myTestProperty.myF32Number"));
    });
});
describe("Enum/EnumArray", function() {
    // A key suffixed with '^' yields the enum's string representation instead
    // of its numeric value.
    it("accessing", function() {
        expect(state.myTestProperty.myEnumCases.myEnum).toEqual(1);
        expect(state.myTestProperty.myEnumCases["myEnum^"]).toEqual("uno");
        expect(state.myTestProperty.myEnumCases.myEnumArray[0]).toEqual(1);
        expect(state.myTestProperty.myEnumCases.myEnumArray["0^"]).toEqual("uno");
        expect(state.myTestProperty.myEnumCases.refToEnum).toEqual(1);
        expect(state.myTestProperty.myEnumCases["refToEnum^"]).toEqual("uno");
        expect(state.myTestProperty.myEnumCases.refToEnumArrayEntry).toEqual(2);
        expect(state.myTestProperty.myEnumCases["refToEnumArrayEntry^"]).toEqual("dos");
        // Ref Array
        // ref to enum
        expect(state.myTestProperty.myEnumCases.refArrayToEnum[0]).toEqual(1);
        expect(state.myTestProperty.myEnumCases.refArrayToEnum["0^"]).toEqual("uno");
        expect(state.myTestProperty.myEnumCases.refArrayToEnum[1]).toEqual(1);
        expect(state.myTestProperty.myEnumCases.refArrayToEnum["1^"]).toEqual("uno");
        // ref to entry of enumArray
        expect(state.myTestProperty.myEnumCases.refArrayToEnum[2]).toEqual(1);
        expect(state.myTestProperty.myEnumCases.refArrayToEnum["2^"]).toEqual("uno");
        expect(state.myTestProperty.myEnumCases.refArrayToEnum[3]).toEqual(1);
        expect(state.myTestProperty.myEnumCases.refArrayToEnum["3^"]).toEqual("uno");
        // ref to ref to enum
        expect(state.myTestProperty.myEnumCases.refArrayToEnum[4]).toEqual(1);
        expect(state.myTestProperty.myEnumCases.refArrayToEnum["4^"]).toEqual("uno");
        expect(state.myTestProperty.myEnumCases.refArrayToEnum[5]).toEqual(1);
        expect(state.myTestProperty.myEnumCases.refArrayToEnum["5^"]).toEqual("uno");
        // ref to ref to entry of enumArray
        expect(state.myTestProperty.myEnumCases.refArrayToEnum[6]).toEqual(2);
        expect(state.myTestProperty.myEnumCases.refArrayToEnum["6^"]).toEqual("dos");
        expect(state.myTestProperty.myEnumCases.refArrayToEnum[7]).toEqual(2);
        expect(state.myTestProperty.myEnumCases.refArrayToEnum["7^"]).toEqual("dos");
        // Ref Map
        // ref to enum
        expect(state.myTestProperty.myEnumCases.refMapToEnum.get("a")).toEqual(1);
        expect(state.myTestProperty.myEnumCases.refMapToEnum.get("a^")).toEqual("uno");
        expect(state.myTestProperty.myEnumCases.refMapToEnum.get("b")).toEqual(1);
        expect(state.myTestProperty.myEnumCases.refMapToEnum.get("b^")).toEqual("uno");
        // ref to entry of enumArray
        expect(state.myTestProperty.myEnumCases.refMapToEnum.get("c")).toEqual(1);
        expect(state.myTestProperty.myEnumCases.refMapToEnum.get("c^")).toEqual("uno");
        expect(state.myTestProperty.myEnumCases.refMapToEnum.get("d")).toEqual(1);
        expect(state.myTestProperty.myEnumCases.refMapToEnum.get("d^")).toEqual("uno");
        // ref to ref to enum
        expect(state.myTestProperty.myEnumCases.refMapToEnum.get("e")).toEqual(1);
        expect(state.myTestProperty.myEnumCases.refMapToEnum.get("e^")).toEqual("uno");
        expect(state.myTestProperty.myEnumCases.refMapToEnum.get("f")).toEqual(1);
        expect(state.myTestProperty.myEnumCases.refMapToEnum.get("f^")).toEqual("uno");
        // ref to ref to entry of enumArray
        expect(state.myTestProperty.myEnumCases.refMapToEnum.get("g")).toEqual(2);
        expect(state.myTestProperty.myEnumCases.refMapToEnum.get("g^")).toEqual("dos");
        expect(state.myTestProperty.myEnumCases.refMapToEnum.get("h")).toEqual(2);
        expect(state.myTestProperty.myEnumCases.refMapToEnum.get("h^")).toEqual("dos");
    });
    // Enums accept both the numeric value and the enum string on write.
    it("setting", function() {
        state.myTestProperty.myEnumCases.myEnum = 2;
        expect(rootNode.resolvePath("myTestProperty.myEnumCases.myEnum").getValue()).toEqual(2);
        expect(rootNode.resolvePath("myTestProperty.myEnumCases.myEnum").getEnumString()).toEqual("dos");
        state.myTestProperty.myEnumCases.myEnum = "tres";
        expect(rootNode.resolvePath("myTestProperty.myEnumCases.myEnum").getValue()).toEqual(3);
        expect(rootNode.resolvePath("myTestProperty.myEnumCases.myEnum").getEnumString()).toEqual("tres");
        expect(() => { state.myTestProperty.myEnumCases.myEnum = "notAValidEnumString"; }).toThrow();
        expect(() => { state.myTestProperty.myEnumCases.myEnum = "100"; }).toThrow();
        state.myTestProperty.myEnumCases.myEnumArray = ["dos", 1];
        expect(rootNode.resolvePath("myTestProperty.myEnumCases.myEnumArray").get(0)).toEqual(2);
        expect(rootNode.resolvePath("myTestProperty.myEnumCases.myEnumArray").get(1)).toEqual(1);
        expect(state.myTestProperty.myEnumCases.myEnumArray.pop()).toEqual(1);
        expect(state.myTestProperty.myEnumCases.myEnumArray.shift()).toEqual(2);
        state.myTestProperty.myEnumCases.myEnumArray.push(2);
        expect(rootNode.resolvePath("myTestProperty.myEnumCases.myEnumArray").get(0)).toEqual(2);
        state.myTestProperty.myEnumCases.myEnumArray.unshift("uno");
expect(rootNode.resolvePath("myTestProperty.myEnumCases.myEnumArray").get(0)).toEqual(1); }); }); describe("(U)int64/(U)int64Array/(U)int64Map", function() { it("accessing", function() { const uint64Value = rootNode.resolvePath("myTestProperty.myUint64Int64Cases.myUint64").getValue(); const valueOfUint64ArrayAtZero = rootNode.resolvePath("myTestProperty.myUint64Int64Cases.myUint64Array[0]"); const valueOfInt64MapAtA = rootNode.resolvePath("myTestProperty.myUint64Int64Cases.myInt64Map[a]"); const stringVal = "4294967296"; expect(state.myTestProperty.myUint64Int64Cases.myUint64).toEqual(uint64Value); expect(state.myTestProperty.myUint64Int64Cases["myUint64^"]).toEqual(stringVal); expect(state.myTestProperty.myUint64Int64Cases.myUint64Array[0]).toEqual(valueOfUint64ArrayAtZero); expect(state.myTestProperty.myUint64Int64Cases.myUint64Array["0^"]).toEqual(stringVal); expect(state.myTestProperty.myUint64Int64Cases.myInt64Map.get("a")).toEqual(valueOfInt64MapAtA); expect(state.myTestProperty.myUint64Int64Cases.myInt64Map.get("a^")).toEqual(stringVal); expect(state.myTestProperty.myUint64Int64Cases.refToUint64).toEqual(uint64Value); expect(state.myTestProperty.myUint64Int64Cases["refToUint64^"]).toEqual(stringVal); expect(state.myTestProperty.myUint64Int64Cases.refToUint64ArrayEntry).toEqual(valueOfUint64ArrayAtZero); expect(state.myTestProperty.myUint64Int64Cases["refToUint64ArrayEntry^"]).toEqual(stringVal); expect(state.myTestProperty.myUint64Int64Cases.refToInt64MapEntry).toEqual(valueOfInt64MapAtA); expect(state.myTestProperty.myUint64Int64Cases["refToInt64MapEntry^"]).toEqual(stringVal); // Ref Array // ref to Uint64 expect(state.myTestProperty.myUint64Int64Cases.refArrayToUint64Int64[0]).toEqual(uint64Value); expect(state.myTestProperty.myUint64Int64Cases.refArrayToUint64Int64["0^"]).toEqual(stringVal); expect(state.myTestProperty.myUint64Int64Cases.refArrayToUint64Int64[1]).toEqual(uint64Value); 
expect(state.myTestProperty.myUint64Int64Cases.refArrayToUint64Int64["1^"]).toEqual(stringVal); // ref to entry of Uint64Array expect(state.myTestProperty.myUint64Int64Cases.refArrayToUint64Int64[2]) .toEqual(valueOfUint64ArrayAtZero); expect(state.myTestProperty.myUint64Int64Cases.refArrayToUint64Int64["2^"]).toEqual(stringVal); expect(state.myTestProperty.myUint64Int64Cases.refArrayToUint64Int64[3]) .toEqual(valueOfUint64ArrayAtZero); expect(state.myTestProperty.myUint64Int64Cases.refArrayToUint64Int64["3^"]).toEqual(stringVal); // ref to entry of Int64Map expect(state.myTestProperty.myUint64Int64Cases.refArrayToUint64Int64[4]).toEqual(valueOfInt64MapAtA); expect(state.myTestProperty.myUint64Int64Cases.refArrayToUint64Int64["4^"]).toEqual(stringVal); expect(state.myTestProperty.myUint64Int64Cases.refArrayToUint64Int64[5]).toEqual(valueOfInt64MapAtA); expect(state.myTestProperty.myUint64Int64Cases.refArrayToUint64Int64["5^"]).toEqual(stringVal); // ref to ref to Uint64 expect(state.myTestProperty.myUint64Int64Cases.refArrayToUint64Int64[6]).toEqual(uint64Value); expect(state.myTestProperty.myUint64Int64Cases.refArrayToUint64Int64["6^"]).toEqual(stringVal); expect(state.myTestProperty.myUint64Int64Cases.refArrayToUint64Int64[7]).toEqual(uint64Value); expect(state.myTestProperty.myUint64Int64Cases.refArrayToUint64Int64["7^"]).toEqual(stringVal); // ref to ref to entry of Uint64Array expect(state.myTestProperty.myUint64Int64Cases.refArrayToUint64Int64[8]) .toEqual(valueOfUint64ArrayAtZero); expect(state.myTestProperty.myUint64Int64Cases.refArrayToUint64Int64["8^"]).toEqual(stringVal); expect(state.myTestProperty.myUint64Int64Cases.refArrayToUint64Int64[9]) .toEqual(valueOfUint64ArrayAtZero); expect(state.myTestProperty.myUint64Int64Cases.refArrayToUint64Int64["9^"]).toEqual(stringVal); // ref to ref to entry of Int64Map expect(state.myTestProperty.myUint64Int64Cases.refArrayToUint64Int64[10]).toEqual(valueOfInt64MapAtA); 
expect(state.myTestProperty.myUint64Int64Cases.refArrayToUint64Int64["10^"]).toEqual(stringVal); expect(state.myTestProperty.myUint64Int64Cases.refArrayToUint64Int64[11]).toEqual(valueOfInt64MapAtA); expect(state.myTestProperty.myUint64Int64Cases.refArrayToUint64Int64["11^"]).toEqual(stringVal); // Ref Map // ref to Uint64 expect(state.myTestProperty.myUint64Int64Cases.refMapToUint64Int64.get("a")).toEqual(uint64Value); expect(state.myTestProperty.myUint64Int64Cases.refMapToUint64Int64.get("a^")).toEqual(stringVal); expect(state.myTestProperty.myUint64Int64Cases.refMapToUint64Int64.get("b")).toEqual(uint64Value); expect(state.myTestProperty.myUint64Int64Cases.refMapToUint64Int64.get("b^")).toEqual(stringVal); // ref to entry of Uint64Array expect(state.myTestProperty.myUint64Int64Cases.refMapToUint64Int64 .get("c")).toEqual(valueOfUint64ArrayAtZero); expect(state.myTestProperty.myUint64Int64Cases.refMapToUint64Int64.get("c^")).toEqual(stringVal); expect(state.myTestProperty.myUint64Int64Cases.refMapToUint64Int64 .get("d")).toEqual(valueOfUint64ArrayAtZero); expect(state.myTestProperty.myUint64Int64Cases.refMapToUint64Int64.get("d^")).toEqual(stringVal); // ref to entry of Int64Map expect(state.myTestProperty.myUint64Int64Cases.refMapToUint64Int64 .get("e")).toEqual(valueOfInt64MapAtA); expect(state.myTestProperty.myUint64Int64Cases.refMapToUint64Int64.get("e^")).toEqual(stringVal); expect(state.myTestProperty.myUint64Int64Cases.refMapToUint64Int64 .get("f")).toEqual(valueOfInt64MapAtA); expect(state.myTestProperty.myUint64Int64Cases.refMapToUint64Int64.get("f^")).toEqual(stringVal); // ref to ref to Uint64 expect(state.myTestProperty.myUint64Int64Cases.refMapToUint64Int64.get("g")).toEqual(uint64Value); expect(state.myTestProperty.myUint64Int64Cases.refMapToUint64Int64.get("g^")).toEqual(stringVal); expect(state.myTestProperty.myUint64Int64Cases.refMapToUint64Int64.get("h")).toEqual(uint64Value); 
expect(state.myTestProperty.myUint64Int64Cases.refMapToUint64Int64.get("h^")).toEqual(stringVal);
// ref to ref to entry of Uint64Array
expect(state.myTestProperty.myUint64Int64Cases.refMapToUint64Int64
    .get("i")).toEqual(valueOfUint64ArrayAtZero);
expect(state.myTestProperty.myUint64Int64Cases.refMapToUint64Int64.get("i^")).toEqual(stringVal);
expect(state.myTestProperty.myUint64Int64Cases.refMapToUint64Int64
    .get("j")).toEqual(valueOfUint64ArrayAtZero);
expect(state.myTestProperty.myUint64Int64Cases.refMapToUint64Int64.get("j^")).toEqual(stringVal);
// ref to ref to entry of Int64Map
expect(state.myTestProperty.myUint64Int64Cases.refMapToUint64Int64
    .get("k")).toEqual(valueOfInt64MapAtA);
expect(state.myTestProperty.myUint64Int64Cases.refMapToUint64Int64.get("k^")).toEqual(stringVal);
expect(state.myTestProperty.myUint64Int64Cases.refMapToUint64Int64
    .get("l")).toEqual(valueOfInt64MapAtA);
expect(state.myTestProperty.myUint64Int64Cases.refMapToUint64Int64.get("l^")).toEqual(stringVal);
});
it("setting", function() {
    // Assigning a small number lands in the low 32-bit word of the Uint64 property.
    state.myTestProperty.myUint64Int64Cases.myUint64 = 1024;
    expect(
        rootNode.resolvePath("myTestProperty.myUint64Int64Cases.myUint64").getValue().getValueLow(),
    ).toEqual(1024);
    // Assigning 2^32 as a decimal string sets the high word to 1 (string form avoids JS
    // number precision issues for 64-bit values).
    state.myTestProperty.myUint64Int64Cases.myUint64 = "4294967296";
    expect(
        rootNode.resolvePath("myTestProperty.myUint64Int64Cases.myUint64").getValue().getValueHigh(),
    ).toEqual(1);
    // Disabled enum-array assertions kept for reference (chai "should" style):
    // state.myTestProperty.myUint64Int64Cases.myEnumArray = ['dos', 1];
    // rootNode.resolvePath('myTestProperty.myUint64Int64Cases.myEnumArray').get(0).should.equal(2);
    // rootNode.resolvePath('myTestProperty.myUint64Int64Cases.myEnumArray').get(1).should.equal(1);
    // state.myTestProperty.myUint64Int64Cases.myEnumArray.pop().should.equal(1);
    // state.myTestProperty.myUint64Int64Cases.myEnumArray.shift().should.equal(2);
    // state.myTestProperty.myUint64Int64Cases.myEnumArray.push(2);
    // rootNode.resolvePath('myTestProperty.myUint64Int64Cases.myEnumArray').get(0).should.equal(2);
    // state.myTestProperty.myUint64Int64Cases.myEnumArray.unshift('uno');
    // rootNode.resolvePath('myTestProperty.myUint64Int64Cases.myEnumArray').get(0).should.equal(1);
});
});
describe("Simple non primitive type (Vector)", function() {
    it("Reading", function() {
        expect(state.myTestProperty.myVector.x).toEqual(1);
        expect(state.myTestProperty.myVector.y).toEqual(2);
    });
    it("Setting", function() {
        // Plain JS object assignment writes through to the underlying property tree.
        state.myTestProperty.myVector = { x: 3, y: 4 };
        expect(rootNode.get("myTestProperty").get("myVector").get("x").getValue()).toEqual(3);
        expect(rootNode.get("myTestProperty").get("myVector").get("y").getValue()).toEqual(4);
        // Property
        state.myTestProperty.myVector = PropertyFactory.create(vector2DTemplate.typeid, "single",
            { x: 5, y: 6 });
        expect(rootNode.get("myTestProperty").get("myVector").get("x").getValue()).toEqual(5);
        expect(rootNode.get("myTestProperty").get("myVector").get("y").getValue()).toEqual(6);
        // A proxied property is also accepted as the assignment source.
        state.myTestProperty.myVector = PropertyProxy.proxify(
            PropertyFactory.create(vector2DTemplate.typeid, "single", { x: 7, y: 8 }));
        expect(rootNode.get("myTestProperty").get("myVector").get("x").getValue()).toEqual(7);
        expect(rootNode.get("myTestProperty").get("myVector").get("y").getValue()).toEqual(8);
        // Assigning a non-matching shape to a primitive property must throw.
        expect(() => {
            state.myTestProperty.myF32Number = [{ x: 1, y: 2 }];
        }).toThrow("PropertyProxy-007");
    });
});
describe("Array (with primitive type entries)", function() {
    describe("Int32 array", function() {
        it("Reading from Array by directly accessing via indices", function() {
            expect(state.myTestProperty.myI32Array[0]).toEqual(0);
            expect(state.myTestProperty.myI32Array[1]).toEqual(10);
            expect(state.myTestProperty.myI32Array[2]).toEqual(20);
        });
        it("Reading array length using .length", function() {
            expect(typeof (state.myTestProperty.myI32Array.length)).toEqual("number");
            expect(
                state.myTestProperty.myI32Array.length,
            ).toEqual(rootNode.get("myTestProperty").get("myI32Array").getLength());
        });
        it("Looping through the array indices using a for loop", function() {
            const
testArray = state.myTestProperty.myI32Array; const tempArray: any[] = []; // The array is defined as [0,10,20,30,...] // eslint-disable-next-line @typescript-eslint/prefer-for-of for (let i = 0; i < testArray.length; i++) { tempArray.push(testArray[i]); } expect(tempArray[0]).toEqual(testArray[0]); expect(tempArray[1]).toEqual(testArray[1]); expect(tempArray[2]).toEqual(testArray[2]); expect(tempArray[3]).toEqual(testArray[3]); }); it("Looping through the array entries using a for-of loop", function() { const testArray = state.myTestProperty.myI32Array; // The array is defined as [0,10,20,30,...] const tempArray: any[] = []; for (const entry of testArray) { tempArray.push(entry); } expect(tempArray[0]).toEqual(0); expect(tempArray[1]).toEqual(10); expect(tempArray[2]).toEqual(20); expect(tempArray[3]).toEqual(30); expect(tempArray[4]).toEqual(40); }); it("Looping through the array indices using a for-in loop", function() { const testArray = state.myTestProperty.myI32Array; // The array is defined as [0,1,2,3,...] 
const tempArray: string[] = []; // eslint-disable-next-line no-restricted-syntax for (const key in testArray) { // eslint-disable-line guard-for-in tempArray.push(key); } expect(tempArray.length).toEqual(testArray.length); expect(tempArray[0]).toEqual("0"); expect(tempArray[1]).toEqual("1"); expect(tempArray[2]).toEqual("2"); expect(tempArray[3]).toEqual("3"); expect(tempArray[4]).toEqual("4"); }); it("Proxy on array property should have a type of JS array", function() { const testArray = state.myTestProperty.myI32Array; expect(testArray instanceof Array).toEqual(true); expect(Array.isArray(testArray)).toEqual(true); }); it("check .concat() functionality", function() { const concat = state.myTestProperty.myI32Array .concat(["a", "b", "c"], state.myTestProperty.myI32Array); expect(concat.toString()).toEqual("0,10,20,30,40,a,b,c,0,10,20,30,40"); }); it("check .entries() functionality", function() { const iterator = state.myTestProperty.myI32Array.entries(); expect(iterator.next().value.toString()).toEqual("0,0"); expect(iterator.next().value.toString()).toEqual("1,10"); expect(iterator.next().value.toString()).toEqual("2,20"); expect(iterator.next().value.toString()).toEqual("3,30"); expect(iterator.next().value.toString()).toEqual("4,40"); expect(iterator.next().done).toEqual(true); }); it("check .every() functionality", function() { expect(state.myTestProperty.myI32Array.every((element) => (element < 50))).toEqual(true); expect(state.myTestProperty.myI32Array.every((element) => (element < 20))).toEqual(false); }); it("check .filter() functionality", function() { const filtered = state.myTestProperty.myI32Array.filter((element) => (element < 20)); expect(filtered.length).toEqual(2); expect(Object.getPrototypeOf(filtered)).toEqual(Object.getPrototypeOf([])); }); it("check .find() functionality", function() { expect(state.myTestProperty.myI32Array .find((element) => (element < 15 && element > 5))).toEqual(10); }); it("check .findIndex() functionality", function() { 
expect(state.myTestProperty.myI32Array. findIndex((element) => (element < 15 && element > 5))).toEqual(1); }); it("check .foreach() functionality", function() { const testArray = state.myTestProperty.myI32Array; const tempArray: number[] = []; const squareIt = function(element) { tempArray.push(element * element); }; testArray.forEach(squareIt); expect(tempArray[0]).toEqual(testArray[0] * testArray[0]); expect(tempArray[1]).toEqual(testArray[1] * testArray[1]); expect(tempArray[2]).toEqual(testArray[2] * testArray[2]); expect(tempArray[3]).toEqual(testArray[3] * testArray[3]); expect(tempArray[4]).toEqual(testArray[4] * testArray[4]); }); it("check .includes() functionality", function() { expect(state.myTestProperty.myI32Array.includes(20)).toEqual(true); expect(state.myTestProperty.myI32Array.includes(60)).toEqual(false); }); it("check .indexOf() functionality", function() { const testArray = state.myTestProperty.myI32Array; expect(testArray.indexOf(0)).toEqual(0); expect(testArray.indexOf(10)).toEqual(1); expect(testArray.indexOf(20)).toEqual(2); expect(testArray.indexOf(30)).toEqual(3); }); it("check .join() functionality", function() { expect(state.myTestProperty.myI32Array.join(" ")).toEqual("0 10 20 30 40"); }); it("check .keys() functionality", function() { const iterator = state.myTestProperty.myI32Array.keys(); expect(iterator.next().value).toEqual(0); expect(iterator.next().value).toEqual(1); expect(iterator.next().value).toEqual(2); expect(iterator.next().value).toEqual(3); expect(iterator.next().value).toEqual(4); expect(iterator.next().done).toEqual(true); }); it("check .lastIndexOf() functionality", function() { expect(state.myTestProperty.myI32Array.lastIndexOf(30)).toEqual(3); expect(state.myTestProperty.myI32Array.lastIndexOf(30, -3)).toEqual(-1); }); it("check .map() functionality", function() { const mapped = state.myTestProperty.myI32Array.map((element) => element * 2); expect(mapped.toString()).toEqual("0,20,40,60,80"); 
expect(Object.getPrototypeOf(mapped)).toEqual(Object.getPrototypeOf([])); }); it("check .reduce() functionality", function() { expect( state.myTestProperty.myI32Array.reduce( (accumulator, currentValue) => { return accumulator + currentValue; }, ), ).toEqual(100); expect( state.myTestProperty.myI32Array.reduce( (accumulator, currentValue) => { return accumulator + currentValue; }, -100, ), ).toEqual(0); }); it("check .reduceRight() functionality", function() { expect( state.myTestProperty.myI32Array.reduceRight( (previousValue, currentValue) => { return previousValue + currentValue; }, ), ).toEqual(100); expect( state.myTestProperty.myI32Array.reduceRight( (previousValue, currentValue) => { return previousValue + currentValue; }, -100, ), ).toEqual(0); }); it("check .some() functionality", function() { expect(state.myTestProperty.myI32Array.some((element) => (element > 10))).toEqual(true); expect(state.myTestProperty.myI32Array.some((element) => (element > 50))).toEqual(false); }); it("check .toString() functionality", function() { const testArray = state.myTestProperty.myI32Array; expect(testArray.toString()).toEqual("0,10,20,30,40"); }); it("check .values() functionality", function() { const iterator = state.myTestProperty.myI32Array.values(); expect(iterator.next().value).toEqual(0); expect(iterator.next().value).toEqual(10); expect(iterator.next().value).toEqual(20); expect(iterator.next().value).toEqual(30); expect(iterator.next().value).toEqual(40); expect(iterator.next().done).toEqual(true); }); it("should have the keys detectable through Object.keys() function", function() { const testArray = state.myTestProperty.myI32Array; const tempArray = Object.keys(testArray); expect(tempArray[0]).toEqual("0"); expect(tempArray[1]).toEqual("1"); expect(tempArray[2]).toEqual("2"); expect(tempArray[3]).toEqual("3"); expect(tempArray[4]).toEqual("4"); }); }); describe("Setting", function() { afterEach(function() { rootNode.get("myTestProperty").get("myI32Array").clear(); 
rootNode.get("myTestProperty").get("myI32Array").insertRange(0, [0, 10, 20, 30, 40]); }); it("should set via direct access", function() { state.myTestProperty.myI32Array[0] = 1; expect(state.myTestProperty.myI32Array[0]).toEqual(1); expect(rootNode.get("myTestProperty").get("myI32Array").get("0")).toEqual(1); expect(() => { state.myTestProperty.myI32Array[0] = [1, 2, 3]; }).toThrow("PropertyProxy-007"); }); it("should set an element out of range", function() { // Setting and element out of range state.myTestProperty.myI32Array[10] = 100; expect(rootNode.get("myTestProperty").get("myI32Array").getLength()).toEqual(11); expect(rootNode.get("myTestProperty").get("myI32Array").get("10")).toEqual(100); }); it("check .copyWithin() functionality", function() { state.myTestProperty.myI32Array.copyWithin(0, 3, 4); expect(rootNode.get("myTestProperty") .get("myI32Array").getValues().toString()).toEqual("30,10,20,30,40"); }); it("check .fill() functionality", function() { state.myTestProperty.myI32Array.fill(0); for (let i = 0; i < state.myTestProperty.myI32Array.length; i++) { expect(rootNode.get("myTestProperty").get("myI32Array").get(i.toString())).toEqual(0); } state.myTestProperty.myI32Array[0] = 0; state.myTestProperty.myI32Array[1] = 10; state.myTestProperty.myI32Array[2] = 20; state.myTestProperty.myI32Array[3] = 30; state.myTestProperty.myI32Array[4] = 40; }); it("check pop() functionality", function() { const popped = state.myTestProperty.myI32Array.pop(); expect(popped).toEqual(40); expect(state.myTestProperty.myI32Array.length).toEqual(4); expect(rootNode.get("myTestProperty").get("myI32Array").getLength()).toEqual(4); }); it("check .push() functionality", function() { const testArray = state.myTestProperty.myI32Array; expect(testArray.push(50)).toEqual(6); expect(testArray.length).toEqual(6); expect(testArray[5]).toEqual(50); expect(rootNode.get("myTestProperty").get("myI32Array").getLength()).toEqual(6); 
expect(rootNode.get("myTestProperty").get("myI32Array").get("5")).toEqual(50); // multiple elements expect(state.myTestProperty.myI32Array.push(60, 70)).toEqual(8); expect(rootNode.get("myTestProperty").get("myI32Array").getLength()).toEqual(8); expect(rootNode.get("myTestProperty").get("myI32Array").get("6")).toEqual(60); expect(rootNode.get("myTestProperty").get("myI32Array").get("7")).toEqual(70); // (proxied) property state.myTestProperty.myI32Array.push(PropertyFactory.create("Int32", "single", 80)); expect(rootNode.get("myTestProperty").get("myI32Array").getLength()).toEqual(9); expect(rootNode.get("myTestProperty").get("myI32Array").get("8")).toEqual(80); const proxied = PropertyProxy.proxify(PropertyFactory.create("Int32", "single", 90)); state.myTestProperty.myI32Array.push(proxied); expect(rootNode.get("myTestProperty").get("myI32Array").getLength()).toEqual(10); expect(rootNode.get("myTestProperty").get("myI32Array").get("9")).toEqual(90); state.myTestProperty.myI32Array.push( PropertyFactory.create("Int32", "single", 100), PropertyFactory.create("Int32", "single", 110), ); expect(rootNode.get("myTestProperty").get("myI32Array").getLength()).toEqual(12); expect(rootNode.get("myTestProperty").get("myI32Array").get("10")).toEqual(100); expect(rootNode.get("myTestProperty").get("myI32Array").get("11")).toEqual(110); expect(() => state.myTestProperty.myI32Array.push([1, 2, 3])).toThrow("PropertyProxy-002"); }); it("check .reverse() functionality", function() { state.myTestProperty.myI32Array.reverse(); expect(rootNode.resolvePath("myTestProperty.myI32Array") .getValues().toString()).toEqual("40,30,20,10,0"); }); it("check .shift() functionality", function() { const oldLength = state.myTestProperty.myI32Array.length; const first = state.myTestProperty.myI32Array.shift(); expect(first).toEqual(0); expect(state.myTestProperty.myI32Array.length).toEqual(oldLength - 1); expect(rootNode.resolvePath("myTestProperty.myI32Array").getLength()).toEqual(oldLength - 1); 
expect(rootNode.resolvePath("myTestProperty.myI32Array[0]")).toEqual(10); }); it("check .sort() functionality", function() { state.myTestProperty.myI32Array.sort((a, b) => b - a); expect(rootNode.resolvePath("myTestProperty.myI32Array") .getValues().toString()).toEqual("40,30,20,10,0"); }); it("check .splice() functionality", function() { const oldLength = state.myTestProperty.myI32Array.length; // Replace first element state.myTestProperty.myI32Array.splice(0, 1, 0); expect(state.myTestProperty.myI32Array.length).toEqual(oldLength); expect(rootNode.resolvePath("myTestProperty.myI32Array").getLength()).toEqual(oldLength); // Add some elements state.myTestProperty.myI32Array.splice(5, 0, 50, 60, 70); expect(state.myTestProperty.myI32Array.length).toEqual(oldLength + 3); expect(rootNode.resolvePath("myTestProperty.myI32Array").getLength()).toEqual(oldLength + 3); expect(rootNode.resolvePath("myTestProperty.myI32Array").getValues().toString()).toEqual( "0,10,20,30,40,50,60,70", ); // Remove added elements let removed = state.myTestProperty.myI32Array.splice(5, 3); expect(removed[0]).toEqual(50); expect(removed[1]).toEqual(60); expect(removed[2]).toEqual(70); expect(state.myTestProperty.myI32Array.length).toEqual(oldLength); expect(rootNode.resolvePath("myTestProperty.myI32Array").getLength()).toEqual(oldLength); expect(rootNode.resolvePath("myTestProperty.myI32Array") .getValues().toString()).toEqual("0,10,20,30,40"); // Re-add elements state.myTestProperty.myI32Array.splice(5, 0, 50, 60, 70); // Remove with negative index removed = state.myTestProperty.myI32Array.splice(-3, 3); expect(removed[0]).toEqual(50); expect(removed[1]).toEqual(60); expect(removed[2]).toEqual(70); expect(state.myTestProperty.myI32Array.length).toEqual(oldLength); expect(rootNode.resolvePath("myTestProperty.myI32Array").getLength()).toEqual(oldLength); // Re-add elements state.myTestProperty.myI32Array.splice(5, 0, 50, 60, 70); // Remove with negative index removed = 
state.myTestProperty.myI32Array.splice(5); expect(removed[0]).toEqual(50); expect(removed[1]).toEqual(60); expect(removed[2]).toEqual(70); expect(state.myTestProperty.myI32Array.length).toEqual(oldLength); expect(rootNode.resolvePath("myTestProperty.myI32Array").getLength()).toEqual(oldLength); // Add (proxied) properties state.myTestProperty.myI32Array.splice(5, 0, PropertyProxy.proxify(PropertyFactory.create("Int32", "single", 50)), PropertyFactory.create("Int32", "single", 60), ); expect(rootNode.resolvePath("myTestProperty.myI32Array").getLength()).toEqual(oldLength + 2); expect(rootNode.resolvePath("myTestProperty.myI32Array").getValues().toString()).toEqual( "0,10,20,30,40,50,60", ); expect(() => state.myTestProperty.myI32Array.splice(1, 0, [1, 2, 3])).toThrow("PropertyProxy-002"); }); it("check .unshift() functionality", function() { const oldLength = state.myTestProperty.myI32Array.length; state.myTestProperty.myI32Array.unshift(-10); expect(state.myTestProperty.myI32Array.length).toEqual(oldLength + 1); expect(rootNode.resolvePath("myTestProperty.myI32Array").getLength()).toEqual(oldLength + 1); expect(rootNode.resolvePath("myTestProperty.myI32Array[0]")).toEqual(-10); // Add (proxied) properties state.myTestProperty.myI32Array.unshift( PropertyProxy.proxify(PropertyFactory.create("Int32", "single", -20)), PropertyFactory.create("Int32", "single", -30), ); expect(rootNode.resolvePath("myTestProperty.myI32Array").getLength()).toEqual(oldLength + 3); expect(rootNode.resolvePath("myTestProperty.myI32Array").getValues().toString()).toEqual( "-20,-30,-10,0,10,20,30,40", ); expect(() => state.myTestProperty.myI32Array .unshift([1, 2, 3])).toThrow("PropertyProxy-002"); }); it("should be able to adjust array size be setting length", function() { state.myTestProperty.myI32Array.length = 10; expect(rootNode.resolvePath("myTestProperty.myI32Array").getLength()).toEqual(10); state.myTestProperty.myI32Array.length = 5; 
expect(rootNode.resolvePath("myTestProperty.myI32Array").getLength()).toEqual(5); expect(() => { state.myTestProperty.myI32Array.length = -10; }).toThrow(RangeError); }); it("should update proxy from remote changes", function() { const proxy = state.myTestProperty.myI32Array; expect(proxy[0]).toEqual(0); rootNode.get("myTestProperty").get("myI32Array").set(0, 42); expect(state.myTestProperty.myI32Array[0]) .toEqual(rootNode.resolvePath("myTestProperty.myI32Array[0]")); expect(proxy[0]).toEqual(rootNode.resolvePath("myTestProperty.myI32Array[0]")); rootNode.get("myTestProperty").get("myI32Array").push(888); expect(proxy.length).toEqual(rootNode.get("myTestProperty").get("myI32Array").getLength()); expect(state.myTestProperty.myI32Array.length).toEqual( rootNode.get("myTestProperty").get("myI32Array").getLength(), ); expect(proxy[proxy.length - 1]).toEqual(888); }); it("check behavior of .forEach() if a modification occurs in the loop", function() { const entries: any[] = []; state.myTestProperty.myI32Array.forEach((element) => { entries.push(element); if (element === 20) { state.myTestProperty.myI32Array.shift(); } }); // entries should not include 30 expect(entries.includes(30)).toEqual(false); }); it("should be possible to assign another ArrayProperty", function() { // This will fill the target with clones of the entry; state.myTestProperty.myI32Array = state.constantCollections.primitiveArray; const myI32Array = rootNode.resolvePath("myTestProperty.myI32Array"); expect(myI32Array.getLength()).toEqual(3); expect(myI32Array.get(0)).toEqual(42); expect(myI32Array.get(1)).toEqual(43); expect(myI32Array.get(2)).toEqual(44); }); it("should be possible to assign a new iterable", function() { const checkAssignment = () => { const myI32Array = rootNode.resolvePath("myTestProperty.myI32Array"); expect(myI32Array.getLength()).toEqual(4); expect(myI32Array.get(0)).toEqual(1); expect(myI32Array.get(1)).toEqual(2); expect(myI32Array.get(2)).toEqual(3); 
expect(myI32Array.get(3)).toEqual(4); myI32Array.clear(); }; // Assign pure javascript iterables const numbers = [1, 2, 3, 4]; state.myTestProperty.myI32Array = numbers; checkAssignment(); // Assign iterables of properties const numbersAsProperties = () => [ PropertyFactory.create("Int32", "single", numbers[0]), PropertyFactory.create("Int32", "single", numbers[1]), PropertyFactory.create("Int32", "single", numbers[2]), PropertyFactory.create("Int32", "single", numbers[3]), ]; state.myTestProperty.myI32Array = numbersAsProperties(); checkAssignment(); // Assign iterables of primitive properties in the property tree should work rootNode.insert("Int32Prop", PropertyFactory.create("Int32", "single", 42)); state.myTestProperty.myI32Array = [rootNode.resolvePath("Int32Prop")]; expect(rootNode.resolvePath("myTestProperty.myI32Array[0]")).toEqual( rootNode.resolvePath("Int32Prop").getValue()); rootNode.resolvePath("myTestProperty.myI32Array").clear(); // Assigning a non-iterable should throw expect(() => { state.myTestProperty.myI32Array = numbersAsProperties()[0]; }).toThrow("PropertyProxy-003"); }); }); }); describe("Array with complex type entries", function() { const arrayWithJsOutfit = [ { x: 1, y: 2 }, { x: 10, y: 20 }, ]; it("reading entries via directly accessing the indices", function() { const tempComplexArray = state.myTestProperty.myComplexArray; expect(tempComplexArray[0].x).toEqual(1); expect(tempComplexArray[0].y).toEqual(2); expect(tempComplexArray[1].x).toEqual(10); expect(tempComplexArray[1].y).toEqual(20); }); it("reading array-length using .length", function() { const tempComplexArray = state.myTestProperty.myComplexArray; expect(tempComplexArray.length).toEqual(2); }); it("Looping through the array indices using a for-in loop", function() { const testArray = state.myTestProperty.myComplexArray; // The array is defined as [0,1,2,3,...] 
const tempArray: string[] = []; // eslint-disable-next-line no-restricted-syntax for (const key in testArray) { // eslint-disable-line guard-for-in tempArray.push(key); } expect(tempArray.length).toEqual(testArray.length); expect(tempArray[0]).toEqual("0"); expect(tempArray[1]).toEqual("1"); }); it("Looping through the array indices using a for loop", function() { const tempComplexArray = state.myTestProperty.myComplexArray; for (let i = 0; i < tempComplexArray.length; i++) { if (i === 0) { expect(tempComplexArray[i].x).toEqual(1); expect(tempComplexArray[i].y).toEqual(2); } else if (i === 1) { expect(tempComplexArray[i].x).toEqual(10); expect(tempComplexArray[i].y).toEqual(20); } } }); it("Looping through the array entries using a for-of loop", function() { let counter = 0; for (const entry of state.myTestProperty.myComplexArray) { if (entry.x === 1) { expect(entry.y).toEqual(2); } else if (entry.x === 10) { expect(entry.y).toEqual(20); } counter++; } expect(counter).toEqual(2); }); it("check .every() functionality", function() { expect(state.myTestProperty.myComplexArray.every((element) => (element.x < 100))).toEqual(true); expect(state.myTestProperty.myComplexArray.every((element) => (element.y < 20))).toEqual(false); }); it("check .filter() functionality", function() { const filtered = state.myTestProperty.myComplexArray.filter((element) => (element.x < 10)); expect(filtered.length).toEqual(1); }); it("check .find() functionality", function() { expect(state.myTestProperty.myComplexArray.find( (element) => (element.x < 15 && element.x > 5)).x).toEqual(10); }); it("check .findIndex() functionality", function() { expect(state.myTestProperty.myComplexArray.findIndex(( element) => (element.x < 15 && element.x > 5))).toEqual(1); }); it("check .forEach() functionality", function() { let counter = 0; state.myTestProperty.myComplexArray.forEach((entry) => { if (entry.x === 1) { expect(entry.y).toEqual(2); } else if (entry.x === 10) { expect(entry.y).toEqual(20); } 
counter++; }); expect(counter).toEqual(2); }); it("check .includes() functionality", function() { expect(state.myTestProperty.myComplexArray.includes(state.myTestProperty.myComplexArray[0])).toEqual( arrayWithJsOutfit.includes(arrayWithJsOutfit[0]), ); expect( state.myTestProperty.myComplexArray .includes(rootNode.resolvePath("myTestProperty.myComplexArray[0]")), ).toEqual(true); expect( state.myTestProperty.myComplexArray.includes({ x: 1, y: 2 }), ).toEqual(arrayWithJsOutfit.includes({ x: 1, y: 2 })); expect(state.myTestProperty.myComplexArray.includes(state.myTestProperty.myComplexArray[0], 1)).toEqual( arrayWithJsOutfit.includes(arrayWithJsOutfit[0], 1), ); expect(state.myTestProperty.myComplexArray .includes(state.myTestProperty.myComplexArray[0], -1)).toEqual( arrayWithJsOutfit.includes(arrayWithJsOutfit[0], -1), ); expect(state.myTestProperty.myComplexArray .includes(state.myTestProperty.myComplexArray[0], -100)).toEqual( arrayWithJsOutfit.includes(arrayWithJsOutfit[0], -100), ); expect(state.myTestProperty.myComplexArray.includes(state.myTestProperty.myComplexArray[1], 2)).toEqual( arrayWithJsOutfit.includes(arrayWithJsOutfit[1], 2), ); expect(state.myTestProperty.myComplexArray .includes(state.myTestProperty.myComplexArray[1], 100)).toEqual( arrayWithJsOutfit.includes(arrayWithJsOutfit[1], 100), ); }); it("check .join() functionality", function() { expect(state.myTestProperty.myComplexArray.join(" ")).toEqual(arrayWithJsOutfit.join(" ")); }); it("check .lastIndexOf() functionality", function() { expect(state.myTestProperty.myComplexArray.lastIndexOf({ x: 1, y: 2 })).toEqual( arrayWithJsOutfit.lastIndexOf({ x: 1, y: 2 }), ); expect(state.myTestProperty.myComplexArray.lastIndexOf(state.myTestProperty.myComplexArray[1])).toEqual( arrayWithJsOutfit.lastIndexOf(arrayWithJsOutfit[1]), ); expect(state.myTestProperty.myComplexArray .lastIndexOf(state.myTestProperty.myComplexArray[1], 1)).toEqual( arrayWithJsOutfit.lastIndexOf(arrayWithJsOutfit[1], 1), ); 
expect(state.myTestProperty.myComplexArray
    .lastIndexOf(state.myTestProperty.myComplexArray[1], 2)).toEqual(
    arrayWithJsOutfit.lastIndexOf(arrayWithJsOutfit[1], 2),
);
expect(state.myTestProperty.myComplexArray
    .lastIndexOf(state.myTestProperty.myComplexArray[1], -1)).toEqual(
    arrayWithJsOutfit.lastIndexOf(arrayWithJsOutfit[1], -1),
);
expect(
    state.myTestProperty.myComplexArray
        .lastIndexOf(rootNode.resolvePath("myTestProperty.myComplexArray[0]")),
).toEqual(arrayWithJsOutfit.lastIndexOf(arrayWithJsOutfit[0]));
expect(
    state.myTestProperty.myComplexArray.lastIndexOf(state.myTestProperty.myComplexArray[0], -1),
).toEqual(arrayWithJsOutfit.lastIndexOf(arrayWithJsOutfit[0], -1));
expect(
    state.myTestProperty.myComplexArray.lastIndexOf(state.myTestProperty.myComplexArray[0], -2),
).toEqual(arrayWithJsOutfit.lastIndexOf(arrayWithJsOutfit[0], -2));
expect(
    state.myTestProperty.myComplexArray.lastIndexOf(state.myTestProperty.myComplexArray[0], -3),
).toEqual(arrayWithJsOutfit.lastIndexOf(arrayWithJsOutfit[0], -3));
});
it("check .map() functionality", function() {
    expect(state.myTestProperty.myComplexArray.map((element) => element.x * 2).toString()).toEqual("2,20");
});
it("check .reduce() functionality", function() {
    // Without an initial value the first entry object is the seed accumulator:
    // 1 + 2 + 10 + 20 = 33.
    expect(state.myTestProperty.myComplexArray.reduce((accumulator, currentValue) => {
        return accumulator.x + accumulator.y + currentValue.x + currentValue.y;
    })).toEqual(33);
    expect(state.myTestProperty.myComplexArray.reduce((accumulator, currentValue) => {
        return accumulator + currentValue.x + currentValue.y;
    }, -33)).toEqual(0);
});
it("check .reduceRight() functionality", function() {
    expect(state.myTestProperty.myComplexArray.reduceRight((previousValue, currentValue) => {
        return previousValue.x + previousValue.y + currentValue.x + currentValue.y;
    })).toEqual(33);
    expect(state.myTestProperty.myComplexArray.reduceRight((previousValue, currentValue) => {
        return previousValue + currentValue.x + currentValue.y;
    }, -33)).toEqual(0);
});
it("check .some() functionality", function() {
    expect(state.myTestProperty.myComplexArray.some((element) => (element.x > 1))).toEqual(true);
    expect(state.myTestProperty.myComplexArray.some((element) => (element.x > 50))).toEqual(false);
});
it("check .toString() functionality", function() {
    const testArray = state.myTestProperty.myComplexArray;
    expect(testArray.toString()).toEqual(arrayWithJsOutfit.toString());
});
it("check .values() functionality", function() {
    const iterator = state.myTestProperty.myComplexArray.values();
    expect(iterator.next().value.x).toEqual(1);
    expect(iterator.next().value.x).toEqual(10);
    expect(iterator.next().done).toEqual(true);
});
describe("Setting", function() {
    // Restore the canonical two-vector content after each mutating test.
    afterEach(function() {
        rootNode.get("myTestProperty").get("myComplexArray").clear();
        rootNode.get("myTestProperty").get("myComplexArray").insertRange(0, [
            PropertyFactory
                .create("autodesk.appframework.tests:myVector2D-1.0.0", "single", { x: 1, y: 2 }),
            PropertyFactory
                .create("autodesk.appframework.tests:myVector2D-1.0.0", "single", { x: 10, y: 20 }),
        ],
        );
    });
    it("should set via direct access", function() {
        state.myTestProperty.myComplexArray[0].x = 42;
        expect(rootNode.resolvePath("myTestProperty.myComplexArray[0].x").getValue()).toEqual(42);
        expect(state.myTestProperty.myComplexArray[0].x).toEqual(42);
        state.myTestProperty.myComplexArray[0] = { x: 3, y: 4 };
        expect(state.myTestProperty.myComplexArray[0].x).toEqual(3);
        expect(state.myTestProperty.myComplexArray[0].y).toEqual(4);
        expect(rootNode.resolvePath("myTestProperty.myComplexArray[0].x").getValue()).toEqual(3);
        expect(rootNode.resolvePath("myTestProperty.myComplexArray[0].y").getValue()).toEqual(4);
        // (proxied) properties
        state.myTestProperty.myComplexArray[0] = PropertyProxy.proxify(
            PropertyFactory.create(vector2DTemplate.typeid, "single", { x: 30, y: 40 }),
        );
        expect(rootNode.get("myTestProperty")
            .get("myComplexArray").get("0").get("x").getValue()).toEqual(30);
        expect(rootNode.get("myTestProperty")
.get("myComplexArray").get("0").get("y").getValue()).toEqual(40); state.myTestProperty.myComplexArray[0] = PropertyFactory.create( vector2DTemplate.typeid, "single", { x: 5, y: 6 }, ); expect(rootNode.get("myTestProperty") .get("myComplexArray").get("0").get("x").getValue()).toEqual(5); expect(rootNode.get("myTestProperty") .get("myComplexArray").get("0").get("y").getValue()).toEqual(6); // polymorphic state.myTestProperty.myComplexArray[0] = PropertyFactory.create( vector3DTemplate.typeid, "single", { x: 50, y: 60, z: 1 }, ); expect(rootNode.get("myTestProperty") .get("myComplexArray").get("0").get("x").getValue()).toEqual(50); expect(rootNode.get("myTestProperty") .get("myComplexArray").get("0").get("y").getValue()).toEqual(60); expect(rootNode.get("myTestProperty") .get("myComplexArray").get("0").get("z").getValue()).toEqual(1); }); it("should set an element out of range", function() { // Setting and element out of range state.myTestProperty.myComplexArray[10] = { x: 100, y: 100 }; expect(rootNode.get("myTestProperty") .get("myComplexArray").getLength()).toEqual(11); expect(rootNode.get("myTestProperty") .get("myComplexArray").get(10).get("x").getValue()).toEqual(100); expect(rootNode.get("myTestProperty") .get("myComplexArray").get(10).get("y").getValue()).toEqual(100); }); it("check .concat() functionality", function() { const concat = state.myTestProperty.myComplexArray.concat(["a", "b", "c"], state.myTestProperty.myComplexArray); expect(concat.length).toEqual(7); // should still be able to change underlying properties via the proxies in the concatenated array concat[0].x = 42; expect(state.myTestProperty.myComplexArray[0].x).toEqual(42); expect(concat[5].x).toEqual(42); expect(rootNode.resolvePath("myTestProperty.myComplexArray[0].x").getValue()).toEqual(42); }); it("check .copyWithin() functionality", function() { // add polymorphic entry rootNode.resolvePath("myTestProperty.myComplexArray").push( PropertyFactory.create(vector3DTemplate.typeid, "single", { 
x: 100, y: 100, z: 1 }), ); const entry = rootNode.resolvePath("myTestProperty.myComplexArray[2]"); state.myTestProperty.myComplexArray.copyWithin(0, 2, 3); expect(rootNode.get("myTestProperty").get("myComplexArray").get(0).getValues().x).toEqual(100); expect(rootNode.get("myTestProperty").get("myComplexArray").get(0).getValues().y).toEqual(100); expect(rootNode.get("myTestProperty").get("myComplexArray").get(0).getValues().z).toEqual(1); expect(rootNode.get("myTestProperty").get("myComplexArray").get(2).getValues().x).toEqual(100); expect(rootNode.get("myTestProperty").get("myComplexArray").get(2).getValues().y).toEqual(100); expect(rootNode.get("myTestProperty").get("myComplexArray").get(2).getValues().z).toEqual(1); expect(rootNode.get("myTestProperty").get("myComplexArray").get(2)).toEqual(entry); expect(() => { state.myTestProperty.myComplexArray[0] = rootNode.resolvePath("myTestProperty.myVector"); }).toThrow(); }); it("check .fill() functionality", function() { state.myTestProperty.myComplexArray.fill({ x: 1, y: 2 }); for (let i = 0; i < state.myTestProperty.myComplexArray.length; i++) { expect( rootNode.get("myTestProperty").get("myComplexArray").get(i.toString()).get("x").getValue(), ).toEqual(1); expect( rootNode.get("myTestProperty").get("myComplexArray").get(i.toString()).get("y").getValue(), ).toEqual(2); } state.myTestProperty.myComplexArray.fill( PropertyFactory.create(vector2DTemplate.typeid, "single", { x: 3, y: 4 }), ); for (let i = 0; i < state.myTestProperty.myComplexArray.length; i++) { expect( rootNode.get("myTestProperty").get("myComplexArray").get(i.toString()).get("x").getValue(), ).toEqual(3); expect( rootNode.get("myTestProperty").get("myComplexArray").get(i.toString()).get("y").getValue(), ).toEqual(4); } state.myTestProperty.myComplexArray.fill( PropertyProxy.proxify( PropertyFactory.create(vector2DTemplate.typeid, "single", { x: 5, y: 6 })), ); for (let i = 0; i < state.myTestProperty.myComplexArray.length; i++) { expect( 
rootNode.get("myTestProperty").get("myComplexArray").get(i.toString()).get("x").getValue(), ).toEqual(5); expect( rootNode.get("myTestProperty").get("myComplexArray").get(i.toString()).get("y").getValue(), ).toEqual(6); } // polymorphic state.myTestProperty.myComplexArray.fill( PropertyProxy.proxify(PropertyFactory.create(vector3DTemplate.typeid, "single", { x: 7, y: 8, z: 1 }))); for (let i = 0; i < state.myTestProperty.myComplexArray.length; i++) { expect( rootNode.get("myTestProperty").get("myComplexArray").get(i.toString()).get("x").getValue(), ).toEqual(7); expect( rootNode.get("myTestProperty").get("myComplexArray").get(i.toString()).get("y").getValue(), ).toEqual(8); expect( rootNode.get("myTestProperty").get("myComplexArray").get(i.toString()).get("z").getValue(), ).toEqual(1); } expect(() => { state.myTestProperty.myComplexArray[0] = rootNode.resolvePath("myTestProperty.myVector"); }).toThrow(); }); it("check .pop() functionality", function() { const popped = state.myTestProperty.myComplexArray.pop(); expect(popped.x).toEqual(10); expect(popped.y).toEqual(20); expect(rootNode.get("myTestProperty").get("myComplexArray").getLength()).toEqual(1); }); it("check .push() functionality", function() { expect(state.myTestProperty.myComplexArray.push({ x: 3, y: 4 })).toEqual(3); expect(rootNode.get("myTestProperty") .get("myComplexArray").getLength()).toEqual(3); expect(rootNode.get("myTestProperty") .get("myComplexArray").get("2").get("x").getValue()).toEqual(3); expect(rootNode.get("myTestProperty") .get("myComplexArray").get("2").get("y").getValue()).toEqual(4); // multiple elements expect(state.myTestProperty.myComplexArray.push({ x: 30, y: 40 }, { x: 5, y: 6 })).toEqual(5); expect(rootNode.get("myTestProperty") .get("myComplexArray").getLength()).toEqual(5); expect(rootNode.get("myTestProperty") .get("myComplexArray").get("3").get("x").getValue()).toEqual(30); expect(rootNode.get("myTestProperty") 
.get("myComplexArray").get("3").get("y").getValue()).toEqual(40); expect(rootNode.get("myTestProperty") .get("myComplexArray").get("4").get("x").getValue()).toEqual(5); expect(rootNode.get("myTestProperty") .get("myComplexArray").get("4").get("y").getValue()).toEqual(6); // (proxied) properties state.myTestProperty.myComplexArray.push( PropertyProxy.proxify(PropertyFactory.create(vector2DTemplate.typeid, "single", { x: 50, y: 60 })), PropertyFactory.create(vector2DTemplate.typeid, "single", { x: 7, y: 8 }), ); expect(rootNode.get("myTestProperty") .get("myComplexArray").getLength()).toEqual(7); expect(rootNode.get("myTestProperty") .get("myComplexArray").get("5").get("x").getValue()).toEqual(50); expect(rootNode.get("myTestProperty") .get("myComplexArray").get("5").get("y").getValue()).toEqual(60); expect(rootNode.get("myTestProperty") .get("myComplexArray").get("6").get("x").getValue()).toEqual(7); expect(rootNode.get("myTestProperty") .get("myComplexArray").get("6").get("y").getValue()).toEqual(8); // polymorphic state.myTestProperty.myComplexArray.push( PropertyFactory.create(vector3DTemplate.typeid, "single", { x: 70, y: 80, z: 1 }), ); expect(rootNode.get("myTestProperty") .get("myComplexArray").getLength()).toEqual(8); expect(rootNode.get("myTestProperty") .get("myComplexArray").get("7").get("x").getValue()).toEqual(70); expect(rootNode.get("myTestProperty") .get("myComplexArray").get("7").get("y").getValue()).toEqual(80); expect(rootNode.get("myTestProperty") .get("myComplexArray").get("7").get("z").getValue()).toEqual(1); }); it("check .reverse() functionality", function() { const entry = rootNode.resolvePath("myTestProperty.myComplexArray[0]"); state.myTestProperty.myComplexArray.reverse(); expect(rootNode.resolvePath("myTestProperty.myComplexArray[0].x").getValue()).toEqual(10); expect(rootNode.resolvePath("myTestProperty.myComplexArray[1].x").getValue()).toEqual(1); // Check that it still refers to the same property 
expect(rootNode.resolvePath("myTestProperty.myComplexArray[1]")).toEqual(entry); expect(() => { state.myTestProperty.myComplexArray[0] = rootNode.resolvePath("myTestProperty.myVector"); }).toThrow(); }); it("check .shift() functionality", function() { const oldLength = state.myTestProperty.myComplexArray.length; const first = state.myTestProperty.myComplexArray.shift(); expect(first.x).toEqual(1); expect(first.y).toEqual(2); expect(state.myTestProperty.myComplexArray.length).toEqual(oldLength - 1); expect(rootNode.resolvePath("myTestProperty.myComplexArray").getLength()).toEqual(oldLength - 1); expect(() => { state.myTestProperty.myComplexArray[0] = rootNode.resolvePath("myTestProperty.myVector"); }).toThrow(); }); it("check .sort() functionality", function() { const entry = rootNode.resolvePath("myTestProperty.myComplexArray[0]"); // add polymorphic entry rootNode.resolvePath("myTestProperty.myComplexArray").push( PropertyFactory.create(vector3DTemplate.typeid, "single", { x: 100, y: 100, z: 1 }), ); state.myTestProperty.myComplexArray.sort((a, b) => b.x - a.x); expect(rootNode.resolvePath("myTestProperty.myComplexArray[0].x").getValue()).toEqual(100); expect(rootNode.resolvePath("myTestProperty.myComplexArray[1].x").getValue()).toEqual(10); expect(rootNode.resolvePath("myTestProperty.myComplexArray[2].x").getValue()).toEqual(1); expect( rootNode.resolvePath("myTestProperty.myComplexArray[0]").getTypeid(), ).toEqual(vector3DTemplate.typeid); expect( rootNode.resolvePath("myTestProperty.myComplexArray[1]").getTypeid(), ).toEqual(vector2DTemplate.typeid); expect( rootNode.resolvePath("myTestProperty.myComplexArray[2]").getTypeid(), ).toEqual(vector2DTemplate.typeid); // Check that it still refers to the same property expect(rootNode.resolvePath("myTestProperty.myComplexArray[2]")).toEqual(entry); }); it("check .swap() functionality", function() { const entry0 = rootNode.resolvePath("myTestProperty.myComplexArray[0]"); const entry1 = 
rootNode.resolvePath("myTestProperty.myComplexArray[1]"); state.myTestProperty.myComplexArray.swap(0, 1); expect(rootNode.resolvePath("myTestProperty.myComplexArray[1]")).toEqual(entry0); expect(rootNode.resolvePath("myTestProperty.myComplexArray[0]")).toEqual(entry1); expect(rootNode.resolvePath("myTestProperty.myComplexArray[1].x").getValue()).toEqual(1); expect(rootNode.resolvePath("myTestProperty.myComplexArray[1].y").getValue()).toEqual(2); expect(rootNode.resolvePath("myTestProperty.myComplexArray[0].x").getValue()).toEqual(10); expect(rootNode.resolvePath("myTestProperty.myComplexArray[0].y").getValue()).toEqual(20); }); it("check .splice() functionality", function() { const oldLength = state.myTestProperty.myComplexArray.length; // Replace first element state.myTestProperty.myComplexArray.splice(0, 1, { x: 1, y: 2 }); expect(state.myTestProperty.myComplexArray.length).toEqual(oldLength); expect(rootNode.resolvePath("myTestProperty.myComplexArray").getLength()).toEqual(oldLength); // Add some elements state.myTestProperty.myComplexArray.splice(2, 0, { x: 3, y: 4 }, { x: 30, y: 40 }); expect(state.myTestProperty.myComplexArray.length).toEqual(oldLength + 2); expect(rootNode.resolvePath("myTestProperty.myComplexArray").getLength()).toEqual(oldLength + 2); const newArrayWithJsOutfit = [ { x: 1, y: 2 }, { x: 10, y: 20 }, { x: 3, y: 4 }, { x: 30, y: 40 }, ]; expect(rootNode.resolvePath("myTestProperty.myComplexArray").getValues().toString()).toEqual( newArrayWithJsOutfit.toString(), ); // Remove added elements const removed = state.myTestProperty.myComplexArray.splice(2, 2); expect(removed[0].x).toEqual(3); expect(removed[0].y).toEqual(4); expect(removed[1].x).toEqual(30); expect(removed[1].y).toEqual(40); expect(state.myTestProperty.myComplexArray.length).toEqual(oldLength); expect(rootNode.resolvePath("myTestProperty.myComplexArray").getLength()).toEqual(oldLength); arrayWithJsOutfit.splice(2, 2); 
expect(rootNode.resolvePath("myTestProperty.myComplexArray").getValues().toString()).toEqual( arrayWithJsOutfit.toString(), ); }); it("check .unshift() functionality", function() { const oldLength = state.myTestProperty.myComplexArray.length; expect(state.myTestProperty.myComplexArray.unshift({ x: -1, y: -2 })).toEqual(oldLength + 1); expect(state.myTestProperty.myComplexArray.length).toEqual(oldLength + 1); expect(rootNode.resolvePath("myTestProperty.myComplexArray").getLength()).toEqual(oldLength + 1); expect(rootNode.resolvePath("myTestProperty.myComplexArray[0].x").getValue()).toEqual(-1); expect(rootNode.resolvePath("myTestProperty.myComplexArray[0].y").getValue()).toEqual(-2); }); it("should be able to adjust array size be setting length", function() { state.myTestProperty.myComplexArray.length = 4; expect(rootNode.resolvePath("myTestProperty.myComplexArray").getLength()).toEqual(4); }); it("should be possible to assign another ArrayProperty", function() { // This will fill the target with clones of the entry; state.myTestProperty.myComplexArray = state.constantCollections.nonPrimitiveArray; const myComplexArray = rootNode.resolvePath("myTestProperty.myComplexArray"); expect(myComplexArray.getLength()).toEqual(2); expect(myComplexArray.get(0).get("x").getValue()).toEqual(42); expect(myComplexArray.get(0).get("y").getValue()).toEqual(43); expect(myComplexArray.get(1).get("x").getValue()).toEqual(44); expect(myComplexArray.get(1).get("y").getValue()).toEqual(45); }); it("should be possible to assign a new iterable", function() { const checkAssignment = () => { const complexArray = rootNode.resolvePath("myTestProperty.myComplexArray"); expect(complexArray.getLength()).toEqual(2); expect(complexArray.get(0).get("x").getValue()).toEqual(1); expect(complexArray.get(0).get("y").getValue()).toEqual(2); expect(complexArray.get(1).get("x").getValue()).toEqual(3); expect(complexArray.get(1).get("y").getValue()).toEqual(4); complexArray.clear(); }; // Assign pure 
javascript iterables const vectors = [{ x: 1, y: 2 }, { x: 3, y: 4 }]; state.myTestProperty.myComplexArray = vectors; checkAssignment(); // Assign iterables of properties const vectorsAsProperties = () => [ PropertyFactory.create(vector2DTemplate.typeid, "single", vectors[0]), PropertyFactory.create(vector2DTemplate.typeid, "single", vectors[1]), ]; state.myTestProperty.myComplexArray = vectorsAsProperties(); checkAssignment(); // Assign iterables of properties in the property tree should throw expect(() => { state.myTestProperty.myComplexArray = [rootNode.get("myTestProperty").get("myVector")]; }).toThrow(); // Assigning a non-iterable should throw expect(() => { state.myTestProperty.myComplexArray = vectorsAsProperties()[0]; }).toThrow("PropertyProxy-003"); }); }); }); describe("Array of collections", function() { it("should be able to access nested collections", function() { expect(state.myGenericArray.length).toEqual(3); // Nested array expect(state.myGenericArray[0][0]).toEqual(0); expect(state.myGenericArray[0][1]).toEqual(1); expect(state.myGenericArray[0][2]).toEqual(2); expect(state.myGenericArray[0][3]).toEqual(3); // Nested map expect(state.myGenericArray[1].get("a")).toEqual(0); expect(state.myGenericArray[1].get("b")).toEqual(1); expect(state.myGenericArray[1].get("c")).toEqual(2); // Nested set expect(state.myGenericArray[2].size).toEqual(2); }); it("should be able to set entries of nested collections", function() { state.myGenericArray[0][0] = 84; expect(rootNode.resolvePath("myGenericArray[0][0]")).toEqual(84); state.myGenericArray[1].set("a", 85); expect(rootNode.resolvePath("myGenericArray[1][a]")).toEqual(85); // Assign primitive array property state.myGenericArray[0] = state.constantCollections.primitiveArray; expect(rootNode.resolvePath("myGenericArray[0]").getLength()).toEqual(3); expect(rootNode.resolvePath("myGenericArray[0][0]")).toEqual(42); expect(rootNode.resolvePath("myGenericArray[0][1]")).toEqual(43); 
expect(rootNode.resolvePath("myGenericArray[0][2]")).toEqual(44); // Assign non-primitive array property rootNode.resolvePath("myGenericArray").shift(); rootNode.resolvePath("myGenericArray").unshift( PropertyFactory.create(vector2DTemplate.typeid, "array")); state.myGenericArray[0] = state.constantCollections.nonPrimitiveArray; expect(rootNode.resolvePath("myGenericArray[0]").getLength()).toEqual(2); expect(rootNode.resolvePath("myGenericArray[0][0].x").getValue()).toEqual(42); expect(rootNode.resolvePath("myGenericArray[0][0].y").getValue()).toEqual(43); expect(rootNode.resolvePath("myGenericArray[0][1].x").getValue()).toEqual(44); expect(rootNode.resolvePath("myGenericArray[0][1].y").getValue()).toEqual(45); // Assign primitive map property state.myGenericArray[1] = state.constantCollections.primitiveMap; expect(rootNode.resolvePath("myGenericArray[1]").getIds().length).toEqual(2); expect(rootNode.resolvePath("myGenericArray[1][a]")).toEqual(42); expect(rootNode.resolvePath("myGenericArray[1][b]")).toEqual(43); // Assign non-primitive map property rootNode.get("myGenericArray").remove(1); rootNode.get("myGenericArray").insert(1, PropertyFactory.create(vector2DTemplate.typeid, "map")); state.myGenericArray[1] = state.constantCollections.nonPrimitiveMap; expect(rootNode.resolvePath("myGenericArray[1]").getIds().length).toEqual(2); expect(rootNode.resolvePath("myGenericArray[1][a].x").getValue()).toEqual(42); expect(rootNode.resolvePath("myGenericArray[1][a].y").getValue()).toEqual(43); expect(rootNode.resolvePath("myGenericArray[1][b].x").getValue()).toEqual(44); expect(rootNode.resolvePath("myGenericArray[1][b].y").getValue()).toEqual(45); // Assign a set state.myGenericArray[2] = state.constantCollections.bookSet; const setEntries = rootNode.resolvePath("myGenericArray[2]").getAsArray(); expect(setEntries.length).toEqual(2); expect(setEntries[0].get("book").getValue()).toEqual("The Hobbit"); expect(setEntries[0].get("author").getValue()).toEqual("Tolkien"); 
expect(setEntries[1].get("book").getValue()).toEqual("Faust"); expect(setEntries[1].get("author").getValue()).toEqual("Goethe"); }); }); describe("Map", function() { describe("Int32 Maps", function() { it("should return the size of the map using .size property", function() { const testMap = state.myTestProperty.myMap; expect(testMap.size).toEqual(3); }); it("should be able to access the map using a for-of loop", function() { const testMap = state.myTestProperty.myMap; const tempArray: any[] = []; for (const entry of testMap) { tempArray.push(entry); } expect(tempArray[0][1]).toEqual(1111); expect(tempArray[1][1]).toEqual(2222); expect(tempArray[2][1]).toEqual(3333); }); it("should return true on `instanceOf` checks", function() { expect((state.myTestProperty.myMap instanceof Map)).toEqual(true); }); it("check .entries() functionality", function() { const entriesIterator = state.myTestProperty.myMap.entries(); expect(entriesIterator.next().value.toString()).toEqual("firstNumber,1111"); expect(entriesIterator.next().value.toString()).toEqual("secondNumber,2222"); expect(entriesIterator.next().value.toString()).toEqual("thirdNumber,3333"); }); it("check .forEach() functionality", function() { const testMap = state.myTestProperty.myMap; const tempArray: number[] = []; const squareIt = function(value) { tempArray.push(value * value); }; testMap.forEach(squareIt); expect(tempArray[0]).toEqual(1111 * 1111); expect(tempArray[1]).toEqual(2222 * 2222); expect(tempArray[2]).toEqual(3333 * 3333); }); it("check .get() functionality", function() { const testMap = state.myTestProperty.myMap; expect(testMap.get("firstNumber")).toEqual(1111); }); it("check .keys() functionality", function() { const keysIterator = state.myTestProperty.myMap.keys(); expect(keysIterator.next().value).toEqual("firstNumber"); expect(keysIterator.next().value).toEqual("secondNumber"); expect(keysIterator.next().value).toEqual("thirdNumber"); }); it("check .toString() functionality", function() { const 
testMap = state.myTestProperty.myMap;
// Native Map with the same entries; the proxy's toString() must match it.
const mapWithJsOutfit = new Map();
mapWithJsOutfit.set("firstNumber", 1111);
mapWithJsOutfit.set("secondNumber", 2222);
mapWithJsOutfit.set("thirdNumber", 3333);
expect(testMap.toString()).toEqual(mapWithJsOutfit.toString());
});
it("check .values() functionality", function() {
    const valuesIterator = state.myTestProperty.myMap.values();
    expect(valuesIterator.next().value).toEqual(1111);
    expect(valuesIterator.next().value).toEqual(2222);
    expect(valuesIterator.next().value).toEqual(3333);
});
it("should reflect remote changes", function() {
    const myMap = state.myTestProperty.myMap;
    expect(myMap.size).toEqual(3);
    rootNode.resolvePath("myTestProperty.myMap").insert("fourthNumber", 4444);
    expect(myMap.size).toEqual(4);
    expect(myMap.get("fourthNumber")).toEqual(4444);
    rootNode.resolvePath("myTestProperty.myMap").remove("fourthNumber");
    expect(myMap.size).toEqual(3);
});
describe("Setting", function() {
    // Restore the canonical three-entry map after each mutating test.
    afterEach(function() {
        rootNode.resolvePath("myTestProperty.myMap").clear();
        rootNode.resolvePath("myTestProperty.myMap").insert("firstNumber", 1111);
        rootNode.resolvePath("myTestProperty.myMap").insert("secondNumber", 2222);
        rootNode.resolvePath("myTestProperty.myMap").insert("thirdNumber", 3333);
    });
    it("check .clear() functionality", function() {
        expect(rootNode.resolvePath("myTestProperty.myMap").getIds().length).toEqual(3);
        state.myTestProperty.myMap.clear();
        expect(rootNode.resolvePath("myTestProperty.myMap").getIds().length).toEqual(0);
    });
    it("check .delete() functionality", function() {
        expect(state.myTestProperty.myMap.has("firstNumber")).toEqual(true);
        expect(rootNode.get("myTestProperty").get("myMap").has("firstNumber")).toEqual(true);
        expect(state.myTestProperty.myMap.delete("firstNumber")).toEqual(true);
        expect(state.myTestProperty.myMap.delete("nonExistingEntry")).toEqual(false);
        // BUG FIX: was has("fistNumber") — a typo'd key that never exists, so the
        // assertion passed regardless of the delete; the deleted key is "firstNumber"
        // (mirrors the rootNode-side check that follows).
        expect(state.myTestProperty.myMap.has("firstNumber")).toEqual(false);
expect(rootNode.get("myTestProperty").get("myMap").has("firstNumber")).toEqual(false); }); it("check .set() functionality", function() { // Modify entry state.myTestProperty.myMap.set("firstNumber", 42); expect(rootNode.resolvePath("myTestProperty.myMap[firstNumber]")).toEqual(42); // Insert entry state.myTestProperty.myMap.set("fourthNumber", 4444); expect(rootNode.resolvePath("myTestProperty.myMap[fourthNumber]")).toEqual(4444); // Insert (proxied) property state.myTestProperty.myMap.set("fifthNumber", PropertyFactory.create("Int32", "single", 5555)); expect(rootNode.resolvePath("myTestProperty.myMap[fifthNumber]")).toEqual(5555); state.myTestProperty.myMap.set("sixthNumber", PropertyProxy.proxify(PropertyFactory.create("Int32", "single", 6666))); expect(rootNode.resolvePath("myTestProperty.myMap[sixthNumber]")).toEqual(6666); // Insert non matching property const floatProperty = PropertyFactory.create("Float32", "single", 7.7); state.myTestProperty.myMap.set("seventhNumber", floatProperty); expect(rootNode.resolvePath("myTestProperty.myMap[seventhNumber]")).toEqual(7); }); it("should be possible to assign another MapProperty", function() { state.myTestProperty.myMap = state.constantCollections.primitiveMap; const map = rootNode.resolvePath("myTestProperty.myMap"); expect(map.getIds().length).toEqual(2); expect(map.get("a")).toEqual(42); expect(map.get("b")).toEqual(43); }); it("should be possible to assign a new iterable", function() { const checkAssignment = () => { expect(rootNode.resolvePath("myTestProperty.myMap[a]")).toEqual(1); expect(rootNode.resolvePath("myTestProperty.myMap[b]")).toEqual(2); rootNode.resolvePath("myTestProperty.myMap").clear(); }; // Assign pure javascript iterables const entries: [string, number][] = [["a", 1], ["b", 2]]; state.myTestProperty.myMap = new Map<string, number>(entries); checkAssignment(); state.myTestProperty.myMap = entries; checkAssignment(); // Assign iterables of properties const entriesAsProperties = (): [string, 
BaseProperty][] => [ [entries[0][0], PropertyFactory.create("Int32", "single", entries[0][1])], [entries[1][0], PropertyFactory.create("Int32", "single", entries[1][1])], ]; state.myTestProperty.myMap = new Map(entriesAsProperties()); checkAssignment(); state.myTestProperty.myMap = entriesAsProperties(); checkAssignment(); }); }); }); describe("Maps with complex entries", function() { it("should return the size of the complex-map using .size property", function() { const testMap = state.myTestProperty.myComplexMap; expect(testMap.size).toEqual(3); }); it("for-of loop corresponding to complex-map", function() { const testMap = state.myTestProperty.myComplexMap; const tempArray: any[] = []; for (const entry of testMap) { tempArray.push(entry); } expect(tempArray[0][1].x).toEqual(10); expect(tempArray[0][1].y).toEqual(20); expect(tempArray[1][1].x).toEqual(30); expect(tempArray[1][1].y).toEqual(40); expect(tempArray[2][1].x).toEqual(50); expect(tempArray[2][1].y).toEqual(60); }); it("check .entries() functionality", function() { const entriesIterator = state.myTestProperty.myComplexMap.entries(); let first = false; let second = false; let third = false; let current = entriesIterator.next(); while (!current.done) { switch (current.value[0]) { case "firstEntry": first = true; expect(current.value[1].x).toEqual(10); expect(current.value[1].y).toEqual(20); break; case "secondEntry": second = true; expect(current.value[1].x).toEqual(30); expect(current.value[1].y).toEqual(40); break; case "thirdEntry": third = true; expect(current.value[1].x).toEqual(50); expect(current.value[1].y).toEqual(60); break; default: break; } current = entriesIterator.next(); } expect(first).toEqual(true); expect(second).toEqual(true); expect(third).toEqual(true); }); it("check .forEach() functionality", function() { let first = false; let second = false; let third = false; state.myTestProperty.myComplexMap.forEach(function(value, key) { switch (key) { case "firstEntry": first = true; 
expect(value.x).toEqual(10);
expect(value.y).toEqual(20);
break;
case "secondEntry":
    second = true;
    expect(value.x).toEqual(30);
    expect(value.y).toEqual(40);
    break;
case "thirdEntry":
    third = true;
    expect(value.x).toEqual(50);
    expect(value.y).toEqual(60);
    break;
default:
    break;
}
});
expect(first).toEqual(true);
expect(second).toEqual(true);
expect(third).toEqual(true);
});
it("should access a complex-map using get()", function() {
    const testMap = state.myTestProperty.myComplexMap;
    expect(testMap.get("firstEntry").x).toEqual(10);
    expect(testMap.get("firstEntry").y).toEqual(20);
    expect(testMap.get("secondEntry").x).toEqual(30);
    expect(testMap.get("secondEntry").y).toEqual(40);
    expect(testMap.get("thirdEntry").x).toEqual(50);
    expect(testMap.get("thirdEntry").y).toEqual(60);
});
it("check .keys() functionality", function() {
    const keysIterator = state.myTestProperty.myComplexMap.keys();
    expect(keysIterator.next().value).toEqual("firstEntry");
    expect(keysIterator.next().value).toEqual("secondEntry");
    expect(keysIterator.next().value).toEqual("thirdEntry");
});
it("check .toString() functionality", function() {
    // BUG FIX: read state.myTestProperty.myMap (copy-paste from the Int32 map
    // suite); this suite exercises myComplexMap.
    const testMap = state.myTestProperty.myComplexMap;
    const mapWithJsOutfit = new Map();
    mapWithJsOutfit.set("firstEntry", { x: 10, y: 20 });
    mapWithJsOutfit.set("secondEntry", { x: 30, y: 40 });
    mapWithJsOutfit.set("thirdEntry", { x: 50, y: 60 });
    expect(testMap.toString()).toEqual(mapWithJsOutfit.toString());
});
it("check .values() functionality", function() {
    const valuesIterator = state.myTestProperty.myComplexMap.values();
    expect(valuesIterator.next().value.x).toEqual(10);
    expect(valuesIterator.next().value.x).toEqual(30);
    expect(valuesIterator.next().value.x).toEqual(50);
});
it("should reflect remote changes", function() {
    const myComplexMap = state.myTestProperty.myComplexMap;
    expect(myComplexMap.size).toEqual(3);
    rootNode.resolvePath("myTestProperty.myComplexMap").insert("fourthEntry",
        PropertyFactory.create(vector2DTemplate.typeid, "single", { x: 70, y: 80 }));
expect(myComplexMap.size).toEqual(4); expect(myComplexMap.get("fourthEntry").x).toEqual(70); rootNode.resolvePath("myTestProperty.myComplexMap").remove("fourthEntry"); expect(myComplexMap.size).toEqual(3); }); describe("Setting", function() { afterEach(function() { rootNode.resolvePath("myTestProperty.myComplexMap").clear(); rootNode.resolvePath("myTestProperty.myComplexMap").insert("firstEntry", PropertyFactory.create(vector2DTemplate.typeid, "single", { x: 10, y: 20 })); rootNode.resolvePath("myTestProperty.myComplexMap").insert("secondEntry", PropertyFactory.create(vector2DTemplate.typeid, "single", { x: 30, y: 40 })); rootNode.resolvePath("myTestProperty.myComplexMap").insert("thirdEntry", PropertyFactory.create(vector2DTemplate.typeid, "single", { x: 50, y: 60 })); }); it("check .clear() functionality", function() { expect(rootNode.resolvePath("myTestProperty.myComplexMap").getIds().length).toEqual(3); state.myTestProperty.myComplexMap.clear(); expect(rootNode.resolvePath("myTestProperty.myComplexMap").getIds().length).toEqual(0); }); it("check .delete() functionality", function() { expect(state.myTestProperty.myComplexMap.has("firstEntry")).toEqual(true); expect(rootNode.get("myTestProperty").get("myComplexMap").has("firstEntry")).toEqual(true); expect(state.myTestProperty.myComplexMap.delete("firstEntry")).toEqual(true); expect(state.myTestProperty.myComplexMap.delete("nonExistingEntry")).toEqual(false); expect(state.myTestProperty.myComplexMap.has("firstEntry")).toEqual(false); expect(rootNode.get("myTestProperty").get("myComplexMap").has("firstEntry")).toEqual(false); }); it("check .set() functionality", function() { // replace entry state.myTestProperty.myComplexMap.set("firstEntry", PropertyFactory .create("autodesk.appframework.tests:myVector2D-1.0.0", "single", { x: 7, y: 8 })); expect(rootNode.resolvePath("myTestProperty.myComplexMap[firstEntry].x").getValue()).toEqual(7); 
expect(rootNode.resolvePath("myTestProperty.myComplexMap[firstEntry].y").getValue()).toEqual(8); // replace with polymorphic state.myTestProperty.myComplexMap.set("firstEntry", PropertyFactory.create(vector3DTemplate.typeid, "single", { x: 10, y: 20, z: 1 })); expect(rootNode .resolvePath("myTestProperty.myComplexMap[firstEntry].x").getValue()).toEqual(10); expect(rootNode .resolvePath("myTestProperty.myComplexMap[firstEntry].y").getValue()).toEqual(20); expect(rootNode .resolvePath("myTestProperty.myComplexMap[firstEntry].z").getValue()).toEqual(1); // insert entry state.myTestProperty.myComplexMap.set("fourthEntry", { x: 70, y: 80 }); expect(rootNode .resolvePath("myTestProperty.myComplexMap[fourthEntry].x").getValue()).toEqual(70); expect(rootNode .resolvePath("myTestProperty.myComplexMap[fourthEntry].y").getValue()).toEqual(80); // proxied property state.myTestProperty.myComplexMap.set("fifthEntry", PropertyProxy.proxify(PropertyFactory.create(vector2DTemplate.typeid, "single", { x: 90, y: 100 }))); expect(rootNode .resolvePath("myTestProperty.myComplexMap[fifthEntry].x").getValue()).toEqual(90); expect(rootNode .resolvePath("myTestProperty.myComplexMap[fifthEntry].y").getValue()).toEqual(100); // polymorphic state.myTestProperty.myComplexMap.set("sixthEntry", PropertyFactory.create(vector3DTemplate.typeid, "single", { x: 110, y: 120, z: 1 })); expect(rootNode .resolvePath("myTestProperty.myComplexMap[sixthEntry].x").getValue()).toEqual(110); expect(rootNode .resolvePath("myTestProperty.myComplexMap[sixthEntry].y").getValue()).toEqual(120); expect(rootNode .resolvePath("myTestProperty.myComplexMap[sixthEntry].z").getValue()).toEqual(1); }); it("should be possible to assign another MapProperty", function() { state.myTestProperty.myComplexMap = state.constantCollections.nonPrimitiveMap; const myComplexMap = rootNode.resolvePath("myTestProperty.myComplexMap"); expect(myComplexMap.getIds().length).toEqual(2); 
expect(myComplexMap.get("a").get("x").getValue()).toEqual(42); expect(myComplexMap.get("a").get("y").getValue()).toEqual(43); expect(myComplexMap.get("b").get("x").getValue()).toEqual(44); expect(myComplexMap.get("b").get("y").getValue()).toEqual(45); }); it("should be possible to assign a new iterable", function() { const checkAssignment = () => { expect(rootNode.resolvePath("myTestProperty.myComplexMap[a].x").getValue()).toEqual(1); expect(rootNode.resolvePath("myTestProperty.myComplexMap[a].y").getValue()).toEqual(2); expect(rootNode.resolvePath("myTestProperty.myComplexMap[b].x").getValue()).toEqual(3); expect(rootNode.resolvePath("myTestProperty.myComplexMap[b].y").getValue()).toEqual(4); rootNode.resolvePath("myTestProperty.myComplexMap").clear(); }; // Assign pure javascript iterables const entries: [string, Record<string, number>][] = [["a", { x: 1, y: 2 }], ["b", { x: 3, y: 4 }]]; state.myTestProperty.myComplexMap = new Map(entries); checkAssignment(); state.myTestProperty.myComplexMap = entries; checkAssignment(); // Assign iterables of properties const entriesAsProperties = (): [string, BaseProperty][] => [ [entries[0][0], PropertyFactory.create(vector2DTemplate.typeid, "single", entries[0][1])], [entries[1][0], PropertyFactory.create(vector2DTemplate.typeid, "single", entries[1][1])], ]; state.myTestProperty.myComplexMap = new Map(entriesAsProperties()); checkAssignment(); state.myTestProperty.myComplexMap = entriesAsProperties(); checkAssignment(); }); }); }); describe("Map of collections", function() { it("should be able to access nested collection", function() { expect(state.myGenericMap.size).toEqual(3); // Nested array expect(state.myGenericMap.get("array")[0]).toEqual(0); expect(state.myGenericMap.get("array")[1]).toEqual(1); expect(state.myGenericMap.get("array")[2]).toEqual(2); expect(state.myGenericMap.get("array")[3]).toEqual(3); // Nested map expect(state.myGenericMap.get("map").get("a")).toEqual(0); 
expect(state.myGenericMap.get("map").get("b")).toEqual(1); expect(state.myGenericMap.get("map").get("c")).toEqual(2); // Nested set expect(state.myGenericMap.get("set").size).toEqual(2); }); it("should be able to set entries of nested collections", function() { state.myGenericMap.get("array")[0] = 84; expect(rootNode.resolvePath("myGenericMap[array][0]")).toEqual(84); state.myGenericMap.get("map").set("a", 85); expect(rootNode.resolvePath("myGenericMap[map][a]")).toEqual(85); // Assign primitive array property state.myGenericMap.set("array", state.constantCollections.primitiveArray.getProperty().clone()); expect(rootNode.resolvePath("myGenericMap[array]").getLength()).toEqual(3); expect(rootNode.resolvePath("myGenericMap[array][0]")).toEqual(42); expect(rootNode.resolvePath("myGenericMap[array][1]")).toEqual(43); expect(rootNode.resolvePath("myGenericMap[array][2]")).toEqual(44); // Assign non-primitive array property state.myGenericMap.set("array", state.constantCollections.nonPrimitiveArray.getProperty().clone()); expect(rootNode.resolvePath("myGenericMap[array]").getLength()).toEqual(2); expect(rootNode.resolvePath("myGenericMap[array][0].x").getValue()).toEqual(42); expect(rootNode.resolvePath("myGenericMap[array][0].y").getValue()).toEqual(43); expect(rootNode.resolvePath("myGenericMap[array][1].x").getValue()).toEqual(44); expect(rootNode.resolvePath("myGenericMap[array][1].y").getValue()).toEqual(45); // Assign primitive map property state.myGenericMap.set("map", state.constantCollections.primitiveMap.getProperty().clone()); expect(rootNode.resolvePath("myGenericMap[map]").getIds().length).toEqual(2); expect(rootNode.resolvePath("myGenericMap[map][a]")).toEqual(42); expect(rootNode.resolvePath("myGenericMap[map][b]")).toEqual(43); // Assign non-primitive map property state.myGenericMap.set("map", state.constantCollections.nonPrimitiveMap.getProperty().clone()); expect(rootNode.resolvePath("myGenericMap[map]").getIds().length).toEqual(2); 
expect(rootNode.resolvePath("myGenericMap[map][a].x").getValue()).toEqual(42); expect(rootNode.resolvePath("myGenericMap[map][a].y").getValue()).toEqual(43); expect(rootNode.resolvePath("myGenericMap[map][b].x").getValue()).toEqual(44); expect(rootNode.resolvePath("myGenericMap[map][b].y").getValue()).toEqual(45); // Assign a set state.myGenericMap.set("set", state.constantCollections.bookSet.getProperty().clone()); const setEntries = rootNode.resolvePath("myGenericMap[set]").getAsArray(); expect(setEntries.length).toEqual(2); expect(setEntries[0].get("book").getValue()).toEqual("The Hobbit"); expect(setEntries[0].get("author").getValue()).toEqual("Tolkien"); expect(setEntries[1].get("book").getValue()).toEqual("Faust"); expect(setEntries[1].get("author").getValue()).toEqual("Goethe"); }); }); }); describe("Set", function() { afterEach(function() { rootNode.resolvePath("myTestProperty.myBookSet").clear(); rootNode.resolvePath("myTestProperty.myBookSet").insert( PropertyFactory.create(bookDataTemplate.typeid, "single", { book: "Principia Mathematica", author: "Newton" })); rootNode.resolvePath("myTestProperty.myBookSet").insert( PropertyFactory.create(bookDataTemplate.typeid, "single", { book: "Chamber of Secrets", author: "Rowling" })); rootNode.resolvePath("myTestProperty.myBookSet").insert( PropertyFactory.create(bookDataTemplate.typeid, "single", { book: "Brief History of Time", author: "Hawking" })); }); it("should return the size of the map using .size property", function() { const testSet = state.myTestProperty.myBookSet; expect(testSet.size).toEqual(3); }); it("should be able to access the set using a for-of loop", function() { const testSet = state.myTestProperty.myBookSet; const tempArray: any[] = []; for (const value of testSet) { tempArray.push(value); } expect(tempArray[0].book === "Principia Mathematica"); expect(tempArray[0].author === "Newton"); expect(tempArray[1].book === "Chamber of Secrets"); expect(tempArray[1].author === "Rowling"); 
expect(tempArray[2].book === "Brief History of Time"); expect(tempArray[2].author === "Hawking"); }); it("should return true on `instanceOf` checks", function() { expect((state.myTestProperty.myBookSet instanceof Set)).toEqual(true); }); it("check .add() functionality", function() { // Add object state.myTestProperty.myBookSet.add({ author: "Tolkien", book: "The Hobbit" }); // Add property state.myTestProperty.myBookSet.add(PropertyFactory.create(bookDataTemplate.typeid, "single", { author: "Goethe", book: "Faust" })); const bookSet = rootNode.get("myTestProperty").get("myBookSet").getAsArray(); expect(bookSet[bookSet.length - 2].get("author").getValue()).toEqual("Tolkien"); expect(bookSet[bookSet.length - 2].get("book").getValue()).toEqual("The Hobbit"); expect(bookSet[bookSet.length - 1].get("author").getValue()).toEqual("Goethe"); expect(bookSet[bookSet.length - 1].get("book").getValue()).toEqual("Faust"); }); it("check .clear() functionality", function() { expect(rootNode.resolvePath("myTestProperty.myBookSet").getIds().length).toEqual(3); state.myTestProperty.myBookSet.clear(); expect(rootNode.resolvePath("myTestProperty.myBookSet").getIds().length).toEqual(0); }); it("check .delete() functionality", function() { const myProperty = PropertyFactory.create(bookDataTemplate.typeid, "single", { author: "Sagan", book: "Contact" }); state.myTestProperty.myBookSet.add(myProperty); expect(rootNode.get("myTestProperty").get("myBookSet").has(myProperty.getId())).toEqual(true); expect(state.myTestProperty.myBookSet.has(myProperty)).toEqual(true); // Should be able to delete it expect(state.myTestProperty.myBookSet.delete(myProperty)).toEqual(true); // Should no longer be able to delete it expect(state.myTestProperty.myBookSet.delete(myProperty)).toEqual(false); expect(rootNode.get("myTestProperty").get("myBookSet").has(myProperty.getId())).toEqual(false); expect(state.myTestProperty.myBookSet.has(myProperty)).toEqual(false); }); it("check .entries() functionality", 
function() { const entriesIterator = state.myTestProperty.myBookSet.entries(); expect(entriesIterator.next().value[0].author).toEqual("Newton"); expect(entriesIterator.next().value[0].author).toEqual("Rowling"); expect(entriesIterator.next().value[0].author).toEqual("Hawking"); expect(entriesIterator.next().done).toEqual(true); }); it("check .forEach() functionality", function() { const testSet = state.myTestProperty.myBookSet; const tempArray: any[] = []; const testCallback = function(entry) { tempArray.push(entry.book); tempArray.push(entry.author); }; testSet.forEach(testCallback); expect(tempArray[0]).toEqual("Principia Mathematica"); expect(tempArray[1]).toEqual("Newton"); expect(tempArray[2]).toEqual("Chamber of Secrets"); expect(tempArray[3]).toEqual("Rowling"); expect(tempArray[4]).toEqual("Brief History of Time"); expect(tempArray[5]).toEqual("Hawking"); }); it("check .toString() functionality", function() { const testSet = state.myTestProperty.myBookSet; const setWithJsOutFit = new Set(); expect(testSet.toString()).toEqual(setWithJsOutFit.toString()); }); it("check .values()/iterator functionality", function() { const testSet = state.myTestProperty.myBookSet; const tempArray: any[] = []; const iterator = testSet.values(); for (let i = 0; i < testSet.size; i++) { tempArray.push(iterator.next().value); expect(typeof (tempArray[i].guid)).toEqual("string"); } expect(tempArray[0].book).toEqual("Principia Mathematica"); expect(tempArray[0].author).toEqual("Newton"); expect(tempArray[1].book).toEqual("Chamber of Secrets"); expect(tempArray[1].author).toEqual("Rowling"); expect(tempArray[2].book).toEqual("Brief History of Time"); expect(tempArray[2].author).toEqual("Hawking"); }); it("should reflect remote changes", function() { const myBookSet = state.myTestProperty.myBookSet; expect(myBookSet.size).toEqual(3); rootNode.resolvePath("myTestProperty.myBookSet").insert( PropertyFactory .create(bookDataTemplate.typeid, "single", { author: "Tolkien", book: "The 
Hobbit" })); expect(myBookSet.size).toEqual(4); }); it("should be possible to assign a new iterable", function() { const checkAssignment = () => { let foundHobbit = false; let foundFaust = false; rootNode.resolvePath("myTestProperty.myBookSet").getAsArray().forEach(function(value) { if (value.get("book").getValue() === "The Hobbit") { foundHobbit = true; expect(value.get("author").getValue()).toEqual("Tolkien"); } else if (value.get("book").getValue() === "Faust") { foundFaust = true; expect(value.get("author").getValue()).toEqual("Goethe"); } }); expect(foundHobbit).toEqual(true); expect(foundFaust).toEqual(true); expect(rootNode.resolvePath("myTestProperty.myBookSet").getIds().length).toEqual(2); rootNode.resolvePath("myTestProperty.myBookSet").clear(); expect(rootNode.resolvePath("myTestProperty.myBookSet").getIds().length).toEqual(0); }; // Assign pure javascript iterables const books = [{ author: "Tolkien", book: "The Hobbit" }, { author: "Goethe", book: "Faust" }]; state.myTestProperty.myBookSet = new Set(books); checkAssignment(); state.myTestProperty.myBookSet = books; checkAssignment(); // Assign iterables of properties const booksAsProperties = () => [ PropertyFactory.create(bookDataTemplate.typeid, "single", books[0]), PropertyFactory.create(bookDataTemplate.typeid, "single", books[1]), ]; state.myTestProperty.myBookSet = new Set(booksAsProperties()); checkAssignment(); state.myTestProperty.myBookSet = booksAsProperties(); checkAssignment(); // Assign iterables of properties in the property tree should throw expect(() => { state.myTestProperty.myBookSet = new Set([rootNode.get("myBook")]); }).toThrow(); // Assigning a non-iterable should throw expect(() => { state.myTestProperty.myBookSet = booksAsProperties()[0]; }).toThrow( "PropertyProxy-003"); }); }); }); });
the_stack
import { AppState } from "./AppState"; import { Constants as C } from "./Constants"; import { PropsIntf } from "./intf/PropsIntf"; import * as J from "./JavaIntf"; import { PubSub } from "./PubSub"; import { Singletons } from "./Singletons"; import { PropTable } from "./widget/PropTable"; import { PropTableCell } from "./widget/PropTableCell"; import { PropTableRow } from "./widget/PropTableRow"; let S: Singletons; PubSub.sub(C.PUBSUB_SingletonsReady, (s: Singletons) => { S = s; }); export class Props implements PropsIntf { readOnlyPropertyList: Set<string> = new Set<string>(); allBinaryProps: Set<string> = new Set<string>(); // allProps: Map<string, J.NodeProp> = new Map<string, J.NodeProp>(); /* Holds the list of properties that are edited using something like a checkbox, or dropdown menu, or whatever, such that it would never make sense to display an edit field for editing their value in the editor */ controlBasedPropertyList: Set<string> = new Set<string>(); orderProps = (propOrder: string[], _props: J.PropertyInfo[]): J.PropertyInfo[] => { const propsNew: J.PropertyInfo[] = S.util.arrayClone(_props); let targetIdx: number = 0; for (const prop of propOrder) { targetIdx = this.moveNodePosition(propsNew, targetIdx, prop); } return propsNew; } /* copies all the binary properties from source node to destination node */ transferBinaryProps = (srcNode: J.NodeInfo, dstNode: J.NodeInfo): void => { if (!srcNode.properties) return; dstNode.properties = dstNode.properties || []; this.allBinaryProps.forEach(k => { let propVal = this.getNodePropVal(k, srcNode); if (propVal) { this.setNodePropVal(k, dstNode, propVal); } else { this.deleteProp(dstNode, k); } }); } moveNodePosition = (props: J.PropertyInfo[], idx: number, typeName: string): number => { const tagIdx: number = S.util.arrayIndexOfItemByProp(props, "name", typeName); if (tagIdx !== -1) { S.util.arrayMoveItem(props, tagIdx, idx++); } return idx; } /* * Toggles display of properties in the gui. 
*/ propsToggle = async (state: AppState): Promise<void> => { state.showProperties = !state.showProperties; } deleteProp = (node: J.NodeInfo, propertyName: string): void => { if (node.properties) { for (let i = 0; i < node.properties.length; i++) { if (propertyName === node.properties[i].name) { // splice is how to delete array elements in js. node.properties.splice(i, 1); break; } } } } /* Moves all the properties listed in propList array to the end of the list of properties and keeps them in the order specified */ private movePropsToTop = (propsList: string[], props: J.PropertyInfo[]) => { for (const prop of propsList) { const tagIdx = S.util.arrayIndexOfItemByProp(props, "name", prop); if (tagIdx !== -1) { S.util.arrayMoveItem(props, tagIdx, 0); } } } /* Moves all the properties listed in propList array to the end of the list of properties and keeps them in the order specified */ private movePropsToEnd = (propsList: string[], props: J.PropertyInfo[]) => { for (const prop of propsList) { const tagIdx = S.util.arrayIndexOfItemByProp(props, "name", prop); if (tagIdx !== -1) { S.util.arrayMoveItem(props, tagIdx, props.length); } } } /* * properties will be null or a list of PropertyInfo objects. 
*/ renderProperties = (properties: J.PropertyInfo[]): PropTable => { if (properties) { const propTable = new PropTable({ border: "1", className: "property-table" // "sourceClass" : "[propsTable]" }); properties.forEach(function (property: J.PropertyInfo) { // console.log("Render Prop: "+property.name); const propNameCell = new PropTableCell(property.name, { className: "prop-table-name-col" }); const valCellAttrs = { className: "prop-table-val-col" }; const propValCell: PropTableCell = new PropTableCell(property.value, valCellAttrs); const propTableRow = new PropTableRow({ className: "prop-table-row" }, [propNameCell, propValCell]); propTable.addChild(propTableRow); }); return propTable; } else { return null; } } /* * brute force searches on node (NodeInfo.java) object properties list, and returns the first property * (PropertyInfo.java) with name matching propertyName, else null. */ getNodeProp = (propName: string, node: J.NodeInfo): J.PropertyInfo => { if (!node || !node.properties) { return null; } return node.properties.find(p => p.name === propName); } getClientProp = (propName: string, node: J.NodeInfo): J.PropertyInfo => { if (!node || !node.clientProps) { return null; } return node.clientProps.find(p => p.name === propName); } /* Gets the crypto key from this node that will allow user to decrypt the node. If the user is the owner of the node this simply returns the ENC_KEY property but if not we look up in the ACL on the node a copy of the encrypted key that goes with the current user (us, logged in user), which should decrypt using our private key. 
*/ getCryptoKey = (node: J.NodeInfo, state: AppState) => { if (!node) return null; let cipherKey = null; /* if we own this node then this cipherKey for it will be ENC_KEY for us */ if (state.userName === node.owner) { cipherKey = this.getNodePropVal(J.NodeProp.ENC_KEY, node); // console.log("getting cipherKey for node, from ENC_KEY: " + cipherKey); } /* else if the server has provided the cipher key to us from the ACL (AccessControl) then use it. */ else { cipherKey = node.cipherKey; // console.log("getting cipherKey from node.cipherKey (not your node): " + cipherKey); } return cipherKey; } isShared = (node: J.NodeInfo): boolean => { return !!node.ac; } isPublic = (node: J.NodeInfo): boolean => { return node && node.ac && !!node.ac.find(ace => ace.principalNodeId === "public"); } isPublicWritable = (node: J.NodeInfo): boolean => { return node && node.ac && !!node.ac.find(ace => ace.principalNodeId === "public" && this.hasPrivilege(ace, J.PrivilegeType.WRITE)); } isPublicReadOnly = (node: J.NodeInfo): boolean => { return node && node.ac && !!node.ac.find(ace => ace.principalNodeId === "public" && !this.hasPrivilege(ace, J.PrivilegeType.WRITE)); } getAcCount = (node: J.NodeInfo): number => { return node && node.ac ? 
node.ac.length : 0; } hasPrivilege = (ace: J.AccessControlInfo, priv: string): boolean => { if (!ace.privileges) return false; return !!ace.privileges.find(p => p.privilegeName.indexOf(priv) !== -1); } isMine = (node: J.NodeInfo, state: AppState): boolean => { if (!node || !state.userName || state.userName === J.PrincipalName.ANON) return false; return state.userName === node.owner; } isEncrypted = (node: J.NodeInfo): boolean => { return !!this.getNodePropVal(J.NodeProp.ENC_KEY, node); } hasBinary = (node: J.NodeInfo): boolean => { return !!this.getNodePropVal(J.NodeProp.BIN, node) || !!this.getNodePropVal(J.NodeProp.BIN_URL, node) || !!this.getNodePropVal(J.NodeProp.IPFS_LINK, node); } hasImage = (node: J.NodeInfo): boolean => { const target = this.getNodePropVal(J.NodeProp.BIN_MIME, node); return (target && target.startsWith("image/")); } hasAudio = (node: J.NodeInfo): boolean => { const target = this.getNodePropVal(J.NodeProp.BIN_MIME, node); return (target && target.startsWith("audio/")); } hasVideo = (node: J.NodeInfo): boolean => { const target = this.getNodePropVal(J.NodeProp.BIN_MIME, node); return (target && target.startsWith("video/")); } getNodePropVal = (propertyName: string, node: J.NodeInfo): string => { const prop: J.PropertyInfo = this.getNodeProp(propertyName, node); return prop ? prop.value : null; } getClientPropVal = (propertyName: string, node: J.NodeInfo): string => { const prop: J.PropertyInfo = this.getClientProp(propertyName, node); return prop ? 
prop.value : null; } setNodePropVal = (propertyName: string, node: J.NodeInfo, val: any): void => { let prop: J.PropertyInfo = this.getNodeProp(propertyName, node); /* If we found a property by propertyName, then set it's value */ if (prop) { prop.value = val; } /* Else this is a new property we must add (ret remains true here) */ else { prop = { name: propertyName, value: val }; if (!node.properties) { node.properties = []; } node.properties.push(prop); } } setNodeProp = (node: J.NodeInfo, newProp: J.PropertyInfo): void => { if (!newProp) return; const prop: J.PropertyInfo = this.getNodeProp(newProp.name, node); /* If we found a property by propertyName, then set it's value */ if (prop) { prop.value = newProp.value; } /* Else this is a new property we must add (ret remains true here) */ else { if (!node.properties) { node.properties = []; } node.properties.push(newProp); } } // here's the simple mode property hider! initConstants = () => { S.util.addAllToSet(this.allBinaryProps, [ // J.NodeProp.IMG_WIDTH, // J.NodeProp.IMG_HEIGHT, // J.NodeProp.IMG_SIZE, // J.NodeProp.BIN_MIME, // J.NodeProp.BIN, // J.NodeProp.BIN_URL, // J.NodeProp.BIN_FILENAME, // J.NodeProp.BIN_SIZE, // J.NodeProp.BIN_DATA_URL, J.NodeProp.IPFS_LINK, // J.NodeProp.IPFS_LINK_NAME, // J.NodeProp.IPFS_OK // ]); S.util.addAllToSet(this.readOnlyPropertyList, [ // J.NodeProp.IMG_WIDTH, // J.NodeProp.IMG_HEIGHT, // J.NodeProp.BIN, // J.NodeProp.BIN_MIME, // J.NodeProp.BIN_SIZE, // J.NodeProp.BIN_FILENAME, // J.NodeProp.JSON_HASH, // J.NodeProp.IPFS_LINK, // J.NodeProp.ENC_KEY, // J.NodeProp.TYPE_LOCK ]); S.util.addAllToSet(this.controlBasedPropertyList, [ // J.NodeProp.INLINE_CHILDREN, // J.NodeProp.NOWRAP, // J.NodeProp.SAVE_TO_IPFS, // J.NodeProp.LAYOUT, // J.NodeProp.PRIORITY, // J.NodeProp.IMG_SIZE, J.NodeProp.CHILDREN_IMG_SIZES ]); } /* This is kind of a hard-coded hack for the one particular type name where we are using it, but needs to work for all properties */ getInputClassForType = (typeName: 
string): string => { if (typeName === "duration") { return "durationTypeInput"; } return null; } getParentPath = (node: J.NodeInfo): string => { let slashIdx: number = node.path.lastIndexOf("/"); if (slashIdx === -1) return null; return node.path.substring(0, slashIdx); } }
the_stack
import { ILayoutRestorer, JupyterFrontEnd, JupyterFrontEndPlugin, } from "@jupyterlab/application"; import { Dialog, ICommandPalette, showDialog, } from "@jupyterlab/apputils"; import { PageConfig, } from "@jupyterlab/coreutils"; import { IDocumentManager, } from "@jupyterlab/docmanager"; import { Menu, } from "@lumino/widgets"; import { IFileBrowserFactory, } from "@jupyterlab/filebrowser"; import { ILauncher, } from "@jupyterlab/launcher"; import { IMainMenu, } from "@jupyterlab/mainmenu"; import { Widget, } from "@lumino/widgets"; import { IRequestResult, request, } from "requests-helper"; import "../style/index.css"; // tslint:disable: variable-name const extension: JupyterFrontEndPlugin<void> = { activate, autoStart: true, id: "jupyterlab_email", optional: [ILauncher], requires: [IDocumentManager, ICommandPalette, ILayoutRestorer, IMainMenu, IFileBrowserFactory], }; export class SendEmailWidget extends Widget { public constructor(accounts: string[] = [], hide_code = false, account_name = "", templates: string[] = [], signatures: string[] = [], headers: string[] = [], footers: string[] = [], user_templates: string[] = [], postprocessors: string[] = [], ) { const body = document.createElement("div"); body.style.display = "flex"; body.style.flexDirection = "column"; body.classList.add("jupyterlab_email_form"); const basic = document.createElement("div"); basic.style.flex = "1"; body.appendChild(basic); basic.appendChild(Private.buildLabel("Type:")); basic.appendChild(Private.buildSelect(["Email", "HTML Attachment", "PDF Attachment"], "type", "Email")); basic.appendChild(Private.buildLabel("Code or no Code:")); basic.appendChild(Private.buildSelect(["Code", "No code"], "code", "No code")); basic.appendChild(Private.buildLabel("Send email to:")); basic.appendChild(Private.buildTextarea("list, of, emails, default is to self")); basic.appendChild(Private.buildLabel("Email Subject:")); basic.appendChild(Private.buildTextarea("Subject of email")); /* Advanced options 
*/ const advanced = document.createElement("div"); advanced.style.flex = "1"; const expand_div = document.createElement("div"); expand_div.style.display = "flex"; expand_div.style.flexDirection = "row"; const advanced_label = document.createElement("label"); advanced_label.textContent = "Advanced"; expand_div.appendChild(advanced_label); const advanced_button_open = document.createElement("button"); const advanced_span_open = document.createElement("span"); const advanced_button_close = document.createElement("button"); const advanced_span_close = document.createElement("span"); advanced_button_open.classList.add("jp-ToolbarButtonComponent"); advanced_button_close.classList.add("jp-ToolbarButtonComponent"); advanced_button_open.appendChild(advanced_span_open); advanced_button_close.appendChild(advanced_span_close); advanced_span_open.classList.add("jupyterlab_email_open"); advanced_span_close.classList.add("jupyterlab_email_close"); expand_div.appendChild(advanced_button_open); expand_div.appendChild(advanced_button_close); body.appendChild(expand_div); body.appendChild(advanced); advanced.style.display = "none"; advanced_button_open.style.display = "block"; advanced_button_close.style.display = "none"; advanced_button_open.onclick = () => { advanced.style.display = "block"; advanced_button_open.style.display = "none"; advanced_button_close.style.display = "block"; }; advanced_button_close.onclick = () => { advanced.style.display = "none"; advanced_button_open.style.display = "block"; advanced_button_close.style.display = "none"; }; if (accounts.length > 0) { advanced.appendChild(Private.buildLabel("Account:")); advanced.appendChild(Private.buildSelect(accounts, "accounts", account_name)); } advanced.appendChild(Private.buildLabel("Also attach as:")); advanced.appendChild(Private.buildSelect(["None", "PDF", "HTML", "Both"], "attach", "None")); if (templates.length > 0) { advanced.appendChild(Private.buildLabel("Template:")); 
advanced.appendChild(Private.buildSelect(templates, "templates")); } if (user_templates.length > 0) { advanced.appendChild(Private.buildLabel("User Templates (Overrides 'builtin' template choice):")); advanced.appendChild(Private.buildSelect(user_templates, "user_template")); } if (signatures.length > 0) { advanced.appendChild(Private.buildLabel("Signature:")); advanced.appendChild(Private.buildSelect(signatures, "signatures")); } if (headers.length > 0) { advanced.appendChild(Private.buildLabel("Header:")); advanced.appendChild(Private.buildSelect(headers, "headers")); } if (footers.length > 0) { advanced.appendChild(Private.buildLabel("Footer:")); advanced.appendChild(Private.buildSelect(footers, "footers")); } if (postprocessors.length > 0) { advanced.appendChild(Private.buildLabel("Post Processors:")); advanced.appendChild(Private.buildSelect(postprocessors, "postprocessor")); } super({ node: body }); } public getCode(): string { return this.codeNode.value; } public getEmail(): string { return this.emailNode.value; } public getType(): string { return this.typeNode.value; } public getTo(): string { return this.toNode.value; } public getSubject(): string { return this.subjectNode.value; } public getAlsoAttach(): string { return this.alsoAttachNode.value; } public getTemplate(): string { return this.templateNode ? this.templateNode.value : ""; } public getSignature(): string { return this.signatureNode ? this.signatureNode.value : ""; } public getHeader(): string { return this.headerNode ? this.headerNode.value : ""; } public getFooter(): string { return this.footerNode ? this.footerNode.value : ""; } public getUserTemplate(): string { return this.userTemplateNode ? this.userTemplateNode.value : ""; } public getPostprocessor(): string { return this.userTemplateNode ? 
this.userTemplateNode.value : ""; } public get typeNode(): HTMLSelectElement { return this.node.getElementsByTagName("select")[0]; } public get codeNode(): HTMLSelectElement { return this.node.getElementsByTagName("select")[1]; } public get emailNode(): HTMLSelectElement { return this.node.getElementsByTagName("select")[2]; } public get toNode(): HTMLTextAreaElement { return this.node.getElementsByTagName("textarea")[0]; } public get subjectNode(): HTMLTextAreaElement { return this.node.getElementsByTagName("textarea")[1]; } public get alsoAttachNode(): HTMLSelectElement { return this.node.getElementsByTagName("select")[3]; } public get templateNode(): HTMLSelectElement { return this.node.querySelector("select.templates"); } public get signatureNode(): HTMLSelectElement { return this.node.querySelector("select.signatures"); } public get headerNode(): HTMLSelectElement { return this.node.querySelector("select.headers"); } public get footerNode(): HTMLSelectElement { return this.node.querySelector("select.footers"); } public get userTemplateNode(): HTMLSelectElement { return this.node.querySelector("select.user_template"); } public get postprocessorNode(): HTMLSelectElement { return this.node.querySelector("select.postprocessor"); } } function activate(app: JupyterFrontEnd, docManager: IDocumentManager, palette: ICommandPalette, restorer: ILayoutRestorer, mainMenu: IMainMenu, browser: IFileBrowserFactory, launcher: ILauncher | null) { const commands = app.commands; const all_emails1: string[] = []; const all_accounts: string[] = []; const all_templates: string[] = []; const all_user_templates: string[] = []; const all_signatures: string[] = []; const all_headers: string[] = []; const all_footers: string[] = []; let loaded = false; // grab templates from serverextension request("get", PageConfig.getBaseUrl() + "email/get").then((res: IRequestResult) => { if (res.ok) { const info: any = res.json(); for (const template of info.templates) { all_templates.push(template); 
} for (const template of info.user_templates) { all_user_templates.push(template); } for (const signature of info.signatures) { all_signatures.push(signature); } for (const header of info.headers) { all_headers.push(header); } for (const footer of info.footers) { all_footers.push(footer); } for (const email of info.emails) { const command1 = "send-email:" + email; all_accounts.push(email); all_emails1.push(command1); const send_widget = new SendEmailWidget(all_accounts, false, email, all_templates, all_signatures, all_headers, all_footers, all_user_templates); app.commands.addCommand(command1, { execute: () => { showDialog({ body: send_widget, buttons: [Dialog.cancelButton(), Dialog.okButton({ label: "Ok" })], title: "Send email:", }).then((result) => { if (result.button.label === "Cancel") { return; } const folder = browser.defaultBrowser.model.path || ""; const context = docManager.contextForWidget(app.shell.currentWidget); const type = send_widget.getType(); // eslint-disable-next-line no-shadow const email = send_widget.getEmail(); const code = send_widget.getCode(); const to = send_widget.getTo(); const subject = send_widget.getSubject(); const also_attach = send_widget.getAlsoAttach(); const template = send_widget.getTemplate(); const user_template = send_widget.getUserTemplate(); const signature = send_widget.getSignature(); const header = send_widget.getHeader(); const footer = send_widget.getFooter(); const postprocessor = send_widget.getPostprocessor(); let path = ""; let model = {}; if (context) { path = context.path; model = context.model.toJSON(); } return new Promise((resolve) => { request("post", PageConfig.getBaseUrl() + "email/run", {}, {also_attach, code, email, folder, footer, header, model, path, postprocessor, signature, subject, template, to, type, user_template, }, {timeout: 30000}).then( // eslint-disable-next-line no-shadow (res: IRequestResult) => { if (res.ok) { showDialog({ buttons: [Dialog.okButton({ label: "Ok" })], title: "Mail 
sent!", }).then(() => { resolve(); }); } else { showDialog({ body: "Check the Jupyter logs for the exception.", buttons: [Dialog.okButton({ label: "Ok" })], title: "Something went wrong!", }).then(() => { resolve(); }); } }); }); }); }, isEnabled: () => { if (app.shell.currentWidget && docManager.contextForWidget(app.shell.currentWidget) && docManager.contextForWidget(app.shell.currentWidget).model) { return true; } return false; }, label: command1, }); palette.addItem({command: command1, category: "Email"}); const menu = new Menu({ commands }); menu.title.label = "Send Emails"; app.restored.then(() => { all_emails1.forEach((command) => { menu.addItem({command, args: {}}); }); if (mainMenu && !loaded) { loaded = true; mainMenu.fileMenu.addGroup([{ type: "submenu", submenu: menu }], 11); } }); } } }); // eslint-disable-next-line no-console console.log("JupyterLab extension jupyterlab_email is activated!"); } export default extension; export {activate as _activate}; // eslint-disable-next-line @typescript-eslint/no-namespace namespace Private { const default_none = document.createElement("option"); default_none.selected = false; default_none.disabled = true; default_none.hidden = false; default_none.style.display = "none"; default_none.value = ""; export function buildLabel(text: string): HTMLLabelElement { const label = document.createElement("label"); label.textContent = text; return label; } export function buildTextarea(text: string): HTMLTextAreaElement { const area = document.createElement("textarea"); area.placeholder = text; area.style.marginBottom = "15px"; return area; } export function buildSelect(list: string[], _class = "", def?: string): HTMLSelectElement { const select = document.createElement("select"); select.classList.add(_class); select.appendChild(default_none); for (const x of list) { const option = document.createElement("option"); option.value = x; option.textContent = x; select.appendChild(option); if (def && x === def) { option.selected = 
true; } } select.style.marginBottom = "15px"; select.style.minHeight = "25px"; return select; } }
the_stack
import {HtmlString, IHtmlElement, removeAllChildren} from "../ui/ui";
import {ContextMenu, SubMenu, TopMenu} from "../ui/menu";
import {FindBar} from "../ui/findBar";
import {BaseReceiver, BigTableView} from "../modules";
import {FullPage, PageTitle} from "../ui/fullPage";
import { assert, Converters, formatNumber, ICancellable, makeSpan, PartialResult, px, last, fractionToPercent, argmin, createComparisonFilter, cloneArray, } from "../util";
import { BucketsInfo, Comparison, FindResult, GenericLogs, Groups, IColumnDescription, NextKList, RecordOrder, RemoteObjectId, RowValue } from "../javaBridge";
import {OnCompleteReceiver, Receiver} from "../rpc";
import {TableMeta} from "../ui/receiver";
import {DataRangesReceiver} from "./dataRangesReceiver";
import {TimestampPlot} from "../ui/timestampPlot";
import {interpolateBlues} from "d3-scale-chromatic";

// Records one applied filter so it can be displayed and undone later.
class FilteredDataset {
    constructor(public readonly remoteObjectId: RemoteObjectId,
                public readonly meta: TableMeta,
                public readonly title: string,
                // Value of nextKRows before filter was applied
                public readonly previousNextK: NextKList) {}
}

/**
 * Viewer for a log file: a scrollable list of log rows sorted by a timestamp column,
 * with a per-column color/visibility header, a timestamp density heatmap on the right,
 * and a stack of removable filters.
 */
export class LogFileView extends BigTableView implements IHtmlElement {
    protected readonly topLevel: HTMLElement;
    protected readonly findBar: FindBar;
    public nextKList: NextKList;                    // currently displayed rows
    protected visibleColumns: Set<string>;          // columns currently rendered
    protected color: Map<string, string>;           // one per column
    public static readonly requestSize = 1000;      // number of lines brought in one request
    protected contents: HTMLDivElement;
    protected wrap: boolean = false;                // whether long lines wrap
    protected bars: HTMLDivElement[];               // heatmap bar containers
    protected plots: TimestampPlot[];               // one plot per bar
    public readonly heatmapWidth: number = 15;
    protected tsIndex: number;                      // index of the timestamp column in data
    private maxTs: number;                          // maximum timestamp
    private minTs: number;                          // minimum timestamp
    private readonly box: HTMLDivElement;           // box showing the visible data as an outline
    private readonly linePointer: HTMLDivElement;   // highlight for the hovered row's timestamp
    public static readonly increment = 250;         // how many more rows to bring
    protected readonly barHolder: HTMLDivElement;
    protected contextMenu: ContextMenu;
    protected readonly filterHolder: HTMLDivElement;
    protected filters: FilteredDataset[];           // stack of applied filters

    constructor(remoteObjectId: RemoteObjectId, meta: TableMeta,
                protected readonly order: RecordOrder,
                public readonly timestampColumn: IColumnDescription,
                page: FullPage) {
        super(remoteObjectId, meta, page, "LogFile");
        this.visibleColumns = new Set<string>();
        this.color = new Map<string, string>();
        this.topLevel.className = "logFileViewer";
        this.bars = [];
        this.tsIndex = 0; // always first in sort order, and thus in NextKList
        this.filters = [];
        // Header: title bar, find bar, filter list and schema row.
        const header = document.createElement("header");
        header.style.flex = "none";
        this.topLevel.appendChild(header);
        const titleBar = document.createElement("div");
        titleBar.className = "logFileTitle";
        // Note: this local `wrap` (a div) shadows the `wrap` boolean field.
        const wrap = document.createElement("div");
        header.appendChild(wrap);
        wrap.appendChild(titleBar);
        this.findBar = new FindBar((n, f) => this.onFind(n, f), null);
        header.appendChild(this.findBar.getHTMLRepresentation());
        this.findBar.show(false);
        this.filterHolder = document.createElement("div");
        header.appendChild(this.filterHolder);
        const schemaDisplay = document.createElement("div");
        header.appendChild(schemaDisplay);
        this.displaySchema(schemaDisplay);
        this.contextMenu = new ContextMenu(this.topLevel);
        const menu = new TopMenu([{
            text: "View", help: "Control the view", subMenu: new SubMenu([{
                text: "Wrap", help: "Change text wrapping", action: () => this.toggleWrap()
            }, {
                text: "Refresh", help: "Refresh current view", action: () => this.refresh()
            }])
        }/*, {
            text: "Find", help: "Search specific values", subMenu: new SubMenu([{
                text: "Find...", help: "Search for a string", action: () => this.showFindBar(true)
            }])
        } */]);
        this.page.setMenu(menu);
        // Middle area: the log text on the left, the heatmap bar(s) on the right.
        const middle = this.makeToplevelDiv("logFileContents");
        middle.className = "logFileContents";
        const div = document.createElement("div");
        div.style.flex = "1";
        div.style.minWidth = "100px";
        middle.appendChild(div);
        this.contents = document.createElement("div");
        div.appendChild(this.contents);
        this.contents.style.whiteSpace = "nowrap";
        this.contents.className = "logFileData";
        this.barHolder = document.createElement("div");
        this.barHolder.style.display = "flex";
        this.barHolder.style.flexDirection = "row";
        this.barHolder.style.flexWrap = "nowrap";
        this.barHolder.style.position = "relative";
        middle.appendChild(this.barHolder);
        const barCount = 1;
        for (let i = 0; i < barCount; i++) {
            const bar = document.createElement("div");
            bar.className = "logHeatmap";
            bar.style.width = px(this.heatmapWidth);
            this.bars.push(bar);
            this.barHolder.appendChild(bar);
        }
        // Outline showing which timestamp range is currently visible.
        this.box = document.createElement("div");
        this.box.style.position = "absolute";
        this.box.style.left = px(0);
        this.box.style.right = px(0);
        this.box.style.border = "2px solid black";
        // Thin yellow marker tracking the timestamp of the hovered row.
        this.linePointer = document.createElement("div");
        this.linePointer.style.position = "absolute";
        this.linePointer.style.left = px(0);
        this.linePointer.style.right = px(0);
        this.linePointer.style.border = "1px solid rgba(255, 255, 0, .5)";
        this.linePointer.style.background = "rgba(255, 255, 0, .5)";
        this.linePointer.style.height = px(0);
        this.barHolder.appendChild(this.box);
        this.barHolder.appendChild(this.linePointer);
        this.barHolder.onclick = (e) => this.scrollTimestamp(e);
        this.barHolder.onmousemove = (e) => this.onMouseMove(e);
        const footer = document.createElement("footer");
        footer.className = "logFileFooter";
        this.topLevel.appendChild(footer);
        // createDiv presumably also initializes this.summary (inherited) — see uses below.
        const summary = this.createDiv("summary");
        footer.appendChild(summary);
    }

    // The remote object of the most recent filter, if any; the base object otherwise.
    public getRemoteObjectId(): RemoteObjectId | null {
        if (this.filters.length > 0) return last(this.filters)!.remoteObjectId;
        return super.getRemoteObjectId()!;
    }

    // (Re)creates one TimestampPlot per heatmap bar.
    public createSurfaces(): void {
        this.plots = this.bars.map( b => new TimestampPlot(b, interpolateBlues));
    }

    // Maps a timestamp to a fraction in [0, 1] of the whole [minTs, maxTs] range.
    public timestampPosition(ts: number): number {
        if (this.minTs >= this.maxTs) return 0;
        if (ts < this.minTs) return 0;
        if (ts > this.maxTs) return 1;
        return (ts - this.minTs) / (this.maxTs - this.minTs);
    }

    // Inverse of timestampPosition: maps a fraction in [0, 1] back to a timestamp.
    public positionToTimestamp(fraction: number): number {
        if (fraction <= 0) return this.minTs;
        if (fraction >= 1) return this.maxTs;
        return this.minTs + fraction * (this.maxTs - this.minTs);
    }

    // Toggles line wrapping for the log text and redraws.
    public toggleWrap(): void {
        this.wrap = !this.wrap;
        if (!this.wrap) {
            this.contents.style.whiteSpace = "nowrap";
        } else {
            this.contents.style.whiteSpace = "normal";
        }
        this.resize();
    }

    public getHeatmapHeight(): number {
        return this.contents.clientHeight;
    }

    // Export is not supported for log views.
    protected export(): void {
        throw new Error("Method not implemented.");
    }

    // Renders the column-name header row; click cycles a column's color,
    // right-click toggles its visibility.
    private displaySchema(header: HTMLElement): void {
        const tbl = document.createElement("table");
        tbl.style.tableLayout = "fixed";
        tbl.style.width = "100%";
        header.appendChild(tbl);
        const row = document.createElement("tr");
        row.className = "logHeader";
        tbl.appendChild(row);
        for (const col of this.order.sortOrientationList) {
            const colName = col.columnDescription.name;
            const cell = row.insertCell();
            cell.textContent = colName;
            cell.style.textOverflow = "ellipsis";
            cell.style.overflow = "hidden";
            cell.title = colName + ": Click to color; right-click to toggle.";
            cell.onclick = () => this.rotateColor(colName, cell);
            cell.oncontextmenu = (e) => { this.check(colName, cell); e.preventDefault(); }
            cell.classList.add("selected");
            this.color.set(colName, "black");
            this.visibleColumns.add(colName);
        }
    }

    // Fixed color rotation: black -> red -> blue -> green -> black.
    private static nextColor(current: string): string {
        switch (current) {
            case "black": return "red";
            case "red": return "blue";
            case "blue": return "green";
            default: return "black";
        }
    }

    // Re-fetches the currently displayed rows and redraws the heatmap.
    public refresh(): void {
        for (const bar of this.bars) {
            removeAllChildren(bar);
        }
        let firstRow = null;
        if (this.nextKList != null && this.nextKList.rows.length > 0)
            firstRow = this.nextKList.rows[0].values;
        // NOTE(review): the next line dereferences this.nextKList unconditionally; if it is
        // still null this throws — confirm refresh() is only invoked after the first updateView.
        const rr = this.createNextKRequest(this.order, firstRow, this.nextKList.rows.length, null, null);
        rr.invoke(new LogFragmentReceiver(this.page, this, rr));
    }

    // Advances the given column's display color and redraws.
    private rotateColor(col: string, cell: HTMLElement): void {
        const current = this.color.get(col);
        const next = LogFileView.nextColor(current);
        this.color.set(col, next);
        cell.style.color = this.color.get(col);
        this.resize();
    }

    // Toggles a column's visibility and redraws.
    private check(colName: string, cell: HTMLElement): void {
        if (this.visibleColumns.has(colName)) {
            this.visibleColumns.delete(colName);
            cell.classList.remove("selected");
        } else {
            this.visibleColumns.add(colName);
            cell.classList.add("selected");
        }
        this.resize();
    }

    private onFind(next: boolean, fromTop: boolean): void {
        // TODO
    }

    private showFindBar(show: boolean): void {
        this.findBar.show(show);
    }

    public getHTMLRepresentation(): HTMLElement {
        return this.topLevel;
    }

    // get data before the currently visible rows.
    public getBefore(): void {
        // Query in inverted order starting at the first visible row; LogExpander
        // flips the result back and prepends it.
        const reverseOrder = this.order.invert();
        const rr = this.createNextKRequest(reverseOrder, this.nextKList.rows[0].values ?? null, LogFileView.increment, null, null);
        const rec = new LogExpander(this.page, this, rr, this.nextKList, true);
        rr.invoke(rec);
    }

    // get data after the currently visible rows
    public getAfter(): void {
        const rr = this.createNextKRequest(this.order, last(this.nextKList.rows)?.values ?? null, LogFileView.increment, null, null);
        const rec = new LogExpander(this.page, this, rr, this.nextKList, false);
        rr.invoke(rec);
    }

    // Positions the yellow marker at the given timestamp's fraction of the heatmap.
    protected showLine(timestamp: number): void {
        const fraction = this.timestampPosition(timestamp);
        this.linePointer.style.top = fractionToPercent(fraction);
        this.linePointer.style.bottom = fractionToPercent(1 - fraction);
    }

    public timestampToString(val: number): string {
        return Converters.valueToString(val, this.timestampColumn.kind, true);
    }

    // Renders the given rows, the "more rows before/after" buttons, the visible-range
    // outline and the summary footer. findResult is currently unused.
    public updateView(nextKList: NextKList, findResult: FindResult | null): void {
        this.nextKList = nextKList;
        removeAllChildren(this.contents);
        if (nextKList.startPosition > 0) {
            const before = document.createElement("button");
            before.className = "logGap";
            this.contents.appendChild(before);
            before.innerText = formatNumber(nextKList.startPosition) + " rows before; click to bring more";
            before.onclick = () => this.getBefore();
        }
        let rowsShown = 0;
        for (const row of nextKList.rows) {
            rowsShown += row.count;
            const rowSpan = document.createElement("span");
            rowSpan.className = "logRow";
            let index = 0;
            for (const value of row.values) {
                const col = this.order.sortOrientationList[index++];
                const name = col.columnDescription.name;
                if (!this.visibleColumns.has(name)) continue;
                if (value == null) continue
                let shownValue = Converters.valueToString(value, col.columnDescription.kind, true);
                let high;
                if (name === GenericLogs.lineNumberColumn) {
                    // left pad the line number
                    shownValue = ("00000" + shownValue).slice(-5);
                    high = makeSpan(shownValue, false);
                } else {
                    high = this.findBar.highlight(shownValue, null);
                }
                high.classList.add("logCell");
                high.style.color = this.color.get(name);
                rowSpan.appendChild(high);
                high.oncontextmenu = (e) => {
                    this.contextMenu.clear();
                    // This menu shows the value to the right, but the filter
                    // takes the value to the left, so we have to flip all
                    // comparison signs.
                    this.contextMenu.addItem({text: "Keep " + shownValue,
                        action: () => this.filterOnValue(col.columnDescription, value, "=="),
                        help: "Keep only the rows that have this value in this column." }, true);
                    this.contextMenu.addItem({text: "Keep different from " + shownValue,
                        action: () => this.filterOnValue(col.columnDescription, value, "!="),
                        help: "Keep only the rows that have a different value in this column." }, true);
                    this.contextMenu.showAtMouse(e);
                };
            }
            rowSpan.appendChild(document.createElement("br"));
            this.contents.appendChild(rowSpan);
            rowSpan.onmouseover = () => this.showLine(row.values[this.tsIndex] as number);
        }
        if (this.hasData()) {
            const minVisTs = this.firstVisibleTimestamp()!;
            const maxVisTs = this.lastVisibleTimestamp()!;
            const minFraction = this.timestampPosition(minVisTs);
            const maxFraction = this.timestampPosition(maxVisTs);
            this.box.style.top = fractionToPercent(minFraction);
            this.box.style.bottom = fractionToPercent(1 - maxFraction);
            this.summary!.set("Lines visible", rowsShown);
            this.summary!.set("total lines", nextKList.rowsScanned);
            this.summary!.setString("first timestamp", new HtmlString(
                this.timestampToString(this.minTs)));
            this.summary!.setString("last timestamp", new HtmlString(
                this.timestampToString(this.maxTs)));
            this.summary!.setString("first visible timestamp", new HtmlString(
                this.timestampToString(minVisTs)));
            this.summary!.setString("Last visible timestamp", new HtmlString(
                this.timestampToString(maxVisTs)));
        }
        const rowsAfter = nextKList.rowsScanned - (nextKList.startPosition + rowsShown);
        if (rowsAfter > 0) {
            const after = document.createElement("button");
            after.className = "logGap";
            this.contents.appendChild(after);
            after.innerText = formatNumber(rowsAfter) + " rows after; click to bring more";
            after.onclick = () => this.getAfter();
        }
        this.summary!.display();
    }

    // Applies a comparison filter on the given column/value pair.
    protected filterOnValue(cd: IColumnDescription, value: RowValue, comparison: Comparison): void {
        const filter = createComparisonFilter(cd, value, comparison);
        if (filter == null)
            // Some error occurred
            return;
        const rr = this.createFilterComparisonRequest(filter);
        rr.invoke(new LogFilteredReceiver(this.page, rr, this,
            Converters.comparisonFilterDescription(filter)));
    }

    protected firstVisibleTimestamp(): number | null {
        if (!this.hasData()) return null;
        return this.nextKList.rows[0].values[this.tsIndex] as number;
    }

    protected lastVisibleTimestamp(): number | null {
        if (!this.hasData()) return null;
        return last(this.nextKList.rows)!.values[this.tsIndex] as number;
    }

    // NOTE(review): combining log views is unsupported; this asserts and never returns a value.
    protected getCombineRenderer(title: PageTitle):
        (page: FullPage, operation: ICancellable<RemoteObjectId>) => BaseReceiver {
        assert(false);
    }

    public resize(): void {
        this.updateView(this.nextKList, null);
    }

    // Draws the timestamp density histogram in the (single) heatmap bar.
    public updateLineDensity(value: Groups<number>): void {
        this.plots[0].setHistogram(value, this.minTs, this.maxTs, this.timestampColumn.kind);
        this.plots[0].draw();
    }

    setTimestampRange(min: number, max: number): void {
        this.minTs = min;
        this.maxTs = max;
    }

    protected hasData(): boolean {
        return this.nextKList != null && this.nextKList.rows.length > 0;
    }

    // Click on the heatmap: scroll to the clicked timestamp if loaded, else fetch it.
    private scrollTimestamp(e: MouseEvent) {
        if (!this.hasData()) return;
        const y = e.offsetY;
        const fraction = y / this.barHolder.clientHeight;
        const ts = this.positionToTimestamp(fraction);
        // Check if we already have this part; then scroll to it, otherwise bring a new log
        const minFraction = this.timestampPosition(this.firstVisibleTimestamp()!);
        const maxFraction = this.timestampPosition(this.lastVisibleTimestamp()!);
        if (fraction >= minFraction && fraction <= maxFraction) {
            // already available
            // find the closest timestamp
            const closestIndex = argmin(this.nextKList.rows, r => Math.abs(r.values[this.tsIndex] as number - ts));
            if (closestIndex >= 0)
                this.contents.children[closestIndex].scrollIntoView();
            return;
        }
        // download it
        const firstRow = [];
        const colsMinValue = [];
        for (const c of this.order.sortOrientationList) {
            if (c.columnDescription.name == this.timestampColumn.name) {
                firstRow.push(ts);
            } else {
                firstRow.push(null);
                colsMinValue.push(c.columnDescription.name);
            }
        }
        const rr = this.createNextKRequest(this.order, firstRow, LogFileView.increment, null, colsMinValue);
        rr.invoke(new LogChooser(this.page, this, rr));
    }

    // Mouse move over the heatmap; the summary display is currently disabled.
    private onMouseMove(e: MouseEvent): void {
        if (!this.hasData()) return;
        const y = e.offsetY;
        const fraction = y / this.barHolder.clientHeight;
        const ts = this.positionToTimestamp(fraction);
        // this.summary!.setString("Pointing at", new HtmlString(this.timestampToString(ts)));
        // this.summary!.display();
    }

    // Pushes a filter on the stack and shows a removable row describing it.
    public addFilteredView(fd: FilteredDataset): void {
        this.filters.push(fd);
        const filterRow = document.createElement("div");
        filterRow.className = "logFilterRow";
        const span = makeSpan("Filter: " + fd.title);
        span.style.flexGrow = "100";
        filterRow.appendChild(span);
        const close = document.createElement("span");
        close.className = "close";
        close.innerHTML = "&times;";
        const index = this.filters.length - 1;
        close.onclick = () => this.removeFilters(index);
        close.title = "Remove this filter and the subsequent ones.";
        filterRow.appendChild(close);
        this.filterHolder.appendChild(filterRow);
    }

    // Removes filter startIndex and all filters after it, restoring the pre-filter rows.
    protected removeFilters(startIndex: number): void {
        if (startIndex >= this.filters.length) return;
        removeAllChildren(this.filterHolder);
        const filters = cloneArray(this.filters);
        this.filters = [];
        for (let i = 0; i < startIndex; i++)
            this.addFilteredView(filters[i]);
        for (const bar of this.bars) {
            removeAllChildren(bar);
        }
        this.nextKList = filters[startIndex].previousNextK;
        this.refresh();
    }
}

// This receiver is invoked after a filtering operation has been applied to the log.
class LogFilteredReceiver extends OnCompleteReceiver<RemoteObjectId> { constructor(page: FullPage, operation: ICancellable<RemoteObjectId>, protected view: LogFileView, protected filterDescription: string) { super(page, operation, "Filter"); } public run(data: RemoteObjectId): void { const fd = new FilteredDataset(data, this.view.meta, this.filterDescription, this.view.nextKList); this.view.addFilteredView(fd); this.view.refresh(); } } // This receiver is invoked after the visible log has been grown backward or forward class LogExpander extends OnCompleteReceiver<NextKList> { constructor(page: FullPage, protected view: LogFileView, operation: ICancellable<NextKList>, protected previous: NextKList | null, protected reverse: boolean) { super(page, operation, "Getting log fragment"); } public run(data: NextKList): void { if (this.reverse) { data.rows.reverse(); const result: NextKList = { rowsScanned: data.rowsScanned, startPosition: data.rowsScanned - data.startPosition - data.rows.length, rows: this.previous != null ? data.rows.concat(this.previous.rows) : data.rows, aggregates: null } this.view.updateView(result, null); } else { const result: NextKList = { rowsScanned: data.rowsScanned, startPosition: this.previous != null ? this.previous.startPosition : data.startPosition, rows: this.previous != null ? 
this.previous.rows.concat(data.rows) : data.rows, aggregates: null } this.view.updateView(result, null); } } } class LogChooser extends LogExpander { constructor(page: FullPage, protected view: LogFileView, operation: ICancellable<NextKList>) { super(page, view, operation, null, false); } } export class LogFragmentReceiver extends OnCompleteReceiver<NextKList> { constructor(page: FullPage, protected view: LogFileView, operation: ICancellable<NextKList>) { super(page, operation, "Getting log fragment"); } public run(value: NextKList): void { this.view.createSurfaces(); if (value.rowsScanned == 0) { this.view.page.reportError("No data left"); this.view.updateView(value, null); this.view.updateLineDensity({ perBucket: [], perMissing: 0 }); return; } const rr = this.view.createDataQuantilesRequest([this.view.timestampColumn], this.page, "LogFile"); rr.chain(this.operation); const rec = new TimestampRangeReceiver(this.page, this.view, rr, value); rr.invoke(rec); } } export class TimestampRangeReceiver extends OnCompleteReceiver<BucketsInfo[]> { constructor(page: FullPage, protected view: LogFileView, operation: ICancellable<BucketsInfo[]>, protected initialData: NextKList) { super(page, operation, "Getting timestamp range"); } public run(value: BucketsInfo[]) { assert(value.length == 1); const range = value[0]; const pixels = this.view.getHeatmapHeight() / 2; // noinspection JSSuspiciousNameCombination const args = DataRangesReceiver.computeHistogramArgs( this.view.timestampColumn, range, Math.min(this.initialData.rowsScanned, Math.floor(pixels)), true, // This is sideways { height: this.view.heatmapWidth, width: pixels }); this.view.setTimestampRange(range.min!, range.max!); // must do this after we set the timestamp range this.view.updateView(this.initialData, null); const rr = this.view.createHistogramRequest({ histos: [ args ], samplingRate: 1.0, seed: 0, }); rr.chain(this.operation); const rec = new TimestampHistogramReceiver(this.page, this.view, rr); 
rr.invoke(rec); } } export class TimestampHistogramReceiver extends Receiver<Groups<number>> { constructor(page: FullPage, protected view: LogFileView, operation: ICancellable<Groups<number>>) { super(page, operation, "Getting time distribution"); } public onNext(value: PartialResult<Groups<number>>): void { if (value != null && value.data != null) this.view.updateLineDensity(value.data); } public onCompleted() { super.onCompleted(); this.view.updateCompleted(this.elapsedMilliseconds()); } }
the_stack
import {
    GeoBox,
    GeoCoordinates,
    GeoCoordinatesLike,
    MathUtils,
    OrientedBox3,
    Projection,
    ProjectionType,
    sphereProjection,
    TileKeyUtils,
    Vector2Like,
    Vector3Like
} from "@here/harp-geoutils";
import { GeoCoordLike } from "@here/harp-geoutils/lib/coordinates/GeoCoordLike";
import { EarthConstants } from "@here/harp-geoutils/lib/projection/EarthConstants";
import { assert, DOMUtils, LoggerManager } from "@here/harp-utils";
import * as THREE from "three";

import { CameraUtils } from "./CameraUtils";
import { ElevationProvider } from "./ElevationProvider";
import { Object3DUtils } from "./geometry/Object3DUtils";
import { MapView } from "./MapView";

const logger = LoggerManager.instance.create("MapViewUtils");

/**
 * Zoom level to request terrain tiles for getting the height of the camera above terrain.
 */
const TERRAIN_ZOOM_LEVEL = 4;

// Caching those for performance reasons.
const groundNormalPlanarProj = new THREE.Vector3(0, 0, 1);
const groundPlane = new THREE.Plane(groundNormalPlanarProj.clone());
const groundSphere = new THREE.Sphere(undefined, EarthConstants.EQUATORIAL_RADIUS);
const rayCaster = new THREE.Raycaster();
const epsilon = 1e-5;

/**
 * Cached ThreeJS instances for realtime maths.
 * These are module-level scratch objects shared by the functions below; none of them may be
 * assumed to hold a value across calls.
 */
const space = { x: new THREE.Vector3(), y: new THREE.Vector3(), z: new THREE.Vector3() };
const tangentSpace = { x: new THREE.Vector3(), y: new THREE.Vector3(), z: new THREE.Vector3() };
const cache = {
    box3: [new THREE.Box3()],
    obox3: [new OrientedBox3()],
    quaternions: [new THREE.Quaternion(), new THREE.Quaternion()],
    vector2: [new THREE.Vector2(), new THREE.Vector2()],
    vector3: [new THREE.Vector3(), new THREE.Vector3(), new THREE.Vector3(), new THREE.Vector3()],
    matrix4: [new THREE.Matrix4(), new THREE.Matrix4()],
    transforms: [
        {
            xAxis: new THREE.Vector3(),
            yAxis: new THREE.Vector3(),
            zAxis: new THREE.Vector3(),
            position: new THREE.Vector3()
        }
    ]
};
const tmpCamera = new THREE.PerspectiveCamera();

/**
 * Rounds a given zoom level up to the nearest integer value if it's close enough.
 *
 * The zoom level set in {@link MapView} after a zoom level
 * target is given to {@link (MapView.lookAt:WITH_PARAMS)} or
 * {@link @here/harp-map-controls#MapControls} never matches
 * exactly the target due to the precision loss caused by the
 * conversion from zoom level to camera distance (done in
 * {@link (MapView.lookAt:WITH_PARAMS)} and {@link @here/harp-map-controls#MapControls})
 * and from distance back to zoom level (done at every frame on camera update).
 * As a result, given a fixed integer zoom level input, the final zoom level computed at every frame
 * may fall sometimes below the integer value and others above. This causes flickering since each
 * frame will use different tile levels and different style evaluations for object visibility.
 * See HARP-9673 and HARP-8523.
 * @param zoomLevel - Input zoom level
 * @return The ceiling zoom level if input zoom level is close enough, otherwise the unmodified
 * input zoom level.
 */
function snapToCeilingZoomLevel(zoomLevel: number) {
    const eps = 1e-6; // tolerance below the ceiling at which we snap up
    const ceiling = Math.ceil(zoomLevel);
    return ceiling - zoomLevel < eps ? ceiling : zoomLevel;
}

/**
 * MapView utilities: View transformations, camera setup, view bounds computation...
*/
export namespace MapViewUtils {
    export const MAX_TILT_DEG = 89;
    export const MAX_TILT_RAD = MAX_TILT_DEG * THREE.MathUtils.DEG2RAD;

    /**
     * The anti clockwise rotation of an object along the axes of its tangent space, with itself
     * as origin.
     */
    export interface Attitude {
        /**
         * Rotation of the object along its vertical axis.
         */
        yaw: number;

        /**
         * Rotation of the object along its horizontal axis.
         */
        pitch: number;

        /**
         * Rotation of the object along its forward axis.
         */
        roll: number;
    }

    /**
     * @deprecated
     */
    export interface MemoryUsage extends Object3DUtils.MemoryUsage {}

    /**
     * Zooms and moves the map in such a way that the given target position remains at the same
     * position after the zoom.
     *
     * @param mapView - Instance of MapView.
     * @param targetNDCx - Target x position in NDC space.
     * @param targetNDCy - Target y position in NDC space.
     * @param zoomLevel - The desired zoom level.
     * @param maxTiltAngle - The maximum tilt angle to comply by, in globe projection, in radian.
     * @returns `false` if requested zoom cannot be achieved due to the map view's maximum bounds
     * {@link MapView.geoMaxBounds},`true` otherwise.
     */
    export function zoomOnTargetPosition(
        mapView: MapView,
        targetNDCx: number,
        targetNDCy: number,
        zoomLevel: number,
        maxTiltAngle: number = MAX_TILT_RAD
    ): boolean {
        const { elevationProvider, camera, projection } = mapView;

        // Use for now elevation at camera position. See getTargetAndDistance.
        const elevation = elevationProvider
            ? elevationProvider.getHeight(
                  projection.unprojectPoint(camera.position),
                  TERRAIN_ZOOM_LEVEL
              )
            : undefined;

        // Get current target position in world space before we zoom.
        const zoomTarget = rayCastWorldCoordinates(mapView, targetNDCx, targetNDCy, elevation);

        // Compute current camera target, it may not be the one set in MapView, e.g. when this
        // function is called multiple times between frames.
        const cameraTarget = MapViewUtils.getTargetAndDistance(
            projection,
            camera,
            elevationProvider
        ).target;
        const newCameraDistance = calculateDistanceFromZoomLevel(mapView, zoomLevel);

        if (mapView.geoMaxBounds) {
            // If map view has maximum bounds set, constrain camera target and distance to ensure
            // they remain within bounds.
            const constrained = constrainTargetAndDistanceToViewBounds(
                cameraTarget,
                newCameraDistance,
                mapView
            );
            if (constrained.distance !== newCameraDistance) {
                // Only indicate failure when zooming out. This avoids zoom in cancellations when
                // camera is already at the maximum distance allowed by the view bounds.
                return zoomLevel >= mapView.zoomLevel;
            }
        }

        // Set the camera distance according to the given zoom level.
        camera
            .getWorldDirection(camera.position)
            .multiplyScalar(-newCameraDistance)
            .add(cameraTarget);

        // In sphere, we may have to also orbit the camera around the target, in order to limit the
        // the tilt to `maxTiltAngle`, as we change this tilt by changing the camera's height above.
        if (projection.type === ProjectionType.Spherical) {
            // FIXME: We cannot use mapView.tilt here b/c it does not reflect the latest camera
            // changes.
            const tilt = extractCameraTilt(camera, projection);
            const deltaTilt = tilt - maxTiltAngle;
            if (deltaTilt > 0) {
                orbitAroundScreenPoint(mapView, { deltaTilt, maxTiltAngle });
            }
        }

        // Get new target position after the zoom
        const newZoomTarget = rayCastWorldCoordinates(mapView, targetNDCx, targetNDCy, elevation);
        if (!zoomTarget || !newZoomTarget) {
            return true;
        }

        if (projection.type === ProjectionType.Planar) {
            // Calculate the difference and pan the map to maintain the map relative to the target
            // position.
            zoomTarget.sub(newZoomTarget);
            panCameraAboveFlatMap(mapView, zoomTarget.x, zoomTarget.y);
        } else if (projection.type === ProjectionType.Spherical) {
            panCameraAroundGlobe(mapView, zoomTarget, newZoomTarget);
        }
        return true;
    }

    /**
     * Parameters for {@link orbitAroundScreenPoint}.
*/
    export interface OrbitParams {
        /**
         * Delta azimuth in radians (default 0).
         */
        deltaAzimuth?: number;
        /**
         * Delta tilt in radians (default 0);
         */
        deltaTilt?: number;
        /**
         * Maximum tilt between the camera and its target in radians.
         */
        maxTiltAngle: number;
        /**
         * Orbiting center in NDC coordinates, defaults to camera's principal point.
         * @see {@link CameraUtils.getPrincipalPoint}.
         */
        center?: Vector2Like;
    }

    /**
     * Orbits the camera around a given point on the screen.
     *
     * @param mapView - The {@link MapView} instance to manipulate.
     * @param offsetX - Orbit point in NDC space.
     * @param offsetY - Orbit point in NDC space.
     * @param deltaAzimuth - Delta azimuth in radians.
     * @param deltaTilt - Delta tilt in radians.
     * @param maxTiltAngle - The maximum tilt between the camera and its target in radian.
     * @deprecated Use overload with {@link OrbitParams} object parameter.
     */
    export function orbitAroundScreenPoint(
        mapView: MapView,
        offsetX: number,
        offsetY: number,
        deltaAzimuth: number,
        deltaTilt: number,
        maxTiltAngle: number
    ): void;

    /**
     * Orbits the camera around a given point on the screen.
     *
     * @param mapView - The {@link MapView} instance to manipulate.
     * @param orbitParams - {@link OrbitParams}.
     */
    export function orbitAroundScreenPoint(mapView: MapView, orbitParams: OrbitParams): void;

    // Implementation signature serving both overloads above.
    export function orbitAroundScreenPoint(
        mapView: MapView,
        offsetXOrOrbitParams: number | OrbitParams,
        offsetY?: number,
        deltaAzimuth?: number,
        deltaTilt?: number,
        maxTiltAngle?: number
    ): void {
        const ppalPoint = CameraUtils.getPrincipalPoint(mapView.camera, cache.vector2[0]);
        const mapTargetWorld = MapViewUtils.rayCastWorldCoordinates(
            mapView,
            ppalPoint.x,
            ppalPoint.y
        );
        if (mapTargetWorld === null) {
            // Principal-point ray misses the map; nothing to orbit around.
            return;
        }

        // Normalize both call forms into (orbitCenter, deltaAzimuth, deltaTilt, maxTiltAngle).
        let orbitCenter: Vector2Like | undefined;
        if (typeof offsetXOrOrbitParams === "number") {
            orbitCenter = cache.vector2[1].set(offsetXOrOrbitParams, offsetY!);
        } else {
            const params = offsetXOrOrbitParams;
            orbitCenter = params.center ?? ppalPoint;
            deltaAzimuth = params.deltaAzimuth ?? 0;
            deltaTilt = params.deltaTilt ?? 0;
            maxTiltAngle = params.maxTiltAngle;
        }
        const orbitAroundPpalPoint =
            orbitCenter.x === ppalPoint.x && orbitCenter.y === ppalPoint.y;
        const rotationTargetWorld = orbitAroundPpalPoint
            ? mapTargetWorld
            : MapViewUtils.rayCastWorldCoordinates(mapView, orbitCenter.x, orbitCenter.y);
        if (rotationTargetWorld === null) {
            return;
        }

        applyAzimuthAroundTarget(mapView, rotationTargetWorld, -deltaAzimuth!);
        // The camera's local x axis is the tilt axis after the azimuth rotation.
        const tiltAxis = new THREE.Vector3(1, 0, 0).applyQuaternion(mapView.camera.quaternion);
        const clampedDeltaTilt = computeClampedDeltaTilt(
            mapView,
            orbitCenter.y - ppalPoint.y,
            deltaTilt!,
            maxTiltAngle!,
            mapTargetWorld,
            rotationTargetWorld,
            tiltAxis
        );
        applyTiltAroundTarget(mapView, rotationTargetWorld, clampedDeltaTilt, tiltAxis);
    }

    /**
     * @hidden
     * @internal
     *
     * Applies the given Azimuth to the camera around the supplied target.
     */
    function applyAzimuthAroundTarget(
        mapView: MapView,
        rotationTargetWorld: THREE.Vector3,
        deltaAzimuth: number
    ) {
        const camera = mapView.camera;
        const projection = mapView.projection;
        // Rotate orientation and position about the surface normal at the target.
        const headingAxis = projection.surfaceNormal(rotationTargetWorld, cache.vector3[0]);
        const headingQuat = cache.quaternions[0].setFromAxisAngle(headingAxis, deltaAzimuth);
        camera.quaternion.premultiply(headingQuat);
        camera.position.sub(rotationTargetWorld);
        camera.position.applyQuaternion(headingQuat);
        camera.position.add(rotationTargetWorld);
    }

    /**
     * @hidden
     * @internal
     *
     * Clamps the supplied `deltaTilt` to the `maxTiltAngle` supplied. Note, when a non-zero offset
     * is applied, we apply another max angle of 89 degrees to the rotation center to prevent some
     * corner cases where the angle at the rotation center is 90 degrees and therefore intersects
     * the geometry with the near plane.
 */
function computeClampedDeltaTilt(
    mapView: MapView,
    offsetY: number,
    deltaTilt: number,
    maxTiltAngle: number,
    mapTargetWorld: THREE.Vector3,
    rotationTargetWorld: THREE.Vector3,
    tiltAxis: THREE.Vector3
): number {
    const camera = mapView.camera;
    const projection = mapView.projection;
    // Current tilt measured at the map target (screen center ray hit point).
    const tilt = extractTiltAngleFromLocation(projection, camera, mapTargetWorld, tiltAxis);

    if (tilt + deltaTilt < 0) {
        // Clamp the final tilt to 0
        return -tilt;
    } else if (deltaTilt <= 0) {
        // Reducing the tilt isn't clamped (apart from above).
        return deltaTilt;
    } else if (mapTargetWorld.equals(rotationTargetWorld) || offsetY < 0) {
        // When the rotation target is the center, or the offsetY is < 0, i.e. the angle at the
        // `mapTargetWorld` is always bigger, then we have a simple formula
        return MathUtils.clamp(deltaTilt + tilt, 0, maxTiltAngle) - tilt;
    }

    const rotationCenterTilt = extractTiltAngleFromLocation(
        projection,
        camera,
        rotationTargetWorld!,
        tiltAxis
    );
    const maxRotationTiltAngle = THREE.MathUtils.degToRad(89);
    // The rotationCenterTilt may exceed 89 degrees when for example the user has tilted to 89
    // at the mapTargetWorld, then choose a rotation center target above the mapTargetWorld,
    // i.e. offsetY > 0. In such case, we just return 0, i.e. we don't let the user increase
    // the tilt (but it can decrease, see check above for "deltaTilt <= 0").
    if (rotationCenterTilt > maxRotationTiltAngle) {
        return 0;
    }

    // This is used to find the max tilt angle, because the difference in normals is needed
    // to correct the triangle used to find the max tilt angle at the rotation center.
    let angleBetweenNormals = 0;
    if (projection === sphereProjection) {
        const projectedRotationTargetNormal = projection
            .surfaceNormal(rotationTargetWorld, cache.vector3[0])
            .projectOnPlane(tiltAxis)
            .normalize();
        const mapTargetNormal = projection.surfaceNormal(mapTargetWorld, cache.vector3[1]);
        angleBetweenNormals = projectedRotationTargetNormal.angleTo(mapTargetNormal);
    }
    const ninetyRad = THREE.MathUtils.degToRad(90);

    // The following terminology will be used:
    // Ta = Tilt axis, tilting is achieved by rotating the camera around this direction.
    // R = rotation target, i.e. the point about which we are rotating: `rotationTargetWorld`
    // Rp = rotation target projected on to Ta
    // C = camera position
    // M = map target, i.e. the point which the camera is looking at at the NDC coordinates 0,0
    // Note, the points Rp, C, and M create a plane that is perpendicular to the earths surface,
    // because the tilt axis is perpendicular to the up vector. The following variable `RpCM` is
    // the angle between the two rays C->Rp and C->M. This angle remains constant when tilting
    // with a fixed `offsetX` and `offsetY`. It is calculated by using the intersection of the
    // two rays with the earth.
    // Note the use of `angleBetweenNormals` to ensure this works for spherical projections.
    // Note, this calculation only works when the tilt at M is less than the tilt
    // at Rp, otherwise the above formula won't work. We however don't need to worry about this
    // case because this happens only when offsetY is less than zero, and this is handled above.
    const MRpC = ninetyRad + angleBetweenNormals - rotationCenterTilt;
    const CMRp = ninetyRad + tilt;
    const RpCM = ninetyRad * 2 - (MRpC + CMRp);
    // We want to find the greatest angle at the rotation target that gives us the max
    // angle at the map center target.
    const CMRpMaxTilt = ninetyRad * 2 - RpCM - ninetyRad - maxTiltAngle;
    // Converting the `MRpC` back to a tilt is as easy as subtracting it from 90 and the
    // `angleBetweenNormals`, i.e. this gives us the maximum allowed tilt at R that satisfies
    // the `maxTiltAngle` constraint. Note, for globe projection, this is just an approximation,
    // because once we move the camera by delta, the map target changes, and therefore the
    // normal also changes, this would need to be applied iteratively until the difference in
    // normals is reduced to some epsilon. I don't apply this because it is computationally
    // expensive and the user would never notice this in practice.
    const maxTilt = ninetyRad + angleBetweenNormals - CMRpMaxTilt;

    // Here we clamp to the min of `maxTilt` and 89 degrees. The check for 89 is to prevent it
    // intersecting with the world at 90. This is possible for example when the R position is
    // near the horizon. If the angle RCM is say 5 degrees, then an angle of say 89 degrees at
    // R, plus 5 degrees means the tilt at M would be 84 degrees, so the camera can reach 90
    // from the point R whilst the tilt to M never reaches the `maxTiltAngle`
    const clampedDeltaTilt =
        MathUtils.clamp(
            deltaTilt + rotationCenterTilt,
            0,
            Math.min(maxTilt, maxRotationTiltAngle)
        ) - rotationCenterTilt;

    return clampedDeltaTilt;
}

/**
 * @hidden
 * @internal
 *
 * Applies the given tilt to the camera around the supplied target.
 */
function applyTiltAroundTarget(
    mapView: MapView,
    rotationTargetWorld: THREE.Vector3,
    deltaTilt: number,
    tiltAxis: THREE.Vector3
) {
    const camera = mapView.camera;
    // Consider to use the cache if necessary, but beware, because the `rayCastWorldCoordinates`
    // also uses this cache.
    const posBackup = camera.position.clone();
    const quatBackup = camera.quaternion.clone();

    // Rotate the camera orientation and its position around the target by the tilt delta.
    const tiltQuat = cache.quaternions[0].setFromAxisAngle(tiltAxis, deltaTilt);
    camera.quaternion.premultiply(tiltQuat);
    camera.position.sub(rotationTargetWorld);
    camera.position.applyQuaternion(tiltQuat);
    camera.position.add(rotationTargetWorld);

    // If the screen center no longer hits the map after tilting, roll the camera back.
    if (MapViewUtils.rayCastWorldCoordinates(mapView, 0, 0) === null) {
        logger.warn("Target got invalidated during rotation.");
        camera.position.copy(posBackup);
        camera.quaternion.copy(quatBackup);
    }
}

/**
 * Calculate target (focus) point geo-coordinates for given camera.
 * @see getTargetPositionFromCamera
 *
 * @param camera - The camera looking on target point.
 * @param projection - The geo-projection used.
 * @param elevation - Optional elevation above (or below) sea level measured in world units.
 *
 * @deprecated This function is for internal use only and will be removed in the future. Use
 * MapView.worldTarget instead.
 */
export function getGeoTargetFromCamera(
    camera: THREE.Camera,
    projection: Projection,
    elevation?: number
): GeoCoordinates | null {
    // This function does almost the same as:
    // rayCastGeoCoordinates(mapView, 0, 0)
    // but in more gentle and performance wise manner
    const targetWorldPos = getWorldTargetFromCamera(camera, projection, elevation);
    if (targetWorldPos !== null) {
        return projection.unprojectPoint(targetWorldPos);
    }
    return null;
}

/**
 * Calculate target (focus) point world coordinates for given camera position and orientation.
 * @param camera - The camera looking on target point.
 * @param projection - The geo-projection used.
 * @param elevation - Optional elevation above (or below) sea level in world units.
 *
 * @deprecated This function is for internal use only and will be removed in the future.
 */
export function getWorldTargetFromCamera(
    camera: THREE.Camera,
    projection: Projection,
    elevation?: number
): THREE.Vector3 | null {
    const cameraPos = cache.vector3[0].copy(camera.position);
    const cameraLookAt = camera.getWorldDirection(cache.vector3[1]);
    rayCaster.set(cameraPos, cameraLookAt);
    // Temporarily offset the shared ground plane/sphere by the elevation; restored below so the
    // module-level singletons stay at sea level for other callers.
    if (elevation !== undefined) {
        groundPlane.constant -= elevation;
        groundSphere.radius += elevation;
    }
    const targetWorldPos = new THREE.Vector3();
    const result =
        projection.type === ProjectionType.Planar
            ? rayCaster.ray.intersectPlane(groundPlane, targetWorldPos)
            : rayCaster.ray.intersectSphere(groundSphere, targetWorldPos);
    if (elevation !== undefined) {
        groundPlane.constant = 0;
        groundSphere.radius = EarthConstants.EQUATORIAL_RADIUS;
    }
    return result;
}

/**
 * Constrains given camera target and distance to {@link MapView.maxBounds}.
 *
 * @remarks
 * The resulting
 * target and distance will keep the view within the maximum bounds for a camera with tilt and
 * yaw set to 0.
 * @param target - The camera target.
 * @param distance - The camera distance.
 * @param mapView - The map view whose maximum bounds will be used as constraints.
 * @returns constrained target and distance, or the unchanged input arguments if the view
 * does not have maximum bounds set.
 */
export function constrainTargetAndDistanceToViewBounds(
    target: THREE.Vector3,
    distance: number,
    mapView: MapView
): { target: THREE.Vector3; distance: number } {
    const unconstrained = { target, distance };
    const worldMaxBounds = mapView.worldMaxBounds;
    const camera = mapView.camera;
    const projection = mapView.projection;

    if (!worldMaxBounds) {
        return unconstrained;
    }

    /**
     * Constraints are checked similarly for planar and sphere. The extents of a top down view
     * (even if camera isn't top down) using the given camera distance are compared with those
     * of the maximum bounds to compute a scale. There are two options:
     * a) scale > 1. The view covers a larger area than the maximum bounds.
     * The distance is
     * is reduced to match the bounds extents and the target is set at the bounds center.
     * b) scale <= 1. The view may fit within the bounds without changing the distance, only the
     * target is moved to fit the whole view within the bounds.
     **/
    const boundsSize = worldMaxBounds.getSize(cache.vector3[1]);
    const screenSize = mapView.renderer.getSize(cache.vector2[0]);
    const viewHeight = CameraUtils.convertScreenToWorldSize(
        mapView.focalLength,
        unconstrained.distance,
        screenSize.height
    );
    const viewWidth = viewHeight * camera.aspect;
    const scale = Math.max(viewWidth / boundsSize.x, viewHeight / boundsSize.y);
    const viewHalfSize = new THREE.Vector3(viewWidth / 2, viewHeight / 2, 0);
    const constrained = {
        target: unconstrained.target.clone(),
        distance: unconstrained.distance
    };

    if (projection.type === ProjectionType.Planar) {
        if (scale > 1) {
            // View is larger than the bounds: reduce the distance and center the target.
            constrained.distance /= scale;
            camera
                .getWorldDirection(camera.position)
                .multiplyScalar(-constrained.distance)
                .add(worldMaxBounds.getCenter(constrained.target));
        } else {
            // Shrink the bounds by half the view size and clamp the target into them.
            const targetBounds = cache.box3[0]
                .copy(worldMaxBounds as THREE.Box3)
                .expandByVector(viewHalfSize.multiplyScalar(-1));
            targetBounds
                .clampPoint(unconstrained.target, constrained.target)
                .setZ(unconstrained.target.z);
            if (constrained.target.equals(unconstrained.target)) {
                return unconstrained;
            }

            camera.position.x += constrained.target.x - unconstrained.target.x;
            camera.position.y += constrained.target.y - unconstrained.target.y;
        }
        return constrained;
    }

    // Spherical projection
    if (scale > 1) {
        // Set target to center of max bounds but keeping same height as unconstrained target.
        worldMaxBounds.getCenter(constrained.target);
        constrained.target.setLength(unconstrained.target.length());
        constrained.distance /= scale;
    } else {
        // Compute the bounds where the target must be to ensure a top down view remains within
        // the maximum bounds.
        const targetMaxBounds = cache.obox3[0];
        targetMaxBounds.copy(worldMaxBounds as OrientedBox3);
        targetMaxBounds.position.setLength(unconstrained.target.length());
        targetMaxBounds.extents.sub(viewHalfSize);

        // Project unconstrained target to local tangent plane at the max bounds center.
        const rotMatrix = targetMaxBounds.getRotationMatrix(cache.matrix4[0]);
        const localTarget = cache.vector3[1]
            .copy(constrained.target)
            .sub(targetMaxBounds.position)
            .applyMatrix4(cache.matrix4[1].copy(rotMatrix).transpose())
            .setZ(0);

        // Clamp the projected target with the target bounds and check if it changes.
        const constrainedLocalTarget = cache.vector3[2]
            .copy(localTarget)
            .clamp(
                cache.vector3[3].copy(targetMaxBounds.extents).multiplyScalar(-1),
                targetMaxBounds.extents
            );
        if (constrainedLocalTarget.equals(localTarget)) {
            return unconstrained;
        }

        // Project the local constrained target back into the sphere.
        constrained.target
            .copy(constrainedLocalTarget)
            .applyMatrix4(rotMatrix)
            .add(targetMaxBounds.position);
        const targetHeightSq = targetMaxBounds.position.lengthSq();
        const constTargetDistSq = constrained.target.distanceToSquared(
            targetMaxBounds.position
        );
        const constTargetDistToGround =
            Math.sqrt(targetHeightSq) - Math.sqrt(targetHeightSq - constTargetDistSq);
        constrained.target.addScaledVector(targetMaxBounds.zAxis, -constTargetDistToGround);

        // Set the constrained target to the same height as the unconstrained one.
        constrained.target.setLength(unconstrained.target.length());
    }

    // Pan camera to constrained target and set constrained distance.
    MapViewUtils.panCameraAroundGlobe(
        mapView,
        cache.vector3[1].copy(constrained.target),
        cache.vector3[2].copy(unconstrained.target)
    );
    camera
        .getWorldDirection(camera.position)
        .multiplyScalar(-constrained.distance)
        .add(constrained.target);
    return constrained;
}

/**
 * @internal
 * Computes the target for a given camera and the distance between them.
 * @param projection - The world space projection.
 * @param camera - The camera whose target will be computed.
 * @param elevationProvider - If provided, elevation at the camera position will be used.
 * @returns The target, the distance to it and a boolean flag set to false in case an elevation
 * provider was passed but the elevation was not available yet.
 */
export function getTargetAndDistance(
    projection: Projection,
    camera: THREE.Camera,
    elevationProvider?: ElevationProvider
): { target: THREE.Vector3; distance: number; final: boolean } {
    const cameraPitch = extractAttitude({ projection }, camera).pitch;

    //FIXME: For now we keep the old behaviour when terrain is enabled (i.e. use the camera
    // height above terrain to deduce the target distance).
    // This leads to zoomlevel changes while panning. We have to find a proper solution
    // for terrain (e.g. raycast with the ground surface that is elevated by the average
    // elevation in the scene)
    const elevation = elevationProvider
        ? elevationProvider.getHeight(
              projection.unprojectPoint(camera.position),
              TERRAIN_ZOOM_LEVEL
          )
        : undefined;
    const final = !elevationProvider || elevation !== undefined;

    // Even for a tilt of 90° raycastTargetFromCamera is returning some point almost at
    // infinity.
    const target =
        cameraPitch < MAX_TILT_RAD
            ? getWorldTargetFromCamera(camera, projection, elevation)
            : null;
    if (target !== null) {
        const distance = camera.position.distanceTo(target);
        return { target, distance, final };
    } else {
        // We either reached the [[PITCH_LIMIT]] or we did not hit the ground surface.
        // In this case we do the reverse, i.e. compute some fallback distance and
        // use it to compute the target point by using the camera direction.
        const groundDistance = projection.groundDistance(camera.position);
        const heightAboveTerrain = Math.max(groundDistance - (elevation ?? 0), 0);

        //For flat projection we fallback to the target distance at 89 degree pitch.
        //For spherical projection we fallback to the tangent line distance
        const distance =
            projection.type === ProjectionType.Planar
                ? heightAboveTerrain / Math.cos(Math.min(cameraPitch, MAX_TILT_RAD))
                : Math.sqrt(
                      Math.pow(heightAboveTerrain + EarthConstants.EQUATORIAL_RADIUS, 2) -
                          Math.pow(EarthConstants.EQUATORIAL_RADIUS, 2)
                  );
        const cameraDir = camera.getWorldDirection(cache.vector3[0]);
        cameraDir.multiplyScalar(distance);
        const fallbackTarget = cache.vector3[1];
        fallbackTarget.copy(camera.position).add(cameraDir);
        return { target: fallbackTarget, distance, final };
    }
}

/**
 * Returns the {@link @here/harp-geoutils#GeoCoordinates} of the camera,
 * given its target coordinates on the map and its
 * zoom, yaw and pitch.
 *
 * @param targetCoordinates - Coordinates of the center of the view.
 * @param distance - Distance to the target in meters.
 * @param yawDeg - Camera yaw in degrees.
 * @param pitchDeg - Camera pitch in degrees.
 * @param projection - The projection used to convert between geo and world coordinates.
 * @param result - Optional output vector.
 * @returns Camera position in world space.
 */
export function getCameraPositionFromTargetCoordinates(
    targetCoordinates: GeoCoordinates,
    distance: number,
    yawDeg: number,
    pitchDeg: number,
    projection: Projection,
    result: THREE.Vector3 = new THREE.Vector3()
): THREE.Vector3 {
    const pitchRad = THREE.MathUtils.degToRad(pitchDeg);
    const altitude = Math.cos(pitchRad) * distance;
    const yawRad = THREE.MathUtils.degToRad(yawDeg);
    projection.projectPoint(targetCoordinates, result);
    const groundDistance = distance * Math.sin(pitchRad);
    if (projection.type === ProjectionType.Planar) {
        result.x = result.x + Math.sin(yawRad) * groundDistance;
        result.y = result.y - Math.cos(yawRad) * groundDistance;
        result.z = result.z + altitude;
    } else if (projection.type === ProjectionType.Spherical) {
        // In globe yaw and pitch are understood to be in tangent space.
        // The approach below is
        // to find the Z and Y tangent space axes, then rotate Y around Z by the given yaw, and
        // set its new length (groundDistance). Finally the up vector's length is set to the
        // camera height and added to the transformed Y above.

        // Get the Z axis in tangent space: it is the normalized position vector of the target.
        tangentSpace.z.copy(result).normalize();

        // Get the Y axis (north axis in tangent space):
        tangentSpace.y.set(0, 0, 1).projectOnPlane(tangentSpace.z).normalize();

        // Rotate this north axis by the given yaw, giving the camera direction relative to
        // the target.
        cache.quaternions[0].setFromAxisAngle(tangentSpace.z, yawRad - Math.PI);
        tangentSpace.y.applyQuaternion(cache.quaternions[0]);

        // Push the camera to the specified distance.
        tangentSpace.y.setLength(groundDistance);

        // Now get the actual camera position vector: from the target position, add the
        // previous computation to get the projection of the camera on the ground, then add
        // the height of the camera in the tangent space.
        const height = distance * Math.cos(pitchRad);
        result.add(tangentSpace.y).add(tangentSpace.z.setLength(height));

        // Place the camera at the correct distance from the globe center (right triangle with
        // the earth radius + altitude and the ground distance as legs).
        const a = EarthConstants.EQUATORIAL_RADIUS + altitude;
        const b = Math.sin(pitchRad) * distance;
        const cameraHeight = Math.sqrt(a * a + b * b);
        result.setLength(cameraHeight);
    }
    return result;
}

/**
 * @hidden
 * @internal
 *
 * Add offset to geo points for minimal view box in flat projection with tile wrapping.
 *
 * @remarks
 * In flat projection, with wrap around enabled, we should detect clusters of points around that
 * wrap antimeridian.
 *
 * Here, we fit points into minimal geo box taking world wrapping into account.
 */
export function wrapGeoPointsToScreen(
    points: GeoCoordLike[],
    startPosition?: GeoCoordinates
): GeoCoordinates[] {
    let startIndex = 0;
    if (startPosition === undefined) {
        // No seed position given: use the first point as the initial cluster center.
        startPosition = GeoCoordinates.fromObject(points[0]);
        startIndex = 1;
    }
    // Running latitude extremes and longitude span [west, east] centered at lonCenter.
    let north = startPosition.latitude;
    let south = startPosition.latitude;
    let lonCenter = MathUtils.normalizeLongitudeDeg(startPosition.longitude);
    let lonSpan = 0;
    let east = startPosition.longitude;
    let west = startPosition.longitude;

    const result: GeoCoordinates[] = [];
    result.push(new GeoCoordinates(north, lonCenter));
    for (let i = startIndex; i < points.length; i++) {
        const p = GeoCoordinates.fromObject(points[i]);
        if (p.latitude > north) {
            north = p.latitude;
        } else if (p.latitude < south) {
            south = p.latitude;
        }

        // Re-express the longitude relative to the running center so that points on the far
        // side of the antimeridian get an unwrapped (possibly out-of-[-180,180]) longitude.
        let longitude = MathUtils.normalizeLongitudeDeg(p.longitude);
        const relToCenter = MathUtils.angleDistanceDeg(lonCenter, longitude);
        longitude = lonCenter - relToCenter;
        // Widen the east/west span and recenter when the point falls outside the current span.
        // NOTE(review): the east branch uses `lonCenter - relToCenter` (== `longitude`) while
        // the west branch uses `longitude` directly — presumably equivalent; verify.
        if (relToCenter < 0 && -relToCenter > lonSpan / 2) {
            east = Math.max(east, lonCenter - relToCenter);
            lonSpan = east - west;
            lonCenter = (east + west) / 2;
        } else if (relToCenter > 0 && relToCenter > lonSpan / 2) {
            west = Math.min(west, longitude);
            lonSpan = east - west;
            lonCenter = (east + west) / 2;
        }
        result.push(new GeoCoordinates(p.latitude, longitude));
    }
    return result;
}

/**
 * @hidden
 * @internal
 *
 * Given `cameraPos`, force all points that lie on non-visible sphere half to be "near" max
 * possible viewable circle from given camera position.
 *
 * @remarks
 * Assumes that sphere projection with world center is in `(0, 0, 0)`.
 */
export function wrapWorldPointsToView(points: THREE.Vector3[], cameraPos: THREE.Vector3) {
    const cameraPosNormalized = cameraPos.clone().normalize();
    for (const point of points) {
        if (point.angleTo(cameraPos) > Math.PI / 2) {
            // Point is on other side of sphere, we "clamp it to" max possible viewable circle
            // from given camera position
            const pointLen = point.length();
            // Project onto the plane through the globe center, keeping the original radius.
            point.projectOnPlane(cameraPosNormalized).setLength(pointLen);
        }
    }
}

/**
 * @hidden
 * @internal
 *
 * Return `GeoPoints` bounding {@link @here/harp-geoutils#GeoBox}
 * applicable for {@link getFitBoundsDistance}.
 *
 * @returns {@link @here/harp-geoutils#GeoCoordinates} set that covers `box`
 */
export function geoBoxToGeoPoints(box: GeoBox): GeoCoordinates[] {
    const center = box.center;
    // Four corners plus the edge midpoints — enough samples for fitting the view.
    return [
        new GeoCoordinates(box.north, box.west),
        new GeoCoordinates(box.north, box.east),
        new GeoCoordinates(center.latitude, box.west),
        new GeoCoordinates(center.latitude, box.east),
        new GeoCoordinates(box.south, box.west),
        new GeoCoordinates(box.south, box.east),
        new GeoCoordinates(box.north, center.longitude),
        new GeoCoordinates(box.south, center.longitude)
    ];
}

/**
 * @hidden
 * @internal
 *
 * Get minimal distance required for `camera` looking at `worldTarget` to cover `points`.
 *
 * All dimensions belong to world space.
 *
 * @param points - points which must be in view.
 * @param worldTarget - readonly, world target of {@link MapView}
 * @param camera - readonly, camera with proper `position` and rotation set
 * @returns new distance to camera to be used with {@link (MapView.lookAt:WITH_PARAMS)}
 */
export function getFitBoundsDistance(
    points: THREE.Vector3[],
    worldTarget: THREE.Vector3,
    camera: THREE.PerspectiveCamera
): number {
    // Diagram of the camera space YZ plane with the initial situation. Camera is at C0 and may
    // need to be moved to make point P visible.
    //
    //                                   camY
    //             targetDist              ^
    //            |<-------->|             Ps
    //      constD      pEyeZ|            /|   ^
    //     |<-->|<--->|      |           / |   |
    //     |    |     |      |          /  |   |  |ndcY-O.y|*h/2
    //     |    |     |      |         /   |   |
    // <---T----P'----C0-----O             v
    // camZ     |_|         /|      |     C0 - Initial camera position
    //          |          / |<--->|      T  - Camera target
    //     PcamY|         /   f           P  - Bounds point (world space)
    //          |        / (focal length) O  - Principal point.
    //          |/                        h  - viewport height.
    //          P
    //
    // Diagram of camera space YZ plane with the final camera position C1 that leaves P at the
    // edge of the viewport. The new camera distance is the sum of a constant term (constD) and
    // the new distance to P (newPEyeZ), which is the initial distance (pEyeZ) multiplied by a
    // factor that needs to be found.
    //
    //                                             camY
    //      constD       newPEyeZ                    ^     Ps
    //     |<-->|<--------------->|                 _-`|   ^
    //     |    |                 |             _-`    |   |  |sign(ndcY)-O.y|h/2
    //     |    |                 |         _-`        |   |
    // <---T----P'----C0----------C1---------O             v
    // camZ     |_|                      _-`|   |   C0 - Initial camera position
    //          |                    _-`    |<-------->| C1 - New camera position
    //     PcamY|                _-`         f           T  - Camera target
    //          |            _-`   (focal length)        P  - Bounds point (world space)
    //          |        _-`                             Ps - P projected on screen.
    //          P-`                                      O  - Principal point.
    //                                                   h  - viewport height.
    //
    // P is between target and initial camera position, but calculations are equivalent for
    // points beyond the target (pEyeZ negative) or behind the camera (constD negative).
    // Right triangles PP'C0 and PsOC0 are equivalent, as well as PP'C1 and Ps0C1, that means:
    // |ndcY-O.y|*h/(2*f) = PcamY / |pEyeZ| (1) (ndcY-O.y,pEyeZ may be negative, take abs vals).
    // |sign(ndcY)-O.y|h/(2*f) = PcamY / newPEyeZ (2)
    // Dividing (1) by (2) and solving for newPEyeZ we get:
    // newPEyeZ = |pEyeZ||ndcY-O.y| / |sign(ndcY)-O.y|
    // The target distance to project P at the top/bottom border of the viewport is then:
    // constD + newPEyeZ = targetDist - pEyeZ + |pEyeZ||ndcY-O.y| / |sign(ndcY)-O.y|
    // The target distance to project P at the left/right border of the viewport is similarly:
    // targetDist - pEyeZ + |pEyeZ||ndcX-O.x| / |sign(ndcX)-O.x|
    // Take the largest of both distances to ensure the point is inside the viewport:
    // newDistance = targetDist - pEyeZ +
    //               max(|ndcX-O.x|/|sign(ndcX)-O.x|, |ndcY-O.y|/|sign(ndcY)-O.y|) * |pEyeZ|
    const targetDist = cache.vector3[0].copy(worldTarget).sub(camera.position).length();
    const ppalPoint = CameraUtils.getPrincipalPoint(camera);
    let newDistance = targetDist;

    const getDistanceFactor = (pointNDC: number, ppNDC: number) => {
        // Use as maximum NDC a value slightly smaller than 1 to ensure the point is visible
        // with the final camera distance. Otherwise any precision loss might leave it just
        // outside of the viewport.
        const maxNDC = 0.99;
        return Math.abs(pointNDC) > 1
            ? Math.abs((pointNDC - ppNDC) / (maxNDC * Math.sign(pointNDC) - ppNDC))
            : 1;
    };
    for (const point of points) {
        // cache.vector3[0] holds the eye-space point after this line; only z is read here.
        const pEyeZ = -cache.vector3[0].copy(point).applyMatrix4(camera.matrixWorldInverse).z;
        // Project the eye-space point to NDC (applyMatrix4 performs the perspective divide).
        const pointNDC = cache.vector3[0].applyMatrix4(camera.projectionMatrix);
        const maxFactor = Math.max(
            getDistanceFactor(pointNDC.x, ppalPoint.x),
            getDistanceFactor(pointNDC.y, ppalPoint.y)
        );
        if (maxFactor > 1) {
            const constDist = targetDist - pEyeZ;
            // Candidate new target distance (constD + scaled eye-space depth, see formula).
            const newPEyeZ = Math.abs(pEyeZ) * maxFactor + constDist;
            newDistance = Math.max(newDistance, newPEyeZ);
        }
    }
    return newDistance;
}

/**
 * @hidden
 * @internal
 *
 * Parameters for [[getFitBoundsLookAtParams]] function.
 */
export interface FitPointParams {
    /** Tilt of the view in degrees. */
    tilt: number;
    /** Heading of the view in degrees. */
    heading: number;
    /** Projection in use. */
    projection: Projection;
    /** Minimum (start) distance used as the fitting seed. */
    minDistance: number;
    /** Camera to clone for the fitting computation. */
    camera: THREE.PerspectiveCamera;
}

/**
 * @hidden
 * @internal
 *
 * Get {@link LookAtParams} that fit all `worldPoints`
 * giving that {@link MapView} will target at
 * `geoTarget`.
 *
 * @param geoTarget - desired target (see {@link MapView.target}) as geo point
 * @param worldTarget - same as `geoTarget` but in world space
 * @param worldPoints - points we want to see
 * @param params - other params derived from {@link MapView}.
 */
export function getFitBoundsLookAtParams(
    geoTarget: GeoCoordinates,
    worldTarget: THREE.Vector3,
    worldPoints: THREE.Vector3[],
    params: FitPointParams
) {
    const { tilt, heading, projection } = params;
    const startDistance = params.minDistance;
    // Work on a clone so the view's real camera is not disturbed.
    const tmpCamera = params.camera.clone() as THREE.PerspectiveCamera;
    getCameraRotationAtTarget(projection, geoTarget, -heading, tilt, tmpCamera.quaternion);
    getCameraPositionFromTargetCoordinates(
        geoTarget,
        startDistance,
        -heading,
        tilt,
        projection,
        tmpCamera.position
    );
    tmpCamera.updateMatrixWorld(true);
    if (projection.type === ProjectionType.Spherical) {
        wrapWorldPointsToView(worldPoints, tmpCamera.position);
    }
    const distance = getFitBoundsDistance(worldPoints, worldTarget, tmpCamera);
    return { target: geoTarget, distance, heading, tilt };
}

/**
 * @deprecated use getCameraPositionFromTargetCoordinates instead
 */
export function getCameraCoordinatesFromTargetCoordinates(
    targetCoordinates: GeoCoordinates,
    distance: number,
    yawDeg: number,
    pitchDeg: number,
    mapView: MapView
): GeoCoordinates {
    return mapView.projection.unprojectPoint(
        getCameraPositionFromTargetCoordinates(
            targetCoordinates,
            distance,
            yawDeg,
            pitchDeg,
            mapView.projection,
            cache.vector3[1]
        )
    );
}

/**
 * Casts a ray in NDC space from the current map view and returns the intersection point of that
 * ray with the map in world space.
 *
 * @param mapView - Instance of MapView.
 * @param pointOnScreenXinNDC - X coordinate in NDC space.
 * @param pointOnScreenYinNDC - Y coordinate in NDC space.
 * @param elevation - Optional param used to offset the ground plane. Used when wanting to pan
 * based on a plane at some altitude. Necessary for example when panning with terrain.
 *
 * @returns Intersection coordinates, or `null` if raycast failed.
 */
export function rayCastWorldCoordinates(
    mapView: MapView | { camera: THREE.Camera; projection: Projection },
    pointOnScreenXinNDC: number,
    pointOnScreenYinNDC: number,
    elevation?: number
): THREE.Vector3 | null {
    const pointInNDCPosition = cache.vector3[0].set(
        pointOnScreenXinNDC,
        pointOnScreenYinNDC,
        0
    );

    mapView.camera.updateMatrixWorld();
    const cameraPos = cache.vector3[1].copy(mapView.camera.position);

    cache.matrix4[0].extractRotation(mapView.camera.matrixWorld);

    // Prepare the unprojection matrix which projects from NDC space to camera space
    // and takes the current rotation of the camera into account.
    cache.matrix4[1].multiplyMatrices(
        cache.matrix4[0],
        cache.matrix4[1].copy(mapView.camera.projectionMatrix).invert()
    );
    // Unproject the point via the unprojection matrix.
    const pointInCameraSpace = pointInNDCPosition.applyMatrix4(cache.matrix4[1]);
    // Use the point in camera space as the vector towards this point.
    rayCaster.set(cameraPos, pointInCameraSpace.normalize());
    // Temporarily offset the shared ground plane/sphere by the elevation; restored below.
    if (elevation !== undefined) {
        groundPlane.constant -= elevation;
        groundSphere.radius += elevation;
    }

    const worldPosition = new THREE.Vector3();
    const result =
        mapView.projection.type === ProjectionType.Planar
            ? rayCaster.ray.intersectPlane(groundPlane, worldPosition)
            : rayCaster.ray.intersectSphere(groundSphere, worldPosition);
    if (elevation !== undefined) {
        groundPlane.constant = 0;
        groundSphere.radius = EarthConstants.EQUATORIAL_RADIUS;
    }
    return result;
}

/**
 * Pans the camera according to the projection.
 *
 * @param mapView - Instance of MapView.
 * @param offsetX - In world space.
 * Value > 0 will pan the map to the right, value < 0 will pan
 * the map to the left in default camera orientation.
 * @param offsetY - In world space. Value > 0 will pan the map upwards, value < 0 will pan the
 * map downwards in default camera orientation.
 */
export function panCameraAboveFlatMap(
    mapView: MapView,
    offsetX: number,
    offsetY: number
): void {
    mapView.camera.position.x += offsetX;
    mapView.camera.position.y += offsetY;
}

/**
 * The function doing a pan in the spherical space
 * when {@link MapView}'s active [[ProjectionType]]
 * is spherical. In other words, the function that rotates the camera around the globe.
 *
 * @param mapView - MapView instance.
 * @param fromWorld - Start vector representing the scene position of a geolocation.
 * @param toWorld - End vector representing the scene position of a geolocation.
 */
export function panCameraAroundGlobe(
    mapView: MapView,
    fromWorld: THREE.Vector3,
    toWorld: THREE.Vector3
) {
    // Rotation that moves `fromWorld` onto `toWorld`, inverted and applied to the camera.
    cache.quaternions[0]
        .setFromUnitVectors(fromWorld.normalize(), toWorld.normalize())
        .invert();
    cache.matrix4[0].makeRotationFromQuaternion(cache.quaternions[0]);
    mapView.camera.applyMatrix4(cache.matrix4[0]);
    mapView.camera.updateMatrixWorld();
}

/**
 * Rotates the camera by the given delta yaw and delta pitch. The pitch will be clamped to the
 * maximum possible tilt to the new target, and under the horizon in sphere projection.
 *
 * @param mapView - The {@link MapView} instance in use.
 * @param deltaYawDeg - Delta yaw in degrees.
 * @param deltaPitchDeg - Delta pitch in degrees.
 * @param maxTiltAngleRad - Max tilt angle in radians.
 */
export function rotate(
    mapView: { projection: Projection; camera: THREE.PerspectiveCamera },
    deltaYawDeg: number,
    deltaPitchDeg: number = 0,
    maxTiltAngleRad = Math.PI / 4
) {
    // 1. Apply yaw: rotate around the vertical axis.
    mapView.camera.rotateOnWorldAxis(
        mapView.projection.type === ProjectionType.Spherical
            ? cache.vector3[0].copy(mapView.camera.position).normalize()
            : cache.vector3[0].set(0, 0, 1),
        THREE.MathUtils.degToRad(-deltaYawDeg)
    );
    mapView.camera.updateMatrixWorld();

    // 2. Apply pitch: rotate around the camera's local X axis.
    if (deltaPitchDeg === 0) {
        return;
    }
    const pitch = MapViewUtils.extractAttitude(mapView, mapView.camera).pitch;
    // `maxTiltAngle` is equivalent to a `maxPitchAngle` in flat projections.
    let newPitch = THREE.MathUtils.clamp(
        pitch + THREE.MathUtils.degToRad(deltaPitchDeg),
        0,
        maxTiltAngleRad
    );
    // In sphere projection, the value of a maximum pitch is smaller than the value of the
    // maximum tilt, as the curvature of the surface adds up to it.
    if (mapView.projection.type === ProjectionType.Spherical) {
        // Deduce max pitch from max tilt. To this end the sine law of triangles is used below.
        const maxPitch = Math.asin(
            (EarthConstants.EQUATORIAL_RADIUS * Math.sin(Math.PI - maxTiltAngleRad)) /
                mapView.camera.position.length()
        );
        newPitch = Math.min(newPitch, maxPitch);
    }
    mapView.camera.rotateX(newPitch - pitch);
}

/**
 * Computes the rotation of the camera according to yaw and pitch in degrees. The computations
 * hinge on the current `projection` and `target`, because yaw and pitch are defined in
 * tangent space of the target point.
 *
 * **Note:** `yaw == 0 && pitch == 0` will north up the map and you will look downwards onto the
 * map.
 *
 * @param projection - Current projection.
 * @param target - The camera target.
 * @param yawDeg - Yaw in degrees, counter-clockwise (as opposed to azimuth), starting north.
 * @param pitchDeg - Pitch in degrees.
*/ export function getCameraRotationAtTarget( projection: Projection, target: GeoCoordinates, yawDeg: number, pitchDeg: number, result: THREE.Quaternion = new THREE.Quaternion() ): THREE.Quaternion { const transform = cache.transforms[0]; projection.localTangentSpace(target, transform); cache.matrix4[0].makeBasis(transform.xAxis, transform.yAxis, transform.zAxis); result.setFromRotationMatrix(cache.matrix4[0]); cache.quaternions[0].setFromAxisAngle( cache.vector3[1].set(0, 0, 1), THREE.MathUtils.degToRad(yawDeg) ); cache.quaternions[1].setFromAxisAngle( cache.vector3[1].set(1, 0, 0), THREE.MathUtils.degToRad(pitchDeg) ); result.multiply(cache.quaternions[0]); result.multiply(cache.quaternions[1]); return result; } /** * Sets the rotation of the camera according to yaw and pitch in degrees. The computations hinge * on the current projection and `geoCenter`, because yaw and pitch are defined in tangent * space. In particular, `MapView#geoCenter` needs to be set before calling `setRotation`. * * **Note:** `yaw == 0 && pitch == 0` will north up the map and you will look downwards onto the * map. * * @param mapView - Instance of MapView. * @param yawDeg - Yaw in degrees, counter-clockwise (as opposed to azimuth), starting north. * @param pitchDeg - Pitch in degrees. */ export function setRotation(mapView: MapView, yawDeg: number, pitchDeg: number) { getCameraRotationAtTarget( mapView.projection, mapView.geoCenter, yawDeg, pitchDeg, mapView.camera.quaternion ); } /** * Extracts current camera tilt angle in radians. * * @param camera - The [[Camera]] in use. * @param projection - The {@link @here/harp-geoutils#Projection} used to * convert between geo and world coordinates. 
* * @deprecated Use MapView.tilt */ export function extractCameraTilt(camera: THREE.Camera, projection: Projection): number { // For planar projections the camera target point local tangent is the same // at every point on the ground (ignoring terrain fluctuations), so we may // simply use inverted ground normal for tilt calculation. This simplifies // the more generic calculus used for spherical projections. if (projection.type === ProjectionType.Planar) { const lookAt: THREE.Vector3 = camera.getWorldDirection(cache.vector3[0]).normalize(); const normal: THREE.Vector3 = projection .surfaceNormal(camera.position, cache.vector3[1]) .negate(); const cosTheta = lookAt.dot(normal); return Math.acos(THREE.MathUtils.clamp(cosTheta, -1, 1)); } else { // Sanity check if new projection type is introduced. assert(projection.type === ProjectionType.Spherical); const targetGeoCoords = MapViewUtils.getGeoTargetFromCamera(camera, projection); // If focus point is lost we then expose maximum allowable tilt value. if (targetGeoCoords !== null) { return MapViewUtils.extractTiltAngleFromLocation( projection, camera, targetGeoCoords ); } else { logger.warn( "MapView camera is pointing in the void, using maxTilt: ", MAX_TILT_RAD ); return MAX_TILT_RAD; } } } /** * Extracts yaw, pitch, and roll rotation in radians. * - Yaw : Rotation around the vertical axis, counter-clockwise (as opposed to azimuth), * starting north. * - Pitch :Rotation around the horizontal axis. * - Roll : Rotation around the view axis. * * @see https://en.wikipedia.org/wiki/Conversion_between_quaternions_and_Euler_angles * * @param options - Subset of necessary {@link MapView} properties. * @param object - The [[THREE.Object3D]] instance to extract the rotations from. */ export function extractAttitude( mapView: { projection: Projection }, object: THREE.Object3D ): Attitude { // 1. Build the matrix of the tangent space of the object. 
cache.vector3[1].setFromMatrixPosition(object.matrixWorld); // Ensure using world position. mapView.projection.localTangentSpace(cache.vector3[1], { xAxis: tangentSpace.x, yAxis: tangentSpace.y, zAxis: tangentSpace.z, position: cache.vector3[0] }); cache.matrix4[1].makeBasis(tangentSpace.x, tangentSpace.y, tangentSpace.z); // 2. Change the basis of matrixWorld to the tangent space to get the new base axes. cache.matrix4[0].copy(cache.matrix4[1]).invert().multiply(object.matrixWorld); space.x.setFromMatrixColumn(cache.matrix4[0], 0); space.y.setFromMatrixColumn(cache.matrix4[0], 1); space.z.setFromMatrixColumn(cache.matrix4[0], 2); // 3. Deduce orientation from the base axes. let yaw = 0; let pitch = 0; let roll = 0; // Decompose rotation matrix into Z0 X Z1 Euler angles. const d = space.z.dot(cache.vector3[1].set(0, 0, 1)); if (d < 1.0 - Number.EPSILON) { if (d > -1.0 + Number.EPSILON) { yaw = Math.atan2(space.z.x, -space.z.y); pitch = Math.acos(space.z.z); roll = Math.atan2(space.x.z, space.y.z); } else { // Looking bottom-up with space.z.z == -1.0 yaw = -Math.atan2(-space.y.x, space.x.x); pitch = 180; roll = 0; } } else { // Looking top-down with space.z.z == 1.0 yaw = Math.atan2(-space.y.x, space.x.x); pitch = 0.0; roll = 0.0; } return { yaw, pitch, roll }; } /** * Gets the spherical coordinates in radian of the object to the coordinates of `point`. * * Note: this method can be used to get the direction that an object points to, when `location` * is the target of that object, by adding PI to it. Otherwise it only returns the spherical * coordinates of `object` in the tangent space of `location`. * * @param mapView - The {@link MapView} instance to consider. * @param object - The object to get the coordinates from. * @param location - The reference point. 
 */
export function extractSphericalCoordinatesFromLocation(
    mapView: { projection: Projection },
    object: THREE.Object3D,
    location: GeoCoordinatesLike | Vector3Like
): { azimuth: number; tilt: number } {
    // if (projection instanceof MapView) {
    //     logger.warn("Passing MapView to extractSphericalCoordinatesFromLocation is deprecated");
    //     projection = projection.projection;
    // }
    // Build the local tangent frame at `location`; the frame origin (world position
    // of `location`) is written into cache.vector3[0].
    mapView.projection.localTangentSpace(location, {
        xAxis: tangentSpace.x,
        yAxis: tangentSpace.y,
        zAxis: tangentSpace.z,
        position: cache.vector3[0]
    });

    let tilt = 0;
    let azimuth = 0;

    // Get point to object vector in `cache.vector3[1]` and deduce `tilt` from the angle with
    // tangent Z.
    cache.vector3[1].copy(object.position).sub(cache.vector3[0]).normalize();
    if (cache.vector3[1].dot(tangentSpace.z) > 1 - Number.EPSILON) {
        // Top down view: the azimuth of the object would be opposite the yaw, and clockwise.
        azimuth = Math.PI - extractAttitude(mapView, object).yaw;
        // Wrap between -PI and PI.
        azimuth = Math.atan2(Math.sin(azimuth), Math.cos(azimuth));
        tilt = 0;
        return { tilt, azimuth };
    }
    tilt = cache.vector3[1].angleTo(tangentSpace.z);
    // Tilted view: the azimuth is the direction of the object from the origin,
    // measured in the tangent plane (component along tangent Z removed).
    cache.vector3[1]
        .copy(object.position)
        .sub(cache.vector3[0])
        .projectOnPlane(tangentSpace.z)
        .normalize();
    azimuth = cache.vector3[1].angleTo(tangentSpace.y);
    // angleTo() is unsigned; the sign of the triple product tells on which side of
    // tangent Y the object lies.
    if (cache.vector3[1].cross(tangentSpace.y).dot(tangentSpace.z) < 0) {
        azimuth = -azimuth;
    }
    return { tilt, azimuth };
}

/**
 * Gets the tilt angle (in radians) of the object relative to the coordinates of `location`.
 *
 * Note: this method can be used to get the direction that an object points to, when `location`
 * is the target of that object, by adding PI to it. Otherwise it only returns the tilt angle
 * (in radians) of `object` in the tangent space of `location`.
 *
 * @param projection - The {@link @here/harp-geoutils#Projection} used when
 * converting from geo to world coordinates.
 * @param object - The object to get the coordinates from.
 * @param location - The reference point.
 * @param tiltAxis - Optional axis used to define the rotation about which the object's tilt
 * occurs, the direction vector to the location from the camera is projected on the plane with
 * the given angle.
 */
export function extractTiltAngleFromLocation(
    projection: Projection,
    object: THREE.Object3D,
    location: GeoCoordinates | Vector3Like,
    tiltAxis?: THREE.Vector3
): number {
    // Build the local tangent frame at `location`; the frame origin is written to
    // cache.vector3[0].
    projection.localTangentSpace(location, {
        xAxis: tangentSpace.x,
        yAxis: tangentSpace.y,
        zAxis: tangentSpace.z,
        position: cache.vector3[0]
    });
    // Get point to object vector (dirVec) and compute the `tilt` as the angle with tangent Z.
    const dirVec = cache.vector3[2].copy(object.position).sub(cache.vector3[0]);
    if (tiltAxis) {
        // Constrain the measurement to the plane perpendicular to `tiltAxis`.
        // NOTE(review): this mutates the shared scratch vector `tangentSpace.z` in
        // place — callers must not rely on it afterwards.
        dirVec.projectOnPlane(tiltAxis);
        tangentSpace.z.projectOnPlane(tiltAxis).normalize();
    }
    const dirLen = dirVec.length();
    if (dirLen < epsilon) {
        // Degenerate case: the object sits (numerically) exactly at `location`.
        logger.error("Can not calculate tilt for the zero length vector!");
        return 0;
    }
    dirVec.divideScalar(dirLen);
    const cosTheta = dirVec.dot(tangentSpace.z);
    if (cosTheta >= 1 - Number.EPSILON) {
        // Top down view.
        return 0;
    }
    // Clamp guards acos() against rounding error outside [-1, 1].
    return Math.acos(THREE.MathUtils.clamp(cosTheta, -1, 1));
}

/**
 * Get perspective camera frustum planes distances.
 * @deprecated
 * @return all plane distances in helper object.
*/ export function getCameraFrustumPlanes( camera: THREE.PerspectiveCamera ): { left: number; right: number; top: number; bottom: number; near: number; far: number } { const near = camera.near; const far = camera.far; let top = (near * Math.tan(THREE.MathUtils.degToRad(0.5 * camera.fov))) / camera.zoom; let height = 2 * top; let width = camera.aspect * height; let left = -0.5 * width; const view = camera.view; if (view !== null && view.enabled) { const fullWidth = view.fullWidth; const fullHeight = view.fullHeight; left += (view.offsetX * width) / fullWidth; top -= (view.offsetY * height) / fullHeight; width *= view.width / fullWidth; height *= view.height / fullHeight; } // Correct by skew factor left += camera.filmOffset !== 0 ? (near * camera.filmOffset) / camera.getFilmWidth() : 0; return { left, right: left + width, top, bottom: top - height, near, far }; } /** * Casts a ray in NDC space from the current view of the camera and returns the intersection * point of that ray against the map in geo coordinates. The return value can be `null` when * the raycast is above the horizon. * * @param mapView - Instance of MapView. * @param pointOnScreenXNDC - Abscissa in NDC space. * @param pointOnScreenYNDC - Ordinate in NDC space. * @returns Intersection geo coordinates, or `null` if raycast is above the horizon. */ export function rayCastGeoCoordinates( mapView: MapView, pointOnScreenXinNDC: number, pointOnScreenYinNDC: number ): GeoCoordinates | null { const worldCoordinates = rayCastWorldCoordinates( mapView, pointOnScreenXinNDC, pointOnScreenYinNDC ); if (!worldCoordinates) { return null; } return mapView.projection.unprojectPoint(worldCoordinates); } /** * Calculates and returns the distance from the ground, which is needed to put the camera to * this height, to see the size of the area that would be covered by one tile for the given zoom * level. * * @param mapView - Instance of MapView. * @param options - Subset of necessary {@link MapView} properties. 
*/ export function calculateDistanceToGroundFromZoomLevel( mapView: { projection: Projection; focalLength: number; camera: THREE.PerspectiveCamera }, zoomLevel: number ): number { const cameraPitch = extractAttitude(mapView, mapView.camera).pitch; const tileSize = EarthConstants.EQUATORIAL_CIRCUMFERENCE / Math.pow(2, zoomLevel); return ((mapView.focalLength * tileSize) / 256) * Math.cos(cameraPitch); } /** * Calculates and returns the distance to the target point. * * @param options - Necessary subset of MapView properties to compute the distance. * @param zoomLevel - The zoom level to get the equivalent height to. */ export function calculateDistanceFromZoomLevel( options: { focalLength: number }, zoomLevel: number ): number { const tileSize = EarthConstants.EQUATORIAL_CIRCUMFERENCE / Math.pow(2, zoomLevel); return (options.focalLength * tileSize) / 256; } /** * Calculates the zoom level, which corresponds to the current distance from * camera to lookAt point. * Therefore the zoom level is a `float` and not an `int`. The height of the camera can be in * between zoom levels. By setting the zoom level, you change the height position of the camera * in away that the field of view of the camera should be able to cover one tile for the given * zoom level. * * As an example for this, when you have a tile of zoom level 14 in front of the camera and you * set the zoom level of the camera to 14, then you are able to see the whole tile in front of * you. * * @param options - Subset of necessary {@link MapView} properties. * @param distance - The distance in meters, which are scene units in {@link MapView}. 
*/ export function calculateZoomLevelFromDistance( options: { focalLength: number; minZoomLevel: number; maxZoomLevel: number }, distance: number ): number { const tileSize = (256 * distance) / options.focalLength; const zoomLevel = THREE.MathUtils.clamp( Math.log2(EarthConstants.EQUATORIAL_CIRCUMFERENCE / tileSize), options.minZoomLevel, options.maxZoomLevel ); return snapToCeilingZoomLevel(zoomLevel); } /** * @deprecated * Translates a linear clip-space distance value to the actual value stored in the depth buffer. * This is useful as the depth values are not stored in the depth buffer linearly, and this can * lead into confusing behavior when not taken into account. * * @param clipDistance - Distance from the camera in clip space (range: [0, 1]). * @param camera - Camera applying the perspective projection. */ export function calculateDepthFromClipDistance( clipDistance: number, camera: THREE.Camera ): number { const perspCam = camera as THREE.PerspectiveCamera; const cameraRange = perspCam.far - perspCam.near; const viewSpaceDistance = clipDistance * perspCam.far; return (1.0 - perspCam.near / viewSpaceDistance) * (perspCam.far / cameraRange); } /** * @deprecated * Translates a linear distance value [0..1], where 1 is the distance to the far plane, into * [0..cameraFar]. * * @param distance - Distance from the camera (range: [0, 1]). * @param camera - Camera applying the perspective projection. */ export function cameraToWorldDistance(distance: number, camera: THREE.Camera): number { const perspCam = camera as THREE.PerspectiveCamera; return distance * perspCam.far; } /** * @deprecated */ export function calculateVerticalFovByHorizontalFov(hFov: number, aspect: number): number { return 2 * Math.atan(Math.tan(hFov / 2) / aspect); } /** * @deprecated Use {@link CameraUtils.getHorizontalFov}. 
 */
export function calculateHorizontalFovByVerticalFov(vFov: number, aspect: number): number {
    // Delegates to CameraUtils via the shared scratch camera `tmpCamera`.
    tmpCamera.fov = THREE.MathUtils.radToDeg(vFov);
    tmpCamera.aspect = aspect;
    return CameraUtils.getHorizontalFov(tmpCamera);
}

/**
 * @deprecated Use {@link CameraUtils.setVerticalFov}.
 */
export function calculateFocalLengthByVerticalFov(vFov: number, height: number): number {
    // setVerticalFov takes into account the principal point position to support
    // off-center projections. Keep previous behaviour by passing a camera with centered
    // principal point.
    CameraUtils.setPrincipalPoint(tmpCamera, new THREE.Vector2());
    CameraUtils.setVerticalFov(tmpCamera, vFov, height);
    return CameraUtils.getFocalLength(tmpCamera)!;
}

/**
 * @deprecated Use {@link CameraUtils.setFocalLength}.
 */
export function calculateFovByFocalLength(focalLength: number, height: number): number {
    // setFocalLength takes into account the principal point position to support
    // off-center projections. Keep previous behaviour by passing a camera with centered
    // principal point.
    CameraUtils.setPrincipalPoint(tmpCamera, new THREE.Vector2());
    CameraUtils.setFocalLength(tmpCamera, focalLength, height);
    return tmpCamera.fov;
}

/**
 * @deprecated Use {@link CameraUtils.convertWorldToScreenSize}.
 */
export const calculateScreenSizeByFocalLength = CameraUtils.convertWorldToScreenSize;

/**
 * @deprecated Use {@link CameraUtils.convertScreenToWorldSize}.
 */
export const calculateWorldSizeByFocalLength = CameraUtils.convertScreenToWorldSize;

/**
 * @deprecated
 */
export const estimateObject3dSize = Object3DUtils.estimateSize;

/**
 * Check if tiles or other content is currently being loaded.
 *
 * This method can be removed once HARP-7932 is implemented.
 *
 * @returns `true` if MapView has visible tiles or other content that is being loaded.
*/ export function mapViewIsLoading(mapView: MapView) { let numTilesLoading = 0; for (const tileList of mapView.visibleTileSet.dataSourceTileList) { numTilesLoading += tileList.numTilesLoading; for (const tile of tileList.visibleTiles) { if (!tile.allGeometryLoaded) { numTilesLoading++; } } } let isLoading = numTilesLoading > 0; if (mapView.textElementsRenderer !== undefined) { isLoading = isLoading || mapView.textElementsRenderer.loading; } isLoading = isLoading || !mapView.poiTableManager.finishedLoading || !mapView.visibleTileSet.allVisibleTilesLoaded; return isLoading; } export function closeToFrustum( point: THREE.Vector3, camera: THREE.Camera, eps: number = 1e-13 ): boolean { const ndcPoint = new THREE.Vector3().copy(point).project(camera); if ( Math.abs(ndcPoint.x) - eps < 1 && Math.abs(ndcPoint.y) - eps < 1 && Math.abs(ndcPoint.z) - eps < 1 ) { return true; } return false; } /** * @deprecated Use {@link @here/harp-utils#DOMUtils.getBrowserLanguages} */ export const getBrowserLanguages = DOMUtils.getBrowserLanguages; } export namespace TileOffsetUtils { /** * @deprecated Use {@link @here/harp-geoutils#TileKeyUtils.getKeyForTileKeyAndOffset}. */ export const getKeyForTileKeyAndOffset = TileKeyUtils.getKeyForTileKeyAndOffset; /** * @deprecated Use {@link @here/harp-geoutils#TileKeyUtils.getKeyForTileKeyAndOffset}. */ export const extractOffsetAndMortonKeyFromKey = TileKeyUtils.extractOffsetAndMortonKeyFromKey; /** * @deprecated Use {@link @here/harp-geoutils#TileKeyUtils.getParentKeyFromKey}. */ export const getParentKeyFromKey = TileKeyUtils.getParentKeyFromKey; }
the_stack
/**
 * NOTE(jalextowle): This comment must be here so that typedoc knows that the above
 * comment is a module comment
 */
import Dexie from 'dexie';
import { BatchingDatastore } from './datastore';

// Union of every row shape this database stores.
export type Record = Order | MiniHeader | Metadata;

// Construction options for `Database`.
export interface Options {
    dataSourceName: string;
    maxOrders: number;
    maxMiniHeaders: number;
}

// Declarative query: optional filters, sort orders, limit and offset.
export interface Query<T extends Record> {
    filters?: FilterOption<T>[];
    sort?: SortOption<T>[];
    limit?: number;
    offset?: number;
}

export interface SortOption<T extends Record> {
    field: Extract<keyof T, string>;
    direction: SortDirection;
}

export interface FilterOption<T extends Record> {
    field: Extract<keyof T, string>;
    kind: FilterKind;
    value: any;
}

export enum SortDirection {
    Asc = 'ASC',
    Desc = 'DESC',
}

export enum FilterKind {
    Equal = '=',
    NotEqual = '!=',
    Less = '<',
    Greater = '>',
    LessOrEqual = '<=',
    GreaterOrEqual = '>=',
    Contains = 'CONTAINS',
}

// A 0x order plus bookkeeping metadata. Boolean-like flags are stored as
// numbers (0/1) so they can participate in IndexedDB indexes.
export interface Order {
    hash: string;
    chainId: number;
    makerAddress: string;
    makerAssetData: string;
    makerAssetAmount: string;
    makerFee: string;
    makerFeeAssetData: string;
    takerAddress: string;
    takerAssetData: string;
    takerFeeAssetData: string;
    takerAssetAmount: string;
    takerFee: string;
    senderAddress: string;
    feeRecipientAddress: string;
    expirationTimeSeconds: string;
    salt: string;
    signature: string;
    exchangeAddress: string;
    fillableTakerAssetAmount: string;
    lastUpdated: string;
    isRemoved: number;
    isPinned: number;
    isNotPinned: number; // Used in a compound index in queries related to max expiration time.
    isUnfillable: number;
    isExpired: number;
    parsedMakerAssetData: string;
    parsedMakerFeeAssetData: string;
    lastValidatedBlockNumber: string;
    lastValidatedBlockHash: string;
    keepCancelled: number;
    keepExpired: number;
    keepFullyFilled: number;
    keepUnfunded: number;
}

export interface StoredOrderStatus {
    isStored: boolean;
    isMarkedRemoved: boolean;
    isMarkedUnfillable: boolean;
    fillableTakerAssetAmount?: string;
}

export type OrderField = keyof Order;
export type OrderQuery = Query<Order>;
export type OrderSort = SortOption<Order>;
export type OrderFilter = FilterOption<Order>;

export interface AddOrdersResult {
    alreadyStored: string[];
    added: Order[];
    removed: Order[];
}

// A compact block header; `logs` is the serialized event log payload.
export interface MiniHeader {
    hash: string;
    parent: string;
    number: string;
    timestamp: string;
    logs: Uint8Array;
}

export type MiniHeaderField = keyof MiniHeader;
export type MiniHeaderQuery = Query<MiniHeader>;
export type MiniHeaderSort = SortOption<MiniHeader>;
export type MiniHeaderFilter = FilterOption<MiniHeader>;

export interface AddMiniHeadersResult {
    added: MiniHeader[];
    removed: MiniHeader[];
}

// Singleton bookkeeping row, keyed by chain ID.
export interface Metadata {
    ethereumChainID: number;
    ethRPCRequestsSentInCurrentUTCDay: number;
    startOfCurrentUTCDay: string;
}

// Error raised when a get/update targets a row that does not exist.
function newNotFoundError(): Error {
    return new Error('could not find existing model or row in database');
}

// Error raised when saveMetadataAsync is called while metadata already exists.
function newMetadataAlreadExistsError(): Error {
    return new Error('metadata already exists in the database (use UpdateMetadata instead?)');
}

/**
 * Creates and returns a new database
 *
 * @param opts The options to use for the database
 */
export function createDatabase(opts: Options): Database {
    return new Database(opts);
}

export class Database {
    private readonly _db: Dexie;
    // Caps on table sizes; rows beyond the cap are evicted on insert.
    private readonly _maxOrders: number;
    private readonly _maxMiniHeaders: number;
    private readonly _orders: Dexie.Table<Order, string>;
    private readonly _miniHeaders: Dexie.Table<MiniHeader, string>;
    private readonly _metadata: Dexie.Table<Metadata, number>;
    private readonly _dhtstore: BatchingDatastore;
    private readonly _peerstore: BatchingDatastore;

    constructor(opts: Options) {
        this._db = new Dexie(opts.dataSourceName);
        this._maxOrders = opts.maxOrders;
        this._maxMiniHeaders = opts.maxMiniHeaders;
        // Schema v1. '&field' marks the primary key; the bracketed entry is a
        // compound index used for expiration-based eviction of unpinned orders.
        this._db.version(1).stores({
            orders: '&hash,chainId,makerAddress,makerAssetData,makerAssetAmount,makerFee,makerFeeAssetData,takerAddress,takerAssetData,takerFeeAssetData,takerAssetAmount,takerFee,senderAddress,feeRecipientAddress,expirationTimeSeconds,salt,signature,exchangeAddress,fillableTakerAssetAmount,lastUpdated,isRemoved,isPinned,isUnfillable,isExpired,parsedMakerAssetData,parsedMakerFeeAssetData,lastValidatedBlockNumber,lastValidatedBlockHash,keepCancelled,keepExpired,keepFullyFilled,keepUnfunded,[isNotPinned+expirationTimeSeconds]',
            miniHeaders: '&hash,parent,number,timestamp',
            metadata: '&ethereumChainID',
            dhtstore: '&key,data',
            peerstore: '&key,data',
        });
        this._orders = this._db.table('orders');
        this._miniHeaders = this._db.table('miniHeaders');
        this._metadata = this._db.table('metadata');
        this._dhtstore = new BatchingDatastore(this._db, 'dhtstore');
        this._peerstore = new BatchingDatastore(this._db, 'peerstore');
    }

    // Key/value store backing the DHT.
    public dhtStore(): BatchingDatastore {
        return this._dhtstore;
    }

    // Key/value store backing the peerstore.
    public peerStore(): BatchingDatastore {
        return this._peerstore;
    }

    public close(): void {
        this._db.close();
    }

    // AddOrders(orders []*types.OrderWithMetadata) (alreadyStored []common.Hash, added []*types.OrderWithMetadata, removed []*types.OrderWithMetadata, err error)
    /**
     * Adds the given orders, evicting unpinned orders with the furthest
     * expiration times once the table exceeds `_maxOrders`.
     */
    public async addOrdersAsync(orders: Order[]): Promise<AddOrdersResult> {
        const alreadyStored: string[] = [];
        const addedMap = new Map<string, Order>();
        const removed: Order[] = [];
        await this._db.transaction('rw!', this._orders, async () => {
            for (const order of orders) {
                try {
                    await this._orders.add(order);
                } catch (e) {
                    if (e.name === 'ConstraintError') {
                        // An order with this hash already exists. Add the order
                        // hash to the array of alreadyStored.
                        alreadyStored.push(order.hash);
                        continue;
                    }
                    throw e;
                }
                addedMap.set(order.hash, order);
            }
            // Remove orders with an expiration time too far in the future.
            // Pinned orders sort first in the compound index and so survive longest.
            const ordersToRemove = await this._orders
                .orderBy('[isNotPinned+expirationTimeSeconds]')
                .offset(this._maxOrders)
                .toArray();
            for (const order of ordersToRemove) {
                await this._orders.delete(order.hash);
                if (addedMap.has(order.hash)) {
                    // If the order was previously added, remove it from
                    // the added set and don't add it to the removed set.
                    addedMap.delete(order.hash);
                } else {
                    removed.push(order);
                }
            }
        });
        return {
            alreadyStored,
            added: Array.from(addedMap.values()),
            removed,
        };
    }

    // GetOrder(hash common.Hash) (*types.OrderWithMetadata, error)
    /** Returns the order with the given hash; throws "not found" otherwise. */
    public async getOrderAsync(hash: string): Promise<Order> {
        return this._db.transaction('r!', this._orders, async () => {
            const order = await this._orders.get(hash);
            if (order === undefined) {
                throw newNotFoundError();
            }
            return order;
        });
    }

    // GetOrderStatuses(hashes []common.Hash) (statuses []StoredOrderStatus, err error)
    /** Returns one status entry per input hash, in the same order. */
    public async getOrderStatusesAsync(hashes: string[]): Promise<StoredOrderStatus[]> {
        let orders: Order[] = [];
        await this._db.transaction('r!', this._orders, async () => {
            // NOTE(review): bulkGet yields `undefined` for missing keys even though
            // `orders` is typed Order[] — the loop below relies on that.
            orders = await this._orders.bulkGet(hashes);
        });
        const statuses: StoredOrderStatus[] = [];
        for (const order of orders) {
            if (order === undefined) {
                statuses.push({
                    isStored: false,
                    isMarkedRemoved: false,
                    isMarkedUnfillable: false,
                });
            } else {
                statuses.push({
                    isStored: true,
                    isMarkedRemoved: order.isRemoved === 1,
                    isMarkedUnfillable: order.isUnfillable === 1,
                    fillableTakerAssetAmount: order.fillableTakerAssetAmount,
                });
            }
        }
        return statuses;
    }

    // FindOrders(opts *OrderQuery) ([]*types.OrderWithMetadata, error)
    /** Returns all orders matching `query` (all orders when omitted). */
    public async findOrdersAsync(query?: OrderQuery): Promise<Order[]> {
        return this._db.transaction('r!', this._orders, async () => {
            return findRecordsAsync(this._orders, query);
        });
    }

    // CountOrders(opts *OrderQuery) (int, error)
    /** Counts orders matching `query` (all orders when omitted). */
    public async countOrdersAsync(query?: OrderQuery): Promise<number> {
        return this._db.transaction('r!', this._orders, async () => {
            if (!canUseNativeDexieIndexes(this._orders, query)) {
                // As a fallback, implement the query inefficiently (in-memory).
                // Note(albrow): If needed we can optimize specific common queries with compound indexes.
                const records = await runQueryInMemoryAsync(this._orders, query);
                return records.length;
            }
            const col = buildCollectionWithDexieIndexes(this._orders, query);
            return col.count();
        });
    }

    // DeleteOrder(hash common.Hash) error
    public async deleteOrderAsync(hash: string): Promise<void> {
        return this._db.transaction('rw!', this._orders, async () => {
            return this._orders.delete(hash);
        });
    }

    // DeleteOrders(opts *OrderQuery) ([]*types.OrderWithMetadata, error)
    /** Deletes all orders matching `query` and returns the deleted rows. */
    public async deleteOrdersAsync(query: OrderQuery | undefined): Promise<Order[]> {
        const deletedOrders: Order[] = [];
        await this._db.transaction('rw!', this._orders, async () => {
            const orders = await findRecordsAsync(this._orders, query);
            for (const order of orders) {
                await this._orders.delete(order.hash);
                deletedOrders.push(order);
            }
        });
        return deletedOrders;
    }

    // UpdateOrder(hash common.Hash, updateFunc func(existingOrder *types.OrderWithMetadata) (updatedOrder *types.OrderWithMetadata, err error)) error
    /**
     * Atomically reads the order, applies `updateFunc` and writes the result
     * back; throws "not found" when no order with `hash` exists.
     */
    public async updateOrderAsync(hash: string, updateFunc: (existingOrder: Order) => Order): Promise<void> {
        await this._db.transaction('rw!', this._orders, async () => {
            const existingOrder = await this._orders.get(hash);
            if (existingOrder === undefined) {
                throw newNotFoundError();
            }
            const updatedOrder = updateFunc(existingOrder);
            // NOTE(review): the table uses an inbound primary key ('&hash');
            // passing the key to put() may be redundant — confirm Dexie accepts it.
            await this._orders.put(updatedOrder, hash);
        });
    }

    // AddMiniHeaders(miniHeaders []*types.MiniHeader) (added []*types.MiniHeader, removed []*types.MiniHeader, err error)
    /**
     * Adds the given miniHeaders, evicting surplus headers once the table
     * exceeds `_maxMiniHeaders`.
     */
    public async addMiniHeadersAsync(miniHeaders: MiniHeader[]): Promise<AddMiniHeadersResult> {
        const addedMap = new Map<string, MiniHeader>();
        const removed: MiniHeader[] = [];
        await this._db.transaction('rw!', this._miniHeaders, async () => {
            for (const miniHeader of miniHeaders) {
                try {
                    await this._miniHeaders.add(miniHeader);
                } catch (e) {
                    if (e.name === 'ConstraintError') {
                        // A miniHeader with this hash already exists. This is
                        // fine based on the semantics of addMiniHeaders.
                        continue;
                    }
                    throw e;
                }
                addedMap.set(miniHeader.hash, miniHeader);
            }
            // Remove any outdated miniHeaders.
            // NOTE(review): assumes reverse() flips traversal to descending block
            // numbers before offset is applied during iteration, so the headers
            // beyond the cap are the oldest — confirm against Dexie's
            // offset/reverse interaction.
            const outdatedMiniHeaders = await this._miniHeaders
                .orderBy('number')
                .offset(this._maxMiniHeaders)
                .reverse()
                .toArray();
            for (const outdated of outdatedMiniHeaders) {
                await this._miniHeaders.delete(outdated.hash);
                if (addedMap.has(outdated.hash)) {
                    // If the order was previously added, remove it from
                    // the added set and don't add it to the removed set.
                    addedMap.delete(outdated.hash);
                } else {
                    removed.push(outdated);
                }
            }
        });
        return {
            added: Array.from(addedMap.values()),
            removed,
        };
    }

    // ResetMiniHeaders(newMiniHeaders []*types.MiniHeader) (err error)
    /** Replaces all stored miniHeaders with `newMiniHeaders`. */
    public async resetMiniHeadersAsync(newMiniHeaders: MiniHeader[]): Promise<void> {
        await this._db.transaction('rw!', this._miniHeaders, async () => {
            // Remove all of the existing miniheaders
            await this._miniHeaders.clear();
            for (const newMiniHeader of newMiniHeaders) {
                try {
                    await this._miniHeaders.add(newMiniHeader);
                } catch (e) {
                    if (e.name === 'ConstraintError') {
                        // A miniHeader with this hash already exists. This is
                        // fine based on the semantics of addMiniHeaders.
                        continue;
                    }
                    throw e;
                }
            }
        });
    }

    // GetMiniHeader(hash common.Hash) (*types.MiniHeader, error)
    /** Returns the miniHeader with the given hash; throws "not found" otherwise. */
    public async getMiniHeaderAsync(hash: string): Promise<MiniHeader> {
        return this._db.transaction('r!', this._miniHeaders, async () => {
            const miniHeader = await this._miniHeaders.get(hash);
            if (miniHeader === undefined) {
                throw newNotFoundError();
            }
            return miniHeader;
        });
    }

    // FindMiniHeaders(opts *MiniHeaderQuery) ([]*types.MiniHeader, error)
    public async findMiniHeadersAsync(query: MiniHeaderQuery): Promise<MiniHeader[]> {
        return this._db.transaction('r!', this._miniHeaders, async () => {
            return findRecordsAsync(this._miniHeaders, query);
        });
    }

    // DeleteMiniHeader(hash common.Hash) error
    public async deleteMiniHeaderAsync(hash: string): Promise<void> {
        return this._db.transaction('rw!', this._miniHeaders, async () => {
            return this._miniHeaders.delete(hash);
        });
    }

    // DeleteMiniHeaders(opts *MiniHeaderQuery) ([]*types.MiniHeader, error)
    /** Deletes all miniHeaders matching `query` and returns the deleted rows. */
    public async deleteMiniHeadersAsync(query: MiniHeaderQuery): Promise<MiniHeader[]> {
        const deletedMiniHeaders: MiniHeader[] = [];
        await this._db.transaction('rw!', this._miniHeaders, async () => {
            const miniHeaders = await findRecordsAsync(this._miniHeaders, query);
            for (const miniHeader of miniHeaders) {
                await this._miniHeaders.delete(miniHeader.hash);
                deletedMiniHeaders.push(miniHeader);
            }
        });
        return deletedMiniHeaders;
    }

    // GetMetadata() (*types.Metadata, error)
    public async getMetadataAsync(): Promise<Metadata> {
        return this._db.transaction('r!', this._metadata, async () => {
            return this._getMetadataAsync();
        });
    }

    // SaveMetadata(metadata *types.Metadata) error
    /** Stores the initial metadata row; throws if one already exists. */
    public async saveMetadataAsync(metadata: Metadata): Promise<void> {
        await this._db.transaction('rw!', this._metadata, async () => {
            if ((await this._metadata.count()) > 0) {
                throw newMetadataAlreadExistsError();
            }
            await this._metadata.add(metadata);
        });
    }

    // UpdateMetadata(updateFunc func(oldmetadata *types.Metadata) (newMetadata *types.Metadata)) error
    public async
updateMetadataAsync(updateFunc: (existingMetadata: Metadata) => Metadata): Promise<void> {
        // Atomically reads the metadata row, applies `updateFunc` and writes it back.
        await this._db.transaction('rw!', this._metadata, async () => {
            const existingMetadata = await this._getMetadataAsync();
            const updatedMetadata = updateFunc(existingMetadata);
            await this._metadata.put(updatedMetadata);
        });
    }

    // Returns the single metadata row; throws "not found" when absent and
    // self-heals (clears the table) when more than one row is present.
    private async _getMetadataAsync(): Promise<Metadata> {
        const count = await this._metadata.count();
        if (count === 0) {
            throw newNotFoundError();
        } else if (count > 1) {
            // This should never happen, but it's possible if a user manually messed around with
            // IndexedDB. In this case, just delete the metadata table and we should start
            // over.
            await this._metadata.clear();
            throw new Error('more than one metadata entry stored in the database');
        }
        const metadatas = await this._metadata.toArray();
        return metadatas[0];
    }
}

// Runs `query` against `table`, preferring native Dexie indexes and falling
// back to an in-memory scan when the query cannot be expressed with them.
async function findRecordsAsync<T extends Record, Key>(table: Dexie.Table<T, Key>, query?: Query<T>): Promise<T[]> {
    if (!canUseNativeDexieIndexes(table, query)) {
        // As a fallback, implement the query inefficiently (in-memory).
        // Note(albrow): If needed we can optimize specific common queries with compound indexes.
        return runQueryInMemoryAsync(table, query);
    }
    const col = buildCollectionWithDexieIndexes(table, query);
    return col.toArray();
}

// Translates `query` into a Dexie Collection. Callers must first check
// canUseNativeDexieIndexes(): at most one filter and one sort option (on the
// same field) are supported here.
function buildCollectionWithDexieIndexes<T extends Record, Key>(
    table: Dexie.Table<T, Key>,
    query?: Query<T>,
): Dexie.Collection<T, Key> {
    if (query === null || query === undefined) {
        return table.toCollection();
    }

    // First we create the Collection based on the query fields.
    let col: Dexie.Collection<T, Key>;
    if (queryUsesFilters(query)) {
        // tslint:disable-next-line:no-non-null-assertion
        const filter = query.filters![0];
        switch (filter.kind) {
            case FilterKind.Equal:
                col = table.where(filter.field).equals(filter.value);
                break;
            case FilterKind.NotEqual:
                col = table.where(filter.field).notEqual(filter.value);
                break;
            case FilterKind.Greater:
                col = table.where(filter.field).above(filter.value);
                break;
            case FilterKind.GreaterOrEqual:
                col = table.where(filter.field).aboveOrEqual(filter.value);
                break;
            case FilterKind.Less:
                col = table.where(filter.field).below(filter.value);
                break;
            case FilterKind.LessOrEqual:
                col = table.where(filter.field).belowOrEqual(filter.value);
                break;
            case FilterKind.Contains:
                // Note(albrow): This iterates through all orders and is very inefficient.
                // If needed, we should try to find a way to optimize this.
                col = table.filter(containsFilterFunc(filter));
                break;
            default:
                throw new Error(`unexpected filter kind: ${filter.kind}`);
        }
        // tslint:disable-next-line:no-non-null-assertion
        if (queryUsesSortOptions(query) && query.sort![0].direction === SortDirection.Desc) {
            // Note(albrow): This is only allowed if the sort and filter are using
            // the same field. Dexie automatically returns records sorted by the filter
            // field. If the direction is Ascending, we don't need to do anything else.
            // If it the direction is Descending, we just need to call reverse().
            // (reverse() mutates the collection's traversal direction in place.)
            col.reverse();
        }
    } else if (queryUsesSortOptions(query)) {
        // tslint:disable-next-line:no-non-null-assertion
        const sortOpt = query.sort![0];
        col = table.orderBy(sortOpt.field);
        if (sortOpt.direction === SortDirection.Desc) {
            col = col.reverse();
        }
    } else {
        // Query doesn't use filter or sort options.
        col = table.toCollection();
    }
    // offset()/limit() mutate the collection in place and return it.
    if (queryUsesOffset(query)) {
        // tslint:disable-next-line:no-non-null-assertion
        col.offset(query.offset!);
    }
    if (queryUsesLimit(query)) {
        // tslint:disable-next-line:no-non-null-assertion
        col.limit(query.limit!);
    }
    return col;
}

// In-memory fallback for queries that Dexie indexes cannot express. Loads the
// whole table, then applies filters, sort, offset and limit in JavaScript.
async function runQueryInMemoryAsync<T extends Record, Key>(
    table: Dexie.Table<T, Key>,
    query?: Query<T>,
): Promise<T[]> {
    let records = await table.toArray();
    if (query === undefined || query === null) {
        return records;
    }

    if (queryUsesFilters(query)) {
        // tslint:disable-next-line:no-non-null-assertion
        records = filterRecords(query.filters!, records);
    }
    if (queryUsesSortOptions(query)) {
        // tslint:disable-next-line:no-non-null-assertion
        records = sortRecords(query.sort!, records);
    }
    if (queryUsesOffset(query) && queryUsesLimit(query)) {
        // Bug fix: `limit` is a maximum record count (mirroring Dexie's
        // Collection.limit), not an end index. The previous
        // `slice(offset, limit)` returned too few (or zero) records whenever
        // offset > 0, diverging from the native Dexie query path.
        // tslint:disable-next-line:no-non-null-assertion
        records = records.slice(query.offset!, query.offset! + query.limit!);
    } else if (queryUsesLimit(query)) {
        // tslint:disable-next-line:no-non-null-assertion
        records = records.slice(0, query.limit!);
    } else if (queryUsesOffset(query)) {
        // tslint:disable-next-line:no-non-null-assertion
        records = records.slice(query.offset!);
    }
    return records;
}

// Applies every filter in order, narrowing `records` step by step.
function filterRecords<T extends Record>(filters: FilterOption<T>[], records: T[]): T[] {
    let result = records;
    // Note(albrow): As an optimization, we could use the native Dexie.js index for
    // the *first* filter when possible.
    for (const filter of filters) {
        // Each case narrows `result` with a plain JavaScript comparison that
        // mirrors the corresponding Dexie WhereClause operator.
        switch (filter.kind) {
            case FilterKind.Equal:
                result = result.filter((record) => record[filter.field] === filter.value);
                break;
            case FilterKind.NotEqual:
                result = result.filter((record) => record[filter.field] !== filter.value);
                break;
            case FilterKind.Greater:
                result = result.filter((record) => record[filter.field] > filter.value);
                break;
            case FilterKind.GreaterOrEqual:
                result = result.filter((record) => record[filter.field] >= filter.value);
                break;
            case FilterKind.Less:
                result = result.filter((record) => record[filter.field] < filter.value);
                break;
            case FilterKind.LessOrEqual:
                result = result.filter((record) => record[filter.field] <= filter.value);
                break;
            case FilterKind.Contains:
                // Only valid on string fields; containsFilterFunc throws otherwise.
                result = result.filter(containsFilterFunc(filter));
                break;
            default:
                throw new Error(`unexpected filter kind: ${filter.kind}`);
        }
    }
    return result;
}

// Sorts `records` by each sort option in priority order (first option wins,
// later options break ties).
function sortRecords<T extends Record>(sortOpts: SortOption<T>[], records: T[]): T[] {
    // Note(albrow): As an optimization, we could use native Dexie.js ordering for
    // the *first* sort option when possible.
const result = records; return result.sort((a: T, b: T) => { for (const s of sortOpts) { switch (s.direction) { case SortDirection.Asc: if (a[s.field] < b[s.field]) { return -1; } else if (a[s.field] > b[s.field]) { return 1; } break; case SortDirection.Desc: if (a[s.field] > b[s.field]) { return -1; } else if (a[s.field] < b[s.field]) { return 1; } break; default: throw new Error(`unexpected sort direction: ${s.direction}`); } } return 0; }); } function isString(x: any): x is string { return typeof x === 'string'; } function containsFilterFunc<T extends Record>(filter: FilterOption<T>): (record: T) => boolean { return (record: T): boolean => { const field = record[filter.field]; if (!isString(field)) { throw new Error( `cannot use CONTAINS filter on non-string field ${filter.field} of type ${typeof record[filter.field]}`, ); } return field.includes(filter.value); }; } function canUseNativeDexieIndexes<T extends Record, Key>(table: Dexie.Table<T, Key>, query?: Query<T>): boolean { if (query === null || query === undefined) { return true; } // tslint:disable-next-line:no-non-null-assertion if (queryUsesSortOptions(query) && query.sort!.length > 1) { // Dexie does not support multiple sort orders. return false; } // tslint:disable-next-line:no-non-null-assertion if (queryUsesFilters(query) && query.filters!.length > 1) { // Dexie does not support multiple filters. return false; } // tslint:disable-next-line:no-non-null-assertion if (queryUsesFilters(query) && queryUsesSortOptions(query) && query.filters![0].field !== query.sort![0].field) { // Dexie does not support sorting and filtering by two different fields. 
return false; } return true; } function queryUsesSortOptions<T extends Record>(query: Query<T>): boolean { return query.sort !== null && query.sort !== undefined && query.sort.length > 0; } function queryUsesFilters<T extends Record>(query: Query<T>): boolean { return query.filters !== null && query.filters !== undefined && query.filters.length > 0; } function queryUsesLimit<T extends Record>(query: Query<T>): boolean { return query.limit !== null && query.limit !== undefined && query.limit !== 0; } function queryUsesOffset<T extends Record>(query: Query<T>): boolean { return query.offset !== null && query.offset !== undefined && query.offset !== 0; }
the_stack
import Logger from '@storefront-api/lib/logger' const Magento2Client = require('magento2-rest-client').Magento2Client; const config = require('config') const redis = require('@storefront-api/lib/redis'); const redisClient = redis.getClient(config) const countryMapper = require('@storefront-api/lib/countrymapper') const Ajv = require('ajv'); // json validator const fs = require('fs'); const ajv = new Ajv(); // validator const merge = require('lodash/merge') const orderSchema = require('@storefront-api/default-vsf/models/order.schema') let orderSchemaExtension = {} if (fs.existsSync('../../models/order.schema.extension.json')) { orderSchemaExtension = require('../../models/order.schema.extension.json') } const validate = ajv.compile(merge(orderSchema, orderSchemaExtension)); function isNumeric (val) { return Number(parseFloat(val)).toString() === val; } /** * Internal function to compose Error object using messages about other errors. * * 'Error' constructor should contain one message object only. * (https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Error/Error) * * @param {string} message Main error message. * @param {string|array|object} errors Additional error message or error object or array of array objects. * @return {Error} */ function composeError (message: string, errors: string|any[]|Record<string, any>): Error { if (typeof errors === 'string') { message = message + ' ' + errors; } else if (Array.isArray(errors)) { // case with array of validation errors (ajv.ErrorObject - node_modules/ajv/lib/ajv.d.ts) errors.forEach((item) => { const part = (typeof item === 'string') ? item : (item.message || ''); message = (message + ' ' + part).trim(); }); } else if (errors && (errors.message || errors.errorMessage)) { // I don't know possible structure of an 'errors' in this case, so I take 'apiError()' from 'src/lib/util.js' // we should use debugger to inspect this case in more details and modify code. 
message = message + ' ' + (errors.message || errors.errorMessage); } return new Error(message.trim()); } /** * Send single order to Magento Instance * * The Magento2 API: https://magento.stackexchange.com/questions/136028/magento-2-create-order-using-rest-api * * @param {json} orderData order data in format as described in '../models/order.md' * @param {Object} config global CLI configuration * @param {Function} done callback - @example done(new Error()) - to acknowledge problems */ function processSingleOrder (orderData, config, job, done, logger = console) { const TOTAL_STEPS = 4; const THREAD_ID = 'ORD:' + (job ? job.id : 1) + ' - '; // job id let currentStep = 1; if (!validate(orderData)) { // schema validation of upcoming order logger.error(THREAD_ID + ' Order validation error!', validate.errors); // @ts-ignore done(composeError('Error while validating order object.', validate.errors)); if (job) job.progress(currentStep++, TOTAL_STEPS); return; } let isThisAuthOrder = parseInt(orderData.user_id) > 0 const userId = orderData.user_id let apiConfig = config.magento2.api if (orderData.store_code) { if (config.availableStores.indexOf(orderData.store_code) >= 0) { apiConfig = Object.assign({}, apiConfig, { url: apiConfig.url + '/' + orderData.store_code }) Logger.info('> Store code', orderData.store_code) } else { logger.error('Invalid store code', orderData.store_code) } } const api = Magento2Client(apiConfig); logger.info('> Order Id', orderData.order_id) logger.info('> Is order authorized?', isThisAuthOrder) logger.info('> User Id', userId) let cartId = orderData.cart_id const cartIdPrepare = isThisAuthOrder ? api.cart.create(null, userId) : (cartId ? 
new Promise((resolve, reject) => { resolve(cartId) }) : api.cart.create(null)) logger.info(THREAD_ID + '> Cart Id', cartId) const processCart = (result) => { cartId = result logger.info(THREAD_ID + '< Cart Id', cartId) // load current cart from the Magento to synchronize elements api.cart.pull(null, cartId, null, isThisAuthOrder).then((serverItems) => { const clientItems = orderData.products const syncPromises = [] logger.info(THREAD_ID + '> Sync between clientItems', clientItems.map((item) => { return { sku: item.sku, qty: item.qty, server_item_id: item.server_item_id, product_option: item.product_option } })) logger.info(THREAD_ID + '> ... and serverItems', serverItems) for (const clientItem of clientItems) { const serverItem = serverItems.find((itm) => { return itm.sku === clientItem.sku || itm.sku.indexOf(clientItem.sku + '-') >= 0 /* bundle products */ }) if (!serverItem) { logger.info(THREAD_ID + '< No server item for ' + clientItem.sku) syncPromises.push(api.cart.update(null, cartId, { // use magento API sku: clientItem.parentSku && config.cart.setConfigurableProductOptions ? clientItem.parentSku : clientItem.sku, qty: clientItem.qty, product_option: clientItem.product_option, quote_id: cartId }, isThisAuthOrder)) } else if (serverItem.qty !== clientItem.qty) { logger.info(THREAD_ID + '< Wrong qty for ' + clientItem.sku, clientItem.qty, serverItem.qty) syncPromises.push(api.cart.update(null, cartId, { // use magento API sku: clientItem.parentSku && config.cart.setConfigurableProductOptions ? 
clientItem.parentSku : clientItem.sku, qty: clientItem.qty, product_option: clientItem.product_option, item_id: serverItem.item_id, quote_id: cartId }, isThisAuthOrder)) } else { logger.info(THREAD_ID + '< Server and client items synced for ' + clientItem.sku) // here we need just update local item_id } } for (const serverItem of serverItems) { if (serverItem) { const clientItem = clientItems.find((itm) => { return itm.sku === serverItem.sku || serverItem.sku.indexOf(itm.sku + '-') >= 0 /* bundle products */ }) if (!clientItem) { logger.info(THREAD_ID + '< No client item for ' + serverItem.sku + ', removing from server cart') // use magento API syncPromises.push(api.cart.delete(null, cartId, { // delete server side item if not present if client's cart sku: serverItem.sku, item_id: serverItem.item_id }, isThisAuthOrder)) } } } Promise.all(syncPromises).then((results) => { if (job) job.progress(currentStep++, TOTAL_STEPS); logger.info(THREAD_ID + '< Server cart in sync') logger.debug(THREAD_ID + results) const billingAddr = orderData.addressInformation.billingAddress; const shippingAddr = orderData.addressInformation.shippingAddress; let mappedShippingRegion: Record<string, any> = {} let mappedBillingRegion: Record<string, any> = {} api.directory.countries().then((countryList) => { if (typeof shippingAddr !== 'undefined' && shippingAddr !== null) { if (shippingAddr.region_id > 0) { mappedShippingRegion = { regionId: shippingAddr.region_id, regionCode: shippingAddr.region_code } } else { mappedShippingRegion = countryMapper.mapCountryRegion(countryList, shippingAddr.country_id, shippingAddr.region_code ? shippingAddr.region_code : shippingAddr.region) } } if (billingAddr.region_id > 0) { mappedBillingRegion = { regionId: billingAddr.region_id, regionCode: billingAddr.region_code } } else { mappedBillingRegion = countryMapper.mapCountryRegion(countryList, billingAddr.country_id, billingAddr.region_code ? 
billingAddr.region_code : billingAddr.region) } const billingAddressInfo = { // sum up totals 'address': { 'countryId': billingAddr.country_id, 'street': billingAddr.street, 'telephone': billingAddr.telephone, 'postcode': billingAddr.postcode, 'city': billingAddr.city, 'firstname': billingAddr.firstname, 'lastname': billingAddr.lastname, 'email': billingAddr.email, 'regionCode': mappedBillingRegion.regionCode, 'regionId': mappedBillingRegion.regionId, 'company': billingAddr.company, 'vatId': billingAddr.vat_id, 'save_in_address_book': billingAddr.save_address } } const shippingAddressInfo = { // sum up totals 'addressInformation': { 'billingAddress': { 'countryId': billingAddr.country_id, 'street': billingAddr.street, 'telephone': billingAddr.telephone, 'postcode': billingAddr.postcode, 'city': billingAddr.city, 'firstname': billingAddr.firstname, 'lastname': billingAddr.lastname, 'email': billingAddr.email, 'regionId': mappedBillingRegion.regionId, 'regionCode': mappedBillingRegion.regionCode, 'region': billingAddr.region, 'company': billingAddr.company, 'vatId': billingAddr.vat_id, 'save_in_address_book': billingAddr.save_address }, 'shippingMethodCode': orderData.addressInformation.shipping_method_code, 'shippingCarrierCode': orderData.addressInformation.shipping_carrier_code, 'extensionAttributes': orderData.addressInformation.shippingExtraFields } } if (typeof shippingAddr !== 'undefined' && shippingAddr !== null) { shippingAddressInfo['addressInformation']['shippingAddress'] = { 'countryId': shippingAddr.country_id, 'street': shippingAddr.street, 'telephone': shippingAddr.telephone, 'postcode': shippingAddr.postcode, 'city': shippingAddr.city, 'firstname': shippingAddr.firstname, 'lastname': shippingAddr.lastname, 'email': shippingAddr.email, 'regionId': mappedShippingRegion.regionId, 'regionCode': mappedShippingRegion.regionCode, 'region': shippingAddr.region, 'company': shippingAddr.company, 'save_in_address_book': shippingAddr.save_address } } else { 
shippingAddressInfo['addressInformation']['shippingAddress'] = shippingAddressInfo['addressInformation']['billingAddress'] } logger.info(THREAD_ID + '< Billing info', billingAddressInfo) api.cart.billingAddress(null, cartId, billingAddressInfo, isThisAuthOrder).then((result) => { logger.info(THREAD_ID + '< Billing address assigned', result) logger.info(THREAD_ID + '< Shipping info', shippingAddressInfo) api.cart.shippingInformation(null, cartId, shippingAddressInfo, isThisAuthOrder).then((result) => { logger.info(THREAD_ID + '< Shipping address assigned', result) if (job) job.progress(currentStep++, TOTAL_STEPS); api.cart.order(null, cartId, { 'paymentMethod': { 'method': orderData.addressInformation.payment_method_code, 'additional_data': orderData.addressInformation.payment_method_additional } }, isThisAuthOrder).then(result => { logger.info(THREAD_ID, result) if (job) job.progress(currentStep++, TOTAL_STEPS); logger.info(THREAD_ID + '[OK] Order placed with ORDER ID', result); logger.debug(THREAD_ID + result) if (orderData.order_id) { redisClient.set('order$$id$$' + orderData.order_id, JSON.stringify({ platform_order_id: result, transmited: true, transmited_at: new Date(), platform: 'magento2', order: orderData })); redisClient.set('order$$totals$$' + orderData.order_id, JSON.stringify(result[1])); } let orderIncrementId = null; api.orders.incrementIdById(result).then(result => { orderIncrementId = result.increment_id }).catch(err => { logger.warn('could not fetch increment_id for Order', err, typeof err) }).finally(() => { if (job) job.progress(currentStep++, TOTAL_STEPS); return done(null, { magentoOrderId: result, orderNumber: orderIncrementId, backendOrderId: result, transferedAt: new Date() }); }) }).catch(err => { logger.error('Error placing an order', err, typeof err) if (job) job.attempts(6).backoff({delay: 30 * 1000, type: 'fixed'}).save() // @ts-ignore return done(composeError('Error placing an order.', err)); }) }).catch((errors) => { 
logger.error('Error while adding shipping address', errors) if (job) job.attempts(3).backoff({ delay: 60 * 1000, type: 'fixed' }).save() // @ts-ignore return done(composeError('Error while adding shipping address.', errors)); }) }).catch((errors) => { logger.error('Error while adding billing address', errors) if (job) job.attempts(3).backoff({ delay: 60 * 1000, type: 'fixed' }).save() // @ts-ignore return done(composeError('Error while adding billing address.', errors)); }) }).catch((errors) => { logger.error('Error while synchronizing country list', errors) if (job) job.attempts(3).backoff({ delay: 30 * 1000, type: 'fixed' }).save() // @ts-ignore return done(composeError('Error while syncing country list.', errors)); }) }).catch((errors) => { logger.error('Error while adding products', errors) if (job) job.attempts(3).backoff({ delay: 30 * 1000, type: 'fixed' }).save() // @ts-ignore return done(composeError('Error while adding products.', errors)); }) }) } cartIdPrepare.then(processCart).catch((error) => { // cannot create a quote for specific user, so bypass by placing anonymous order logger.error(THREAD_ID, error) logger.info('< Bypassing to anonymous order') isThisAuthOrder = false if (isNumeric(cartId)) { // we have numeric id - assigned to the user provided api.cart.create(null, null).then((result) => { processCart(result) // logger.info('< Assigning guest cart with the user') // api.cart.assign(cartId, userId).then((subres) =>{ // console.info(subres) // processCart(result) // }).catch((err) => { // logger.error(err) // }) }).catch(error => { logger.info(error) // @ts-ignore return done(composeError('Error while adding products.', error)); }) // TODO: assign the guest cart with user at last? } else { logger.info(THREAD_ID + '< Using cartId provided with the order', cartId) processCart(cartId) } }) } export { processSingleOrder }
the_stack
import { defaultErrorMap, ZodErrorMap } from "./defaultErrorMap.ts"; import { INVALID, util } from "./helpers/util.ts"; import { NOSET, PseudoPromise } from "./PseudoPromise.ts"; // import { inputSchema } from "../types/base/output-schema"; import { ZodType, RefinementCtx } from "./index.ts"; // type adsf = RefinementCtx // import { ZodNever } from "../types/never"; // import { ZodPromise } from "../types/promise"; import { ZodDef } from "./ZodDef.ts"; import { ZodError, ZodIssue, ZodIssueCode, MakeErrorData } from "./ZodError.ts"; import { ZodParsedType } from "./ZodParsedType.ts"; import { ZodTypes } from "./ZodTypes.ts"; export const getParsedType = (data: any): ZodParsedType => { if (typeof data === "string") return "string"; if (typeof data === "number") { if (Number.isNaN(data)) return "nan"; return "number"; } if (typeof data === "boolean") return "boolean"; if (typeof data === "bigint") return "bigint"; if (typeof data === "symbol") return "symbol"; if (data instanceof Date) return "date"; if (typeof data === "function") return "function"; if (data === undefined) return "undefined"; if (typeof data === "undefined") return "undefined"; if (typeof data === "object") { if (Array.isArray(data)) return "array"; if (data === null) return "null"; if ( data.then && typeof data.then === "function" && data.catch && typeof data.catch === "function" ) { return "promise"; } if (data instanceof Map) { return "map"; } return "object"; } return "unknown"; }; const makeError = ( params: Required<ParseParams>, data: any, errorData: MakeErrorData ): ZodIssue => { const errorArg = { ...errorData, path: [...params.path, ...(errorData.path || [])], }; const ctxArg = { data }; const defaultError = defaultErrorMap === params.errorMap ? 
{ message: `Invalid value.` } : defaultErrorMap(errorArg, { ...ctxArg, defaultError: `Invalid value.`, }); return { ...errorData, path: [...params.path, ...(errorData.path || [])], message: errorData.message || params.errorMap(errorArg, { ...ctxArg, defaultError: defaultError.message, }).message, }; }; export type ParseParams = { seen?: { schema: ZodType<any>; objects: { input: any; error?: ZodError; output: any }[]; }[]; path?: (string | number)[]; errorMap?: ZodErrorMap; async?: boolean; runAsyncValidationsInSeries?: boolean; }; export const ZodParser = (schema: ZodType<any>) => ( data: any, baseParams: ParseParams = { seen: [], errorMap: defaultErrorMap, path: [] } ) => { const params: Required<ParseParams> = { seen: baseParams.seen || [], path: baseParams.path || [], errorMap: baseParams.errorMap || defaultErrorMap, async: baseParams.async ?? false, runAsyncValidationsInSeries: baseParams.runAsyncValidationsInSeries ?? false, }; const def: ZodDef = schema._def as any; let PROMISE: PseudoPromise<any> = new PseudoPromise(); (PROMISE as any)._default = true; const RESULT: { input: any; output: any; error?: ZodError } = { input: data, output: INVALID, }; params.seen = params.seen || []; const ERROR = new ZodError([]); const THROW = () => { RESULT.error = ERROR; throw ERROR; }; const HANDLE = (err: Error) => { if (err instanceof ZodError) { ERROR.addIssues(err.issues); return INVALID; } throw ERROR; }; const parsedType = getParsedType(data); switch (def.t) { case ZodTypes.string: if (parsedType !== ZodParsedType.string) { ERROR.addIssue( makeError(params, data, { code: ZodIssueCode.invalid_type, expected: ZodParsedType.string, received: parsedType, }) ); THROW(); } PROMISE = PseudoPromise.resolve(data); break; case ZodTypes.number: if (parsedType !== ZodParsedType.number) { ERROR.addIssue( makeError(params, data, { code: ZodIssueCode.invalid_type, expected: ZodParsedType.number, received: parsedType, }) ); THROW(); } if (Number.isNaN(data)) { ERROR.addIssue( 
makeError(params, data, { code: ZodIssueCode.invalid_type, expected: ZodParsedType.number, received: ZodParsedType.nan, }) ); THROW(); } PROMISE = PseudoPromise.resolve(data); break; case ZodTypes.bigint: if (parsedType !== ZodParsedType.bigint) { ERROR.addIssue( makeError(params, data, { code: ZodIssueCode.invalid_type, expected: ZodParsedType.bigint, received: parsedType, }) ); THROW(); } PROMISE = PseudoPromise.resolve(data); break; case ZodTypes.boolean: if (parsedType !== ZodParsedType.boolean) { ERROR.addIssue( makeError(params, data, { code: ZodIssueCode.invalid_type, expected: ZodParsedType.boolean, received: parsedType, }) ); THROW(); } PROMISE = PseudoPromise.resolve(data); break; case ZodTypes.undefined: if (parsedType !== ZodParsedType.undefined) { ERROR.addIssue( makeError(params, data, { code: ZodIssueCode.invalid_type, expected: ZodParsedType.undefined, received: parsedType, }) ); THROW(); } PROMISE = PseudoPromise.resolve(data); break; case ZodTypes.null: if (parsedType !== ZodParsedType.null) { ERROR.addIssue( makeError(params, data, { code: ZodIssueCode.invalid_type, expected: ZodParsedType.null, received: parsedType, }) ); THROW(); } PROMISE = PseudoPromise.resolve(data); break; case ZodTypes.any: PROMISE = PseudoPromise.resolve(data); break; case ZodTypes.unknown: PROMISE = PseudoPromise.resolve(data); break; case ZodTypes.never: ERROR.addIssue( makeError(params, data, { code: ZodIssueCode.invalid_type, expected: ZodParsedType.never, received: parsedType, }) ); PROMISE = PseudoPromise.resolve(INVALID); break; case ZodTypes.void: if ( parsedType !== ZodParsedType.undefined && parsedType !== ZodParsedType.null ) { ERROR.addIssue( makeError(params, data, { code: ZodIssueCode.invalid_type, expected: ZodParsedType.void, received: parsedType, }) ); THROW(); } PROMISE = PseudoPromise.resolve(data); break; case ZodTypes.array: RESULT.output = []; if (parsedType !== ZodParsedType.array) { ERROR.addIssue( makeError(params, data, { code: 
ZodIssueCode.invalid_type, expected: ZodParsedType.array, received: parsedType, }) ); THROW(); } // const data: any[] = data; if (def.nonempty === true && data.length === 0) { ERROR.addIssue( makeError(params, data, { code: ZodIssueCode.nonempty_array_is_empty, }) ); THROW(); } PROMISE = PseudoPromise.all( (data as any[]).map((item, i) => { return new PseudoPromise() .then(() => def.type.parse(item, { ...params, path: [...params.path, i], }) ) .catch((err) => { if (!(err instanceof ZodError)) { throw err; } ERROR.addIssues(err.issues); return INVALID; }); }) ); break; case ZodTypes.map: if (parsedType !== ZodParsedType.map) { ERROR.addIssue( makeError(params, data, { code: ZodIssueCode.invalid_type, expected: ZodParsedType.map, received: parsedType, }) ); THROW(); } const dataMap: Map<unknown, unknown> = data; const returnedMap = new Map(); PROMISE = PseudoPromise.all( [...dataMap.entries()].map(([key, value], index) => { return PseudoPromise.all([ new PseudoPromise() .then(() => { return def.keyType.parse(key, { ...params, path: [...params.path, index, "key"], }); }) .catch(HANDLE), new PseudoPromise() .then(() => { const mapValue = def.valueType.parse(value, { ...params, path: [...params.path, index, "value"], }); return [key, mapValue]; }) .catch(HANDLE), ]) .then((item: any) => { if (item[0] !== INVALID && item[1] !== INVALID) { returnedMap.set(item[0], item[1]); } }) .catch(HANDLE); }) ) .then(() => { if (!ERROR.isEmpty) { throw ERROR; } }) .then(() => { return returnedMap; }) .then(() => { return returnedMap; }); break; case ZodTypes.object: RESULT.output = {}; if (parsedType !== ZodParsedType.object) { ERROR.addIssue( makeError(params, data, { code: ZodIssueCode.invalid_type, expected: ZodParsedType.object, received: parsedType, }) ); THROW(); } const objectPromises: { [k: string]: PseudoPromise<any> } = {}; const shape = def.shape(); const shapeKeys = Object.keys(shape); const dataKeys = Object.keys(data); const extraKeys = dataKeys.filter((k) => 
shapeKeys.indexOf(k) === -1); for (const key of shapeKeys) { const keyValidator = shapeKeys.includes(key) ? shape[key] : !(def.catchall._def.t === ZodTypes.never) ? def.catchall : undefined; if (!keyValidator) { continue; } // first check is required to avoid non-enumerable keys if (typeof data[key] === "undefined" && !dataKeys.includes(key)) { objectPromises[key] = new PseudoPromise() .then(() => { return keyValidator.parse(undefined, { ...params, path: [...params.path, key], }); }) .then((output) => { if (output === undefined) { // schema is optional // data is undefined // don't explicity add undefined to outut // continue; return NOSET; } else { return output; } }) .catch((err) => { if (err instanceof ZodError) { const zerr: ZodError = err; ERROR.addIssues(zerr.issues); objectPromises[key] = PseudoPromise.resolve(INVALID); } else { throw err; } }); continue; } objectPromises[key] = new PseudoPromise() .then(() => { return keyValidator.parse(data[key], { ...params, path: [...params.path, key], }); }) .catch((err) => { if (err instanceof ZodError) { const zerr: ZodError = err; ERROR.addIssues(zerr.issues); return INVALID; } else { throw err; } }); } if (def.catchall._def.t === ZodTypes.never) { if (def.unknownKeys === "passthrough") { for (const key of extraKeys) { objectPromises[key] = PseudoPromise.resolve(data[key]); } } else if (def.unknownKeys === "strict") { if (extraKeys.length > 0) { ERROR.addIssue( makeError(params, data, { code: ZodIssueCode.unrecognized_keys, keys: extraKeys, }) ); } } else if (def.unknownKeys === "strip") { // do nothing } else { util.assertNever(def.unknownKeys); } } else { // run catchall validation for (const key of extraKeys) { objectPromises[key] = new PseudoPromise() .then(() => { const parsedValue = def.catchall.parse(data[key], { ...params, path: [...params.path, key], }); return parsedValue; }) .catch((err) => { if (err instanceof ZodError) { ERROR.addIssues(err.issues); } else { throw err; } }); } } PROMISE = 
PseudoPromise.object(objectPromises) .then((resolvedObject) => { Object.assign(RESULT.output, resolvedObject); return RESULT.output; }) .then((finalObject) => { if (ERROR.issues.length > 0) { return INVALID; } return finalObject; }) .catch((err) => { if (err instanceof ZodError) { ERROR.addIssues(err.issues); return INVALID; } throw err; }); break; case ZodTypes.union: let isValid = false; const unionErrors: ZodError[] = []; PROMISE = PseudoPromise.all( def.options.map((opt, _j) => { // return new PseudoPromise().then return new PseudoPromise() .then(() => { return opt.parse(data, params); }) .then((optionData) => { isValid = true; return optionData; }) .catch((err) => { if (err instanceof ZodError) { unionErrors.push(err); return INVALID; } throw err; }); }) ) .then((unionResults) => { if (!isValid) { const nonTypeErrors = unionErrors.filter((err) => { return err.issues[0].code !== "invalid_type"; }); if (nonTypeErrors.length === 1) { ERROR.addIssues(nonTypeErrors[0].issues); } else { ERROR.addIssue( makeError(params, data, { code: ZodIssueCode.invalid_union, unionErrors, }) ); } THROW(); // return; } return unionResults; }) .then((unionResults: any[]) => { return util.find(unionResults, (val: any) => val !== INVALID); }); break; case ZodTypes.intersection: PROMISE = PseudoPromise.all([ new PseudoPromise() .then(() => { return def.left.parse(data, params); }) .catch(HANDLE), new PseudoPromise() .then(() => { return def.right.parse(data, params); }) .catch(HANDLE), ]).then(([parsedLeft, parsedRight]: any) => { if (parsedLeft === INVALID || parsedRight === INVALID) return INVALID; const parsedLeftType = getParsedType(parsedLeft); const parsedRightType = getParsedType(parsedRight); if (parsedLeft === parsedRight) { return parsedLeft; } else if ( parsedLeftType === ZodParsedType.object && parsedRightType === ZodParsedType.object ) { return { ...parsedLeft, ...parsedRight }; } else { ERROR.addIssue( makeError(params, data, { code: 
ZodIssueCode.invalid_intersection_types, }) ); } }); break; case ZodTypes.optional: if (parsedType === ZodParsedType.undefined) { PROMISE = PseudoPromise.resolve(undefined); break; } PROMISE = new PseudoPromise() .then(() => { return def.innerType.parse(data, params); }) .catch(HANDLE); break; case ZodTypes.nullable: if (parsedType === ZodParsedType.null) { PROMISE = PseudoPromise.resolve(null); break; } PROMISE = new PseudoPromise() .then(() => { return def.innerType.parse(data, params); }) .catch(HANDLE); break; case ZodTypes.tuple: if (parsedType !== ZodParsedType.array) { ERROR.addIssue( makeError(params, data, { code: ZodIssueCode.invalid_type, expected: ZodParsedType.array, received: parsedType, }) ); THROW(); } if (data.length > def.items.length) { ERROR.addIssue( makeError(params, data, { code: ZodIssueCode.too_big, maximum: def.items.length, inclusive: true, type: "array", }) ); } else if (data.length < def.items.length) { ERROR.addIssue( makeError(params, data, { code: ZodIssueCode.too_small, minimum: def.items.length, inclusive: true, type: "array", }) ); } const tupleData: any[] = data; PROMISE = PseudoPromise.all( tupleData.map((item, index) => { const itemParser = def.items[index]; return new PseudoPromise() .then(() => { const tupleDatum = itemParser.parse(item, { ...params, path: [...params.path, index], }); return tupleDatum; }) .catch((err) => { if (err instanceof ZodError) { ERROR.addIssues(err.issues); return; } throw err; }) .then((arg) => { return arg; }); }) ) .then((tupleData) => { if (!ERROR.isEmpty) THROW(); return tupleData; }) .catch((err) => { throw err; }); break; case ZodTypes.lazy: const lazySchema = def.getter(); PROMISE = PseudoPromise.resolve(lazySchema.parse(data, params)); break; case ZodTypes.literal: if (data !== def.value) { ERROR.addIssue( makeError(params, data, { code: ZodIssueCode.invalid_literal_value, expected: def.value, }) ); } PROMISE = PseudoPromise.resolve(data); break; case ZodTypes.enum: if 
(def.values.indexOf(data) === -1) { ERROR.addIssue( makeError(params, data, { code: ZodIssueCode.invalid_enum_value, options: def.values, }) ); } PROMISE = PseudoPromise.resolve(data); break; case ZodTypes.nativeEnum: if (util.getValidEnumValues(def.values).indexOf(data) === -1) { ERROR.addIssue( makeError(params, data, { code: ZodIssueCode.invalid_enum_value, options: util.objectValues(def.values), }) ); } PROMISE = PseudoPromise.resolve(data); break; case ZodTypes.function: if (parsedType !== ZodParsedType.function) { ERROR.addIssue( makeError(params, data, { code: ZodIssueCode.invalid_type, expected: ZodParsedType.function, received: parsedType, }) ); THROW(); } const isAsyncFunction = def.returns._def.t === ZodTypes.promise; const validatedFunction = (...args: any[]) => { const internalProm = new PseudoPromise() .then(() => { return def.args.parse(args as any, { ...params, async: isAsyncFunction, }); }) .catch((err) => { if (!(err instanceof ZodError)) throw err; const argsError = new ZodError([]); argsError.addIssue( makeError(params, data, { code: ZodIssueCode.invalid_arguments, argumentsError: err, }) ); throw argsError; }) .then((args) => { return data(...(args as any)); }) .then((result) => { return def.returns.parse(result, { ...params, async: isAsyncFunction, }); }) .catch((err) => { if (err instanceof ZodError) { const returnsError = new ZodError([]); returnsError.addIssue( makeError(params, data, { code: ZodIssueCode.invalid_return_type, returnTypeError: err, }) ); throw returnsError; } throw err; }); if (isAsyncFunction) { return internalProm.getValueAsync(); } else { return internalProm.getValueSync(); } }; PROMISE = PseudoPromise.resolve(validatedFunction); break; case ZodTypes.record: if (parsedType !== ZodParsedType.object) { ERROR.addIssue( makeError(params, data, { code: ZodIssueCode.invalid_type, expected: ZodParsedType.object, received: parsedType, }) ); THROW(); } const parsedRecordPromises: { [k: string]: PseudoPromise<any> } = {}; for 
(const key in data) { parsedRecordPromises[key] = new PseudoPromise() .then(() => { return def.valueType.parse(data[key], { ...params, path: [...params.path, key], }); }) .catch(HANDLE); } PROMISE = PseudoPromise.object(parsedRecordPromises); break; case ZodTypes.date: if (!(data instanceof Date)) { ERROR.addIssue( makeError(params, data, { code: ZodIssueCode.invalid_type, expected: ZodParsedType.date, received: parsedType, }) ); THROW(); } if (isNaN(data.getTime())) { ERROR.addIssue( makeError(params, data, { code: ZodIssueCode.invalid_date, }) ); THROW(); } PROMISE = PseudoPromise.resolve(data); break; case ZodTypes.promise: if (parsedType !== ZodParsedType.promise && params.async !== true) { ERROR.addIssue( makeError(params, data, { code: ZodIssueCode.invalid_type, expected: ZodParsedType.promise, received: parsedType, }) ); THROW(); } const promisified = parsedType === ZodParsedType.promise ? data : Promise.resolve(data); PROMISE = PseudoPromise.resolve( promisified.then((resolvedData: any) => { return def.type.parse(resolvedData, params); }) ); break; case ZodTypes.transformer: PROMISE = new PseudoPromise().then(() => { return def.schema.parse(data, params); }); break; default: PROMISE = PseudoPromise.resolve("adsf" as never); util.assertNever(def); } if ((PROMISE as any)._default === true) { throw new Error("Result is not materialized."); } if (!ERROR.isEmpty) { THROW(); } const effects = def.effects || []; const checkCtx: RefinementCtx = { addIssue: (arg: MakeErrorData) => { ERROR.addIssue(makeError(params, data, arg)); }, path: params.path, }; if (params.async === false) { const resolvedValue = PROMISE.getValueSync(); if (resolvedValue === INVALID && ERROR.isEmpty) { ERROR.addIssue( makeError(params, data, { code: ZodIssueCode.custom, message: "Invalid", }) ); } if (!ERROR.isEmpty) { THROW(); } let finalValue = resolvedValue; for (const effect of effects) { // console.log(`running effect: `); // console.log(effect); if (effect.type === "check") { const 
checkResult = effect.check(finalValue, checkCtx); // console.log(`checkresult: ${checkResult}`); if (checkResult instanceof Promise) throw new Error( "You can't use .parse() on a schema containing async refinements. Use .parseAsync instead." ); } else if (effect.type === "mod") { if (def.t !== ZodTypes.transformer) throw new Error("Only Modders can contain mods"); finalValue = effect.mod(finalValue); if (finalValue instanceof Promise) { throw new Error( `You can't use .parse() on a schema containing async transformations. Use .parseAsync instead.` ); } } else { throw new Error(`Invalid effect type.`); } } if (!ERROR.isEmpty) { THROW(); } return finalValue as any; } else { // if (params.async == true) { const checker = async () => { const resolvedValue = await PROMISE.getValueAsync(); if (resolvedValue === INVALID && ERROR.isEmpty) { // let someError: boolean = false; ERROR.addIssue( makeError(params, data, { code: ZodIssueCode.custom, message: "Invalid", }) ); } if (!ERROR.isEmpty) { THROW(); } let finalValue = resolvedValue; for (const effect of effects) { if (effect.type === "check") { await effect.check(finalValue, checkCtx); } else if (effect.type === "mod") { if (def.t !== ZodTypes.transformer) throw new Error("Only Modders can contain mods"); finalValue = await effect.mod(finalValue); } } // if (params.runAsyncValidationsInSeries) { // let someError = false; // await customChecks.reduce((previousPromise, check) => { // return previousPromise.then(async () => { // if (!someError) { // const len = ERROR.issues.length; // await check.check(resolvedValue, checkCtx); // if (len < ERROR.issues.length) someError = true; // } // }); // }, Promise.resolve()); // } else { // await Promise.all( // customChecks.map(async (check) => { // await check.check(resolvedValue, checkCtx); // }) // ); // } if (!ERROR.isEmpty) { THROW(); } return finalValue; }; return checker(); } };
the_stack
// Type definitions for the wallet app's Redux layer: every dispatched
// action shape, the global `Action` union consumed by reducers/sagas, and
// the plain data structures exchanged with the Rust wallet backend.
//
// NOTE(review): many exported names spell "Failure" as "Falure"
// (e.g. `txGetFalureAction`) and `setWalletOpenction` is missing "A".
// These misspellings are part of the exported API and are preserved here;
// renaming them would break every importer. Clean up in a dedicated rename.
import { State as SettingsState } from 'src/modules/settings'
import { RootStackParamList } from 'src/modules/navigation'
import { StackNavigationProp } from '@react-navigation/stack'
import { RouteProp } from '@react-navigation/native'
// reduxjs/toolkit
import { TxReceiveActions } from 'src/modules/tx/receive'
import { TorActions } from 'src/modules/tor'
import { RootState } from './redux'

// ---------------------------------------------------------------------------
// Transaction list

export type txListRequestAction = {
  type: 'TX_LIST_REQUEST'
  showLoader: boolean
  refreshFromNode: boolean
}
export type txListClearAction = { type: 'TX_LIST_CLEAR' }
export type txListSuccessAction = {
  type: 'TX_LIST_SUCCESS'
  data: Array<Tx>
  balance: RustBalance
  isRefreshed: boolean
}
export type txListFailureAction = {
  type: 'TX_LIST_FAILURE'
  code?: number
  message: string
}

// Single-transaction fetch
export type txGetRequestAction = {
  type: 'TX_GET_REQUEST'
  txSlateId: string
}
export type txGetSuccessAction = {
  type: 'TX_GET_SUCCESS'
  tx: RustTx
  isRefreshed: boolean
}
export type txGetFalureAction = {
  type: 'TX_GET_FAILURE'
  code?: number
  message: string
}

// Transaction cancel
export type txCancelRequestAction = {
  type: 'TX_CANCEL_REQUEST'
  id: number
  slateId: string
  isResponse: boolean
}
export type txCancelSuccessAction = { type: 'TX_CANCEL_SUCCESS' }
// NOTE(review): unlike the other failure actions, `code` is required here
// — presumably intentional, but verify against the dispatching saga.
export type txCancelFalureAction = {
  type: 'TX_CANCEL_FAILURE'
  code: number
  message: string
}

// Transaction create / send
export type txCreateRequestAction = {
  type: 'TX_CREATE_REQUEST'
  amount: number
  selectionStrategyIsUseAll: boolean
}
export type txCreateSuccessAction = { type: 'TX_CREATE_SUCCESS' }
export type txCreateFalureAction = {
  type: 'TX_CREATE_FAILURE'
  code?: number
  message: string
}
export type txSendAddressRequestAction = {
  type: 'TX_SEND_ADDRESS_REQUEST'
  amount: number
  selectionStrategyIsUseAll: boolean
  address: string
}
export type txSendAddressSuccessAction = { type: 'TX_SEND_ADDRESS_SUCCESS' }
export type txSendAddressFalureAction = {
  type: 'TX_SEND_ADDRESS_FAILURE'
  code?: number
  message: string
}

// Transaction post (broadcast)
export type txPostShowAction = {
  type: 'TX_POST_SHOW'
  txSlateId: string
}
export type txPostCloseAction = { type: 'TX_POST_CLOSE' }
export type txPostRequestAction = {
  type: 'TX_POST_REQUEST'
  txSlateId: string
}
export type txPostSuccessAction = { type: 'TX_POST_SUCCESS' }
export type txPostFalureAction = {
  type: 'TX_POST_FAILURE'
  code?: number
  message: string
}

// ---------------------------------------------------------------------------
// Settings / network

export type setSettingsAction = {
  type: 'SET_SETTINGS'
  // Partial patch of the settings state: any subset of SettingsState keys.
  newSettings: { [k in keyof SettingsState]?: SettingsState[k] }
}
export type switchToMainnetAction = { type: 'SWITCH_TO_MAINNET' }
export type switchToFloonetAction = { type: 'SWITCH_TO_FLOONET' }

// Biometry (enable / disable / reset / check)
export type enableBiometryRequestAction = { type: 'ENABLE_BIOMETRY_REQUEST' }
export type enableBiometrySuccessAction = { type: 'ENABLE_BIOMETRY_SUCCESS' }
export type enableBiometryFalureAction = {
  type: 'ENABLE_BIOMETRY_FAILURE'
  code?: number
  message: string
}
export type disableBiometryRequestAction = { type: 'DISABLE_BIOMETRY_REQUEST' }
export type disableBiometrySuccessAction = { type: 'DISABLE_BIOMETRY_SUCCESS' }
export type disableBiometryFalureAction = {
  type: 'DISABLE_BIOMETRY_FAILURE'
  code?: number
  message: string
}
export type resetBiometryRequestAction = { type: 'RESET_BIOMETRY_REQUEST' }
export type resetBiometrySuccessAction = { type: 'RESET_BIOMETRY_SUCCESS' }
export type resetBiometryFalureAction = {
  type: 'RESET_BIOMETRY_FAILURE'
  code?: number
  message: string
}
export type checkBiometryRequestAction = { type: 'CHECK_BIOMETRY_REQUEST' }
export type checkBiometrySuccessAction = {
  type: 'CHECK_BIOMETRY_SUCCESS'
  biometryType: string | undefined | null
}
export type checkBiometryFalureAction = {
  type: 'CHECK_BIOMETRY_FAILURE'
  code?: number
  message: string
}

// Wallet open/close
export type setApiSecretAction = {
  type: 'SET_API_SECRET'
  apiSecret: string
}
export type setWalletOpenction = { type: 'SET_WALLET_OPEN' }
export type closeWalletAction = { type: 'CLOSE_WALLET' }

// ---------------------------------------------------------------------------
// Slatepack exchange

export type slateSetRequestAction = {
  type: 'SLATE_SET_REQUEST'
  id: string
  slatepack: string
  isResponse: boolean
}
export type slateSetSuccessAction = { type: 'SLATE_SET_SUCCESS' }
export type slateSetFalureAction = {
  type: 'SLATE_SET_FAILURE'
  code?: number
  message: string
}
export type slateRemoveRequestAction = {
  type: 'SLATE_REMOVE_REQUEST'
  id: string
  isResponse: boolean
}
export type slateRemoveSuccessAction = { type: 'SLATE_REMOVE_SUCCESS' }
export type slateRemoveFalureAction = {
  type: 'SLATE_REMOVE_FAILURE'
  code?: number
  message: string
}
export type txReceiveRequestAction = {
  type: 'TX_RECEIVE_REQUEST'
  slatepack: string
}
export type txReceiveSuccessAction = { type: 'TX_RECEIVE_SUCCESS' }
export type txReceiveFalureAction = {
  type: 'TX_RECEIVE_FAILURE'
  code?: number
  message: string
}
export type txFinalizeRequestAction = {
  type: 'TX_FINALIZE_REQUEST'
  slatepack: string
}
export type txFinalizeSuccessAction = { type: 'TX_FINALIZE_SUCCESS' }
export type txFinalizeFalureAction = {
  type: 'TX_FINALIZE_FAILURE'
  code?: number
  message: string
}

// ---------------------------------------------------------------------------
// Wallet lifecycle (init / scan / phrase / destroy / migrate)

export type walletClear = { type: 'WALLET_CLEAR' }
export type walletInitRequestAction = {
  type: 'WALLET_INIT_REQUEST'
  password: string
  phrase: string
  isNew: boolean
}
export type walletInitSuccessAction = { type: 'WALLET_INIT_SUCCESS' }
export type walletInitFalureAction = {
  type: 'WALLET_INIT_FAILURE'
  code?: number
  message: string
}
export type walletInitSetIsNewAction = {
  type: 'WALLET_INIT_SET_IS_NEW'
  value: boolean
}
export type walletScanStartAction = { type: 'WALLET_SCAN_START' }
export type walletScanDoneAction = { type: 'WALLET_SCAN_DONE' }
export type walletScanResetAction = { type: 'WALLET_SCAN_RESET' }
export type walletScanFailureAction = {
  type: 'WALLET_SCAN_FAILURE'
  code?: number
  message: string
}
export type walletScanPmmrRangeRequestAction = {
  type: 'WALLET_SCAN_PMMR_RANGE_REQUEST'
}
export type walletScanPmmrRangeSuccessAction = {
  type: 'WALLET_SCAN_PMMR_RANGE_SUCCESS'
  range: PmmrRange
}
export type walletScanPmmrRangeFalureAction = {
  type: 'WALLET_SCAN_PMMR_RANGE_FAILURE'
  code?: number
  message: string
}
export type walletScanOutputsRequestAction = {
  type: 'WALLET_SCAN_OUTPUTS_REQUEST'
  lastRetrievedIndex: number
}
export type walletScanOutputsSuccessAction = {
  type: 'WALLET_SCAN_OUTPUTS_SUCCESS'
  lastRetrievedIndex: number
}
export type walletScanOutputsFalureAction = {
  type: 'WALLET_SCAN_OUTPUTS_FAILURE'
  code?: number
  message: string
}
export type walletPhraseRequestAction = {
  type: 'WALLET_PHRASE_REQUEST'
  password: string
}
export type walletPhraseSuccessAction = {
  type: 'WALLET_PHRASE_SUCCESS'
  phrase: string
}
export type walletPhraseFalureAction = {
  type: 'WALLET_PHRASE_FAILURE'
  code?: number
  message: string
}
export type walletDestroyRequestAction = { type: 'WALLET_DESTROY_REQUEST' }
export type walletDestroySuccessAction = { type: 'WALLET_DESTROY_SUCCESS' }
export type walletDestroyFalureAction = {
  type: 'WALLET_DESTROY_FAILURE'
  code?: number
  message: string
}
export type walletMigrateToMainnetRequestAction = {
  type: 'WALLET_MIGRATE_TO_MAINNET_REQUEST'
}
export type walletMigrateToMainnetSuccessAction = {
  type: 'WALLET_MIGRATE_TO_MAINNET_SUCCESS'
}
export type walletMigrateToMainnetFalureAction = {
  type: 'WALLET_MIGRATE_TO_MAINNET_FAILURE'
  code?: number
  message: string
}

// ---------------------------------------------------------------------------
// UI: toasts and the send-transaction form

export type toastShowAction = {
  type: 'TOAST_SHOW'
  text: string
  duration?: number
}
export type toastClearAction = { type: 'TOAST_CLEAR' }
export type txFormSetFromLinkAction = {
  type: 'TX_FORM_SET_FROM_LINK'
  amount: number
  message: string
  url: string
  textAmount: string
}
export type txFormSetAmountAction = {
  type: 'TX_FORM_SET_AMOUNT'
  amount: number
  textAmount: string
}
export type txFormSetAddressAction = {
  type: 'TX_FORM_SET_ADDRESS'
  address: string
}
export type txFormSetOutputStrategyAction = {
  type: 'TX_FORM_SET_OUTPUT_STRATEGY'
  outputStrategy: OutputStrategy
}
export type txFormOutputStrategiesRequestAction = {
  type: 'TX_FORM_OUTPUT_STRATEGIES_REQUEST'
  amount: number
}
export type txFormOutputStrategiesSuccessAction = {
  type: 'TX_FORM_OUTPUT_STRATEGIES_SUCCESS'
  outputStrategies: Array<RustOutputStrategy>
}
export type txFormOutputStrategiesFalureAction = {
  type: 'TX_FORM_OUTPUT_STRATEGIES_FAILURE'
  code?: number
  message: string
}
export type txFormSetMessageAction = {
  type: 'TX_FORM_SET_MESSAGE'
  message: string
}
export type txFormResetAction = { type: 'TX_FORM_RESET' }

// Currency rates
export type currencyRatesRequestAction = { type: 'CURRENCY_RATES_REQUEST' }
export type currencyRatesSuccessAction = {
  type: 'CURRENCY_RATES_SUCCESS'
  rates: { [x: string]: Record<string, number> }
}
export type currencyRatesToggleAction = { type: 'CURRENCY_RATES_TOGGLE' }
export type currencyRatesFalureAction = {
  type: 'CURRENCY_RATES_FAILURE'
  code?: number
  message: string
}

// Wallet existence probe
export type walletExistsRequestAction = { type: 'WALLET_EXISTS_REQUEST' }
export type walletExistsSuccessAction = {
  type: 'WALLET_EXISTS_SUCCESS'
  exists: boolean
}
export type walletExistsFalureAction = {
  type: 'WALLET_EXISTS_FAILURE'
  code?: number
  message: string
}
export type acceptLegal = { type: 'ACCEPT_LEGAL'; value: boolean }

// Discriminated union of every legacy (non-toolkit) action; reducers
// switch on the `type` tag. Keep this list in sync with the declarations
// above when adding actions.
export type Action =
  | acceptLegal
  | txListClearAction
  | txListRequestAction
  | txListSuccessAction
  | txListFailureAction
  | txGetRequestAction
  | txGetSuccessAction
  | txGetFalureAction
  | txCancelRequestAction
  | txCancelSuccessAction
  | txCancelFalureAction
  | txCreateRequestAction
  | txCreateSuccessAction
  | txCreateFalureAction
  | txSendAddressRequestAction
  | txSendAddressSuccessAction
  | txSendAddressFalureAction
  | txPostShowAction
  | txPostCloseAction
  | txPostRequestAction
  | txPostSuccessAction
  | txPostFalureAction
  | setSettingsAction
  | switchToMainnetAction
  | switchToFloonetAction
  | enableBiometryRequestAction
  | enableBiometrySuccessAction
  | enableBiometryFalureAction
  | disableBiometryRequestAction
  | disableBiometrySuccessAction
  | disableBiometryFalureAction
  | resetBiometryRequestAction
  | resetBiometrySuccessAction
  | resetBiometryFalureAction
  | checkBiometryRequestAction
  | checkBiometrySuccessAction
  | checkBiometryFalureAction
  | setApiSecretAction
  | setWalletOpenction
  | closeWalletAction
  | slateSetRequestAction
  | slateSetSuccessAction
  | slateSetFalureAction
  | slateRemoveRequestAction
  | slateRemoveSuccessAction
  | slateRemoveFalureAction
  | txReceiveRequestAction
  | txReceiveSuccessAction
  | txReceiveFalureAction
  | txFinalizeRequestAction
  | txFinalizeSuccessAction
  | txFinalizeFalureAction
  | toastShowAction
  | toastClearAction
  | txFormSetFromLinkAction
  | txFormSetAmountAction
  | txFormSetAddressAction
  | txFormSetMessageAction
  | txFormResetAction
  | txFormSetOutputStrategyAction
  | txFormOutputStrategiesRequestAction
  | txFormOutputStrategiesSuccessAction
  | txFormOutputStrategiesFalureAction
  | walletClear
  | walletInitRequestAction
  | walletInitSuccessAction
  | walletInitFalureAction
  | walletInitSetIsNewAction
  | walletScanResetAction
  | walletScanFailureAction
  | walletScanDoneAction
  | walletScanStartAction
  | walletScanPmmrRangeRequestAction
  | walletScanPmmrRangeSuccessAction
  | walletScanPmmrRangeFalureAction
  | walletScanOutputsRequestAction
  | walletScanOutputsSuccessAction
  | walletScanOutputsFalureAction
  | walletPhraseRequestAction
  | walletPhraseSuccessAction
  | walletPhraseFalureAction
  | walletDestroyRequestAction
  | walletDestroySuccessAction
  | walletDestroyFalureAction
  | walletMigrateToMainnetRequestAction
  | walletMigrateToMainnetSuccessAction
  | walletMigrateToMainnetFalureAction
  | currencyRatesRequestAction
  | currencyRatesSuccessAction
  | currencyRatesToggleAction
  | currencyRatesFalureAction
  | walletExistsRequestAction
  | walletExistsSuccessAction
  | walletExistsFalureAction

// Actions handled by @reduxjs/toolkit slices (separate from the legacy union).
export type ToolkitActions = TorActions | TxReceiveActions

export type Currency = {
  code: string
  fractionDigits: number
}
export type Dispatch = (action: Action) => void
export type Store = {
  dispatch: Dispatch
  getState: () => RootState
}
export type OutputStrategy = {
  selectionStrategyIsUseAll: boolean
  total: string
  fee: string
}
export type PmmrRange = {
  lastRetrievedIndex: number
  highestIndex: number
}

// Camel-case (app-side) mirror of RustBalance below.
export type Balance = {
  amountAwaitingConfirmation: string
  amountCurrentlySpendable: string
  amountImmature: string
  amountLocked: string
  lastConfirmedHeight: string
  minimumConfirmations: string
  total: string
}
type SlateParticipantData = {
  message: string
}
export type Slate = {
  id: string
  amount: number
  fee: number
  participant_data: Array<SlateParticipantData>
  // Slatepack state: S1 = standard sent, S2 = response, S3 = finalized.
  // NOTE(review): inferred from Grin slate conventions — confirm.
  sta: 'S1' | 'S2' | 'S3'
}
export type Tx = {
  id: number
  type: string
  amount: string
  confirmed: boolean
  fee: string
  creationTime: string
  slateId: string | null
  storedTx: string | null
  kernelExcess: string | null
}
// Rust structures — snake_case shapes as serialized by the Rust backend.
export type RustBalance = {
  amount_awaiting_confirmation: string
  amount_currently_spendable: string
  amount_immature: string
  amount_locked: string
  last_confirmed_height: string
  minimum_confirmations: string
  total: string
}
export type RustTx = {
  amount_credited: string
  amount_debited: string
  confirmation_ts: string | null
  confirmed: boolean
  creation_ts: string
  fee: string | null
  id: number
  kernel_excess: string | null
  kernel_lookup_min_height: string | null
  num_inputs: number
  num_outputs: number
  parent_key_id: string
  payment_proof: string | null
  reverted_after: string | null
  stored_tx: string | null
  ttl_cutoff_height: string | null
  tx_slate_id: string | null
  tx_type: string // TxReceived ...
}
export type RustOutputStrategy = {
  selection_strategy_is_use_all: boolean
  total: string
  fee: string
}
export type RustPmmrRange = Array<number>
// Redux
// NOTE(review): shadows the global `Error`; importing modules get this
// shape, not the built-in — intentional here, but easy to misread.
export type Error = {
  code?: number
  message: string
}
export type UrlQuery = {
  amount: string
  destination: string
  message: string
}
// Navigation prop pair for a given screen of the root stack.
export interface NavigationProps<Screen extends keyof RootStackParamList> {
  navigation: StackNavigationProp<RootStackParamList, Screen>
  route: RouteProp<RootStackParamList, Screen>
}
export type valueof<T> = T[keyof T]
the_stack
// SSA dead-code elimination pass.
//
// Given a function in SSA form, computes block reachability and value
// liveness, then removes dead edges, dead values and unreachable blocks.
// Ported from Go's cmd/compile/internal/ssa/deadcode.go (see the comment
// preserved at the bottom of removeEdge).
import { Value, Block, BlockKind, Fun, BranchPrediction } from './ssa'
import { phielimValue } from './phielim'
import { copyelim } from './copyelim'
import { ops, opinfo } from "./ops"
// import { printir } from './repr'
// import { debuglog as dlog } from '../util'
const dlog = function(..._ :any[]){} // silence dlog

// deadcode removes dead code from f.
//
// Phases, in order (order matters — each phase relies on the previous):
//   1. compute reachable blocks
//   2. disconnect dead blocks from live successors; prune the never-taken
//      second edge of BlockKind.First blocks
//   3. copyelim to splice out copies introduced by edge removal
//   4. compute live values; drop dead namedValues entries; unlink dead
//      values' args; reduce phis; compact per-block value lists and the
//      block list itself
//
export function deadcode(f :Fun) {
  // deadcode after regalloc is forbidden for now. Regalloc
  // doesn't quite generate legal SSA which will lead to some
  // required moves being eliminated.
  assert(f.regAlloc == null, `deadcode after regalloc for ${f}`)

  // Find reachable blocks.
  let reachable = reachableBlocks(f)

  // debug log
  dlog(`reachable blocks: ` + reachable
    .map((reachable, id) => reachable ? "b"+id : undefined)
    .filter(id => id !== undefined)
    .join(" ")
  )

  // remove edges from dead to live code
  for (let b of f.blocks) {
    if (reachable[b.id]) { continue }
    dlog(`${b} is dead; remove edges to live blocks`)
    // No i++ in the loop header on purpose: removeEdge shrinks b.succs,
    // so the index only advances when the successor is kept.
    for (let i = 0; i < b.succs.length;) {
      let sb = b.succs[i]
      if (reachable[sb.id]) {
        dlog(`  remove edge ${sb} -> ${b}`)
        removeEdge(b, i)
      } else {
        // sb not reachable (no need to disconnect)
        i++
      }
    }
  }

  // remove dead edges from live code: a First block only ever takes its
  // first successor, so drop edge 1 and demote the block to Plain.
  for (let b of f.blocks) {
    if (!reachable[b.id]) { continue }
    if (b.kind != BlockKind.First) { continue }
    removeEdge(b, 1)
    b.kind = BlockKind.Plain
    b.likely = BranchPrediction.Unknown
  }

  // Splice out any copies introduced during dead block removal
  copyelim(f)

  // Find live values.
  let live = liveValues(f, reachable)
  dlog(`live values:`, Object.keys(live).map(k => 'v' + k).join(', '))

  // Remove dead & duplicate entries from namedValues map.
  // In-place compaction: keep each live value once (dedup via `s`),
  // then truncate; delete the name entirely if nothing survived.
  let s = new Set<int>()
  // let i = 0
  for (let [key, e] of f.namedValues) {
    let j = 0
    s.clear()
    let values = e.values
    for (let v of values) {
      if (live[v.id] && !s.has(v.id)) {
        values[j] = v
        j++
        s.add(v.id)
      }
    }
    if (j == 0) {
      f.namedValues.delete(key)
    } else {
      // f.names[i] = key
      // i++
      // for (let k = values.length - 1; k >= j; k--) {
      //   // values[k].uses--
      //   ;(values as any)[k] = null
      // }
      // e.values = values.slice(0, j)
      values.length = j
    }
  }
  // f.names.length = i
  dlog(`live names:`, Array.from(f.namedValues.keys()).join(', '))

  // Unlink values: clear control of unreachable blocks and reset the args
  // of dead values so use counts drop before the phi-reduction pass below.
  // NOTE(review): the `continue` below is commented out, so the value loop
  // also runs for unreachable blocks — presumably intentional (their dead
  // values also need their args reset); confirm.
  for (let b of f.blocks) {
    if (!reachable[b.id]) {
      b.setControl(null)
      // for (let v of b.values) {
      //   // v.uses = 0
      //   v.resetArgs()
      // }
      // continue
    }
    // dlog(`${b}`)
    for (let v of b.values) {
      if (!live[v.id]) {
        // dlog(`  ${v}.resetArgs() ${v.args}`)
        // for (let arg of v.args) {
        //   // dlog(`    ${arg}.resetArgs()`)
        //   arg.resetArgs()
        // }
        v.resetArgs()
      } //else dlog(`  ${v} (keep live)`)
    }
  }

  // print('———————————————————————————-')
  // printir(f)

  // decrement use counters of unused args and reduce Phis
  for (let b of f.blocks) {
    if (!reachable[b.id]) { continue }
    for (let v of b.values) {
      if (v.op == ops.Phi) {
        let args = v.args  // NOTE(review): unused local — candidate for removal
        if (live[v.id]) {
          // Compact the phi's args to only those that are live, then let
          // phielimValue collapse it to a Copy if it became trivial.
          let i = 0
          for (let a of v.args) {
            if (live[a.id]) {
              dlog(`phireduce keep ${v}[${a}]`)
              v.args[i++] = a
            } else {
              dlog(`phireduce remove ${v}[${a}]`)
              a.uses--
            }
          }
          v.args.length = i
          phielimValue(v)
        } else {
          // Dead phi: just release its uses of every arg.
          for (let a of v.args) {
            dlog(`phireduce remove ${v}[${a}]`)
            a.uses--
          }
        }
      }
    }
  }

  // print('———————————————————————————-')
  // printir(f)

  // Remove dead values from blocks' value list (in-place compaction).
  for (let b of f.blocks) {
    let i = 0
    for (let v of b.values) {
      if (live[v.id]) {
        b.values[i] = v
        i++
      } else {
        f.freeValue(v)
      }
    }
    b.values.length = i
  }

  // Remove unreachable blocks (in-place compaction of f.blocks).
  let i = 0
  for (let b of f.blocks) {
    if (reachable[b.id]) {
      f.blocks[i] = b
      i++
    } else {
      assert(
        b.values.length == 0,
        `live values in unreachable block ${b}: ${b.values.join(', ')}`
      )
      f.freeBlock(b)
    }
  }
  f.blocks.length = i

  // print('———————————————————————————-')
  // printir(f)
  // process.exit(0)
} // deadcode

// ReachableBlocks returns the reachable blocks in f, as a map indexed by
// block id (sparse array of booleans). Standard worklist DFS from f.entry.
//
function reachableBlocks(f :Fun) :bool[] {
  let reachable = new Array<bool>(f.numBlocks())
  reachable[f.entry.id] = true
  let p :Block[] = [] // stack-like worklist
  p.push(f.entry)
  while (p.length > 0) {
    // Pop a reachable block
    let b = p.pop() as Block
    // Mark successors as reachable
    let s = b.succs
    if (b.kind == BlockKind.First) {
      // Drop 2nd block from being considered reachable.
      // BlockKind.First indicates that only the first path is
      // ever taken, never the second.
      // NOTE(review): if `b.succs` returns the block's own array (not a
      // copy), this splice mutates the block's successor list here, and
      // deadcode's later removeEdge(b, 1) for First blocks would then index
      // past the end. The Go original slices non-destructively (s = s[:1]).
      // Verify that Block.succs yields a copy — otherwise this is a bug.
      s.splice(1, 1)
    }
    for (let c of s) {
      assert(
        c.id < reachable.length,
        `block ${c} >= f.numBlocks()=${reachable.length}`
      )
      if (!reachable[c.id]) {
        reachable[c.id] = true
        p.push(c)
      }
    }
  }
  return reachable
}

// liveValues returns a map indexed by value id of whether each value in f
// is live. `reachable` is a map from block id to whether the block is
// reachable (as produced by reachableBlocks).
// (The Go original additionally returns a statement-order list; this port
// returns only the liveness map.)
//
function liveValues(f :Fun, reachable :bool[]) :bool[] {
  let live = new Array<bool>(f.numValues())

  // After regalloc, consider all values to be live.
  // See the comment at the top of regalloc.go and in deadcode for details.
  if (f.regAlloc) {
    live.fill(true)
    return live
  }

  // TODO: inlining
  // Record all the inline indexes we need
  // let liveInlIdx = new Map<int,bool>()
  // let pt = f.config.ctxt.PosTable
  // ...

  // Find all live values
  let q :Value[] = [] // stack-like worklist of unscanned values

  // Starting set: all control values of reachable blocks are live.
  // Calls are live (because callee can observe the memory state).
  for (let b of f.blocks) {
    if (!reachable[b.id]) { continue }
    let v = b.control
    if (v && !live[v.id]) {
      // dlog(`flag live block control ${v}`)
      live[v.id] = true
      q.push(v)
    }
    for (let v of b.values) {
      let info = opinfo[v.op]
      if ((info.call || info.hasSideEffects) && !live[v.id]) {
        // dlog(`flag live call/side-effect ${v}`)
        live[v.id] = true
        q.push(v)
      }
      if (info.type && info.type.isNil() && !live[v.id]) {
        // The only Void ops are nil checks and inline marks. We must keep these.
        // if (v.op == ops.InlMark && !liveInlIdx[v.auxInt]) {
        //   // We don't need marks for bodies that
        //   // have been completely optimized away.
        //   // TODO: save marks only for bodies which
        //   // have a faulting instruction or a call?
        //   continue
        // }
        // dlog(`flag live niltype ${v}`)
        live[v.id] = true
        q.push(v)
      }
    }
  }
  dlog(`live: ${live.map((v, i) => [v,"v"+i]).filter(v => v[0] !== undefined).map(v => v[1])}`)
  dlog(`q: ${q}`)

  // Compute transitive closure of live values: a live value's args are
  // live, except phi args flowing in from unreachable predecessors.
  while (q.length > 0) {
    // pop a reachable value
    let v = q.pop()!
    dlog(`${v} reachable`)
    for (let i = 0; i < v.args.length; i++) {
      let x = v.args[i]
      if (v.op == ops.Phi && !reachable[v.b.preds[i].id]) {
        dlog(`  args[${i}] = ${x} -- skip phi`)
        continue
      }
      if (reachable[x.b.id] && !live[x.id]) {
        dlog(`  args[${i}] = ${x} -- promote to live`)
        live[x.id] = true
        q.push(x)
      }
    }
  }
  return live
}

// function constControl(ctrl :Value) :Value {
//   let args :Value[]|undefined
//   for (let i = 0; i < ctrl.args.length; i++) {
//     let arg = ctrl.args[i]
//     if (arg.op === ops.Phi && arg.b === ifb) {
//       if (!args) {
//         args = ctrl.args.slice() // copy
//       }
//       assert(ifb.preds[0] === entryb, `entryb not at expected index`)
//       args[i] = arg.args[0]
//     }
//   }
//   // args will be set only if we found at least one Phi in control.args
//   if (args) {
//     // attempt constant evaluation of control value
//     let constctrl :Value|null = null
//     if (args.length == 2) {
//       constctrl = optcf_op2(ifb, control.op, args[0], args[1])
//     } else if (args.length == 1) {
//       constctrl = optcf_op1(ifb, control.op, args[0])
//     }
//     if (constctrl && constctrl.auxIsZero()) {
//       // while loop never taken -- shortcut entryb -> nextb
//       removeEdge(entryb, 0)
//       entryb.succs = [nextb]
//       removeEdge(ifb, 0)
//       nextb.preds = [entryb]
//       // s.f.removeBlock(ifb)
//       // s.f.removeBlock(thenb)
//     }
//   }
// }

// removeEdge removes the i'th outgoing edge from b
// (and the corresponding incoming edge from b.succs[i]),
// then fixes up the affected phis in the successor.
//
export function removeEdge(b :Block, i :int) {
  let c = b.succs[i]
  let j = c.preds.indexOf(b) // index of reverse edge. Invariant:
  // e := x.succs[idx]
  // e.b.preds[e.i] = Edge(x,idx)
  assert(b.succs[i] === c)
  // index of reverse edge. Invariant:
  // e := x.preds[idx]
  // e.b.succs[e.i] = Edge(x,idx)
  assert(b === c.preds[j])

  // Adjust b.succs
  b.removeSucc(i)
  // Adjust c.preds
  c.removePred(j)

  // Remove phi args from c's phis.
  // After removePred, c has n preds, so each phi had n+1 args: move the
  // last arg into slot j and truncate (swap-remove).
  let n = c.preds.length
  for (let v of c.values) {
    if (v.op != ops.Phi) { continue }
    // remove the edge from Phi's args, i.e. (Phi x y) -> (Phi x)
    v.args[j].uses--
    v.args[j] = v.args[n]
    v.args.length = n
    // x = (Phi y _) -> x = (Copy y)
    phielimValue(v)
    // [from go/src/cmd/compile/internal/ssa/deadcode.go]
    //
    // Note: this is trickier than it looks. Replacing
    // a Phi with a Copy can in general cause problems because
    // Phi and Copy don't have exactly the same semantics.
    // Phi arguments always come from a predecessor block,
    // whereas copies don't. This matters in loops like:
    // 1: x = (Phi y)
    //    y = (Add x 1)
    //    goto 1
    // If we replace Phi->Copy, we get
    // 1: x = (Copy y)
    //    y = (Add x 1)
    //    goto 1
    // (Phi y) refers to the *previous* value of y, whereas
    // (Copy y) refers to the *current* value of y.
    // The modified code has a cycle and the scheduler
    // will barf on it.
    //
    // Fortunately, this situation can only happen for dead
    // code loops. We know the code we're working with is
    // not dead, so we're ok.
    // Proof: If we have a potential bad cycle, we have a
    // situation like this:
    //   x = (Phi z)
    //   y = (op1 x ...)
    //   z = (op2 y ...)
    // Where opX are not Phi ops. But such a situation
    // implies a cycle in the dominator graph. In the
    // example, x.Block dominates y.Block, y.Block dominates
    // z.Block, and z.Block dominates x.Block (treating
    // "dominates" as reflexive). Cycles in the dominator
    // graph can only happen in an unreachable cycle.
  }
}
the_stack
'use strict'; /* Directive tells jshint that suite and test are globals defined by mocha */ /* global suite */ /* global test */ import * as _ from "underscore"; import * as assert from "assert"; const async = require("async"); const util = require('./util/util'); const cp = util.commonParameters; const testRequire = util.testRequire; const MemoryCache = testRequire('memory-cache'); const CacheDriver = testRequire('cache-driver'); suite('CacheDriver', function() { function unexpectedRefreshFunction() { assert(false, 'Unexpected attempt to refresh a token.'); } function assertEntriesEqual(expected: any, received: any, message: string) { if (!_.isEqual(expected, received)) { util.findDiffs(expected, received); console.log('Expected:'); console.log(expected); console.log('Received'); console.log(received); assert(false, message); } } /* * Compares two lists of cache entries. The lists will be sorted before comparison and the comparison will * take in to account the different ways that MRRT is indicated when a cache entry is submitted to the cache * and once it is in the cache. */ function compareInputAndCache(input: any, cache: any, numMRRTTokens: any, mrrtRefreshToken?: any) { var foundNumMRRTTokens = 0; var cacheEntries = cache._entries; var authority = cp.authorityTenant; var userId = cp.username; assert(input.length === cacheEntries.length, 'Input responses and cache entries lengths are not the same: ' + input.length + ',' + cacheEntries.length); input = _.sortBy(input, 'accessToken'); cacheEntries = _.sortBy(cacheEntries, 'accessToken'); for (var j = 0; j < cacheEntries.length; j++) { var expected = _.clone(input[j]); var received = _.clone(cacheEntries[j]); if (received.isMRRT) { foundNumMRRTTokens++; if (received._authority === authority && received.userId === userId) { // Everything should match except the refresh token. We will check that below. 
delete expected['refreshToken']; delete received['refreshToken']; } } assertEntriesEqual(expected, received, 'Found a modified entry number ' + j); } if (numMRRTTokens) { assert(numMRRTTokens === foundNumMRRTTokens, 'Found wrong number of MRRT tokens in the cache: ' + numMRRTTokens + ',' + foundNumMRRTTokens); // Ensure that when the last refresh token was added that all mrrt refresh tokens were updated to contain that same // refresh token. for (var i = 0; i < cacheEntries[i].length; i++) { if (cacheEntries[i].isMRRT) { assert(cacheEntries[i]['refreshToken'] === mrrtRefreshToken, 'One of the responses refresh token was not correctly updated: ' + i); } } } } test('add-entry', function(done) { var fakeTokenRequest = util.createEmptyADALObject(); var response = util.createResponse(); var expectedResponse = response.cachedResponse; var memCache = new MemoryCache(); var cacheDriver = new CacheDriver(fakeTokenRequest._callContext, response.authority, response.resource, response.clientId, memCache, unexpectedRefreshFunction); cacheDriver.add(response.decodedResponse, function(err: any) { var stack = err ? err.stack : null; assert(!err, 'Received unexpected error: ' + stack); var length = memCache._entries.length; assert(length === 1, 'Cache after test has does not have the correct number of entries ' + length + ': ' + memCache._entries); assertEntriesEqual(expectedResponse, memCache._entries[0], 'The saved cache entry has been modified'); done(); }); }); test('add-entry-no-cache', function(done) { var fakeTokenRequest = util.createEmptyADALObject(); var response = util.createResponse(); var cacheDriver = new CacheDriver(fakeTokenRequest._callContext, response.authority, response.resource, cp.clientId, null, unexpectedRefreshFunction); cacheDriver.add(response.decodedResponse, function(err: any) { var stack = err ? 
err.stack : null; assert(!err, 'Received unexpected error: ' + stack); done(); }); }); test('add-entry-single-mrrt', function(done) { var fakeTokenRequest = util.createEmptyADALObject(); var responseOptions = { mrrt : true }; var response = util.createResponse(responseOptions); var expectedResponse = response.cachedResponse; var resource = response.resource; var memCache = new MemoryCache(); var cacheDriver = new CacheDriver(fakeTokenRequest._callContext, response.authority, resource, cp.clientId, memCache, unexpectedRefreshFunction); cacheDriver.add(response.decodedResponse, function(err: any) { var stack = err ? err.stack : null; assert(!err, 'Received unexpected error: ' + stack); var length = memCache._entries.length; assert(length === 1, 'Cache after test has does not have the correct number of entrie ' + length + ': ' + memCache._entries); assertEntriesEqual(expectedResponse, memCache._entries[0], 'The saved cache entry has been modified'); done(); }); }); /** * Creates a new CacheDriver with a MemoryCache and fills it with test entries. * @param {int} numEntries The total number of entries that should be in the cache * @param {int} numMrrt The number of tokens in the cache that should be mrrt tokens. This number must * be smaller than numEntries. * @param {Function} callback returns an object with the CacheDriver etc... */ function fillCache(numEntries: any, numMrrt: any, addExpired: any, callback: any) { var fakeTokenRequest = util.createEmptyADALObject(); var memCache = new MemoryCache(); var authority = cp.authorityTenant; var responses: any = []; var divisor = Math.floor(numEntries / numMrrt); var finalMrrt: any; var expiredEntry: any; for (var i = 0; i < numEntries; i++) { var responseOptions: any = { authority : cp.authorityTenant}; if (numMrrt && ((i + 1) % divisor) === 0) { responseOptions.mrrt = true; } else if (addExpired) { responseOptions.expired = expiredEntry ? 
false : true; } var newResponse = util.createResponse(responseOptions, i); finalMrrt = responseOptions.mrrt ? newResponse.refreshToken : finalMrrt; expiredEntry = responseOptions.expired ? newResponse : expiredEntry; responses.push(newResponse); } var count = 0; var finalRefreshToken: any; async.whilst( function() { return count < numEntries; }, function(callback: any) { var resource = responses[count].resource; var clientId = responses[count].clientId; var cacheDriver = new CacheDriver(fakeTokenRequest._callContext, authority, resource, clientId, memCache, unexpectedRefreshFunction); var responseToAdd = _.clone(responses[count].decodedResponse); cacheDriver.add(responseToAdd, function(err: any) { count++; process.nextTick(function() { callback(err); return; }); }); }, function(err: any) { var cachedResponses = []; for (var j = 0; j < responses.length; j++) { cachedResponses.push(responses[j].cachedResponse); } var testValues = { cachedResponses : cachedResponses, memCache : memCache, finalMrrt : finalMrrt, fakeTokenRequest : fakeTokenRequest, authority : authority, expiredEntry : expiredEntry }; callback(err, testValues, finalRefreshToken); } ); } test('add-multiple-entries-ensure-authority-respected', function(done) { var numMRRTTokens = 6; fillCache(20, numMRRTTokens, false, function(err: any, testValues: any) { var responses = testValues.cachedResponses; var memCache = testValues.memCache; var fakeTokenRequest = testValues.fakeTokenRequest; if (!err) { compareInputAndCache(responses, memCache, numMRRTTokens); var otherAuthority = 'someOtherAuthority'; var responseOptions = { authority : otherAuthority, mrrt : true, resource : responses[0].resource }; var differentAuthorityResponse = util.createResponse(responseOptions, 21); delete responseOptions.authority; var extraMRRTResponse = util.createResponse(responseOptions, 21); responses.push(extraMRRTResponse.cachedResponse); responses.push(differentAuthorityResponse.cachedResponse); numMRRTTokens += 2; // order is 
important here. We want to ensure that when we add the second MRRT it has only updated // the refresh token of the entries with the same authority. // update: with mega refresh token(cross tenant RT), refresh token of the entry will be updated if there is a match with userId, clientId. var cacheDriver = new CacheDriver(fakeTokenRequest._callContext, otherAuthority, differentAuthorityResponse.resource, differentAuthorityResponse.clientId, memCache, unexpectedRefreshFunction); cacheDriver.add(differentAuthorityResponse.decodedResponse, function(err: any) { assert(!err, 'Unexpected err adding entry with different authority.'); var cacheDriver2 = new CacheDriver(fakeTokenRequest._callContext, cp.authorityTenant, extraMRRTResponse.resource, extraMRRTResponse.clientId, memCache, unexpectedRefreshFunction); cacheDriver2.add(extraMRRTResponse.decodedResponse, function(err2: any) { assert(!err2, 'Unexpected error adding second entry with previous authority.'); // ensure that we only find the mrrt with the different authority. 
cacheDriver.find( { resource : differentAuthorityResponse.resource}, function(err3: any, entry: any) { assert(!err3, 'Unexpected error returned from find.'); assertEntriesEqual(differentAuthorityResponse.cachedResponse, entry, 'Queried entry did not match expected indicating authority was not respected'); }); done(); }); }); } }); }); test('add-multiple-entries-find-non-mrrt', function(done) { var numMRRTTokens = 6; fillCache(20, numMRRTTokens, false, function(err: any, testValues: any) { var responses = testValues.cachedResponses; var memCache = testValues.memCache; var fakeTokenRequest = testValues.fakeTokenRequest; if (!err) { compareInputAndCache(responses, memCache, numMRRTTokens); var findResponse = _.find(responses, function(entry: any) { return !entry.isMRRT; }); var cacheDriver = new CacheDriver(fakeTokenRequest._callContext, cp.authorityTenant, findResponse.resource, findResponse.clientId, memCache, unexpectedRefreshFunction); cacheDriver.find({}, function(err: any, entry: any) { if (!err) { assert(entry, 'Find did not return any entry'); assertEntriesEqual(findResponse, entry, 'Queried entry did not match expected: ' + JSON.stringify(entry)); } done(err); return; }); } else { done(err); return; } }); }); test('add-multiple-entries-mrrt', function(done) { var numMRRTTokens = 6; fillCache(19, numMRRTTokens, false, function(err: any, testValues: any) { var responses = testValues.cachedResponses; var memCache = testValues.memCache; var finalMrrt = testValues.finalMrrt; if (!err) { compareInputAndCache(responses, memCache, numMRRTTokens, finalMrrt); } done(); return; }); }); // This test is actually testing two different things. // 1. When a new MRRT is added to the cache only MRRT // tokens with the same userId are updated. // 2. Check that url safe base64 decoding is happening // correctly. 
test('add-multiple-entries-mrrt-different-users--url-safe-id_token', function(done) { var numMRRTTokens = 6; fillCache(19, numMRRTTokens, false, function(err: any, testValues: any) { err; var responses = testValues.cachedResponses; var memCache = testValues.memCache; var finalMrrt = testValues.finalMrrt; var fakeTokenRequest = testValues.fakeTokenRequest; var responseOptions = { mrrt : true, refreshedRefresh : true, urlSafeUserId : true }; var refreshedResponse = util.createResponse(responseOptions); // verify that the returned response contains an id_token that will actually // test url safe base64 decoding. assert(-1 !== refreshedResponse.wireResponse['id_token'].indexOf('_'), 'No special characters in the test id_token. ' + 'This test is not testing one of the things it was intended to test.'); responses.push(refreshedResponse.cachedResponse); var cacheDriver = new CacheDriver(fakeTokenRequest._callContext, testValues.authority, refreshedResponse.resource, refreshedResponse.clientId, memCache, unexpectedRefreshFunction); cacheDriver.add(refreshedResponse.decodedResponse, function(err: any) { if (!err) { compareInputAndCache(responses, memCache, numMRRTTokens + 1, finalMrrt); } done(err); return; }); }); }); test('add-multiple-entries-find-mrrt', function(done) { var numMRRTTokens = 6; fillCache(20, numMRRTTokens, false, function(err: any, testValues: any) { var responses = testValues.cachedResponses; var memCache = testValues.memCache; var fakeTokenRequest = testValues.fakeTokenRequest; var mrrtEntry: any = _.findWhere(memCache._entries, { isMRRT : true }); if (!err) { compareInputAndCache(responses, memCache, numMRRTTokens); var cacheDriver = new CacheDriver(fakeTokenRequest._callContext, cp.authorityTenant, mrrtEntry.resource, mrrtEntry._clientId, memCache, unexpectedRefreshFunction); cacheDriver.find({}, function(err: any, entry: any) { if (!err) { assert(entry, 'Find did not return any entry'); assertEntriesEqual(mrrtEntry, entry, 'Queried entry did not 
match expected: ' + JSON.stringify(entry)); } done(err); return; }); } else { done(err); return; } }); }); function createRefreshFunction(expectedRefreshToken: any, response: any) { var refreshFunction = function(entry: any, resource: any, callback: any) { resource; if (expectedRefreshToken !== entry['refreshToken']) { console.log('RECEIVED:'); console.log(entry.refreshToken); console.log('EXPECTED'); console.log(expectedRefreshToken); assert(false, 'RefreshFunction received unexpected refresh token: ' + entry['refreshToken']); } assert(_.isFunction(callback), 'callback parameter is not a function'); callback(null, response); }; return refreshFunction; } test('add-multiple-entries-mrrt-find-refreshed-mrrt', function(done) { var numMRRTTokens = 5; fillCache(20, 5, false, function(err: any, testValues: any) { var responses = testValues.cachedResponses; var memCache = testValues.memCache; var fakeTokenRequest = testValues.fakeTokenRequest; var finalMrrt = testValues.finalMrrt; var authority = testValues.authority; var unknownResource = 'unknownResource'; var responseOptions = { resource : unknownResource, mrrt : true, refreshedRefresh : true }; var refreshedResponse = util.createResponse(responseOptions); var refreshedRefreshToken = refreshedResponse.refreshToken; var refreshFunction = createRefreshFunction(finalMrrt, refreshedResponse.decodedResponse); if (!err) { compareInputAndCache(responses, memCache, numMRRTTokens, finalMrrt); responses.push(refreshedResponse.cachedResponse); var cacheDriver = new CacheDriver(fakeTokenRequest._callContext, authority, unknownResource, cp.clientId, memCache, refreshFunction); cacheDriver.find(null, function(err: any, entry: any) { if (!err) { assert(entry, 'Expected a matching entry, but none was returned.'); assert(entry.resource === unknownResource, 'Unexpected resource returned:' + entry.resource); assert(refreshedRefreshToken === entry['refreshToken'], 'Returned refresh token did not match expected'); 
compareInputAndCache(responses, memCache, numMRRTTokens + 1, entry.refreshToken); // Now ensure that the refreshed token can be successfully found in the cache. var query = { userId : entry.userId, clientId : cp.clientId }; cacheDriver.find(query, function(err: any, recentlyCachedEntry: any) { if (!err) { assert(recentlyCachedEntry, 'Expected a returned entry but none was returned.'); assertEntriesEqual(entry, recentlyCachedEntry, 'Token returned from cache was not the same as the one that was recently cached.'); compareInputAndCache(responses, memCache, numMRRTTokens + 1, entry.refreshToken); } done(err); return; }); } else { done(err); return; } }); } else { done(err); return; } }); }); test('add-multiple-entries-failed-mrrt-refresh', function(done) { var numMRRTTokens = 5; fillCache(20, 5, false, function(err: any, testValues: any) { var responses = testValues.cachedResponses; var memCache = testValues.memCache; var fakeTokenRequest = testValues.fakeTokenRequest; var finalMrrt = testValues.finalMrrt; var authority = testValues.authority; var unknownResource = 'unknownResource'; var refreshFunction = function(entry: any, resource: any, callback: any) { entry; resource; callback(new Error('FAILED REFRESH')); }; if (!err) { compareInputAndCache(responses, memCache, numMRRTTokens, finalMrrt); var cacheDriver = new CacheDriver(fakeTokenRequest._callContext, authority, unknownResource, cp.clientId, memCache, refreshFunction); cacheDriver.find(null, function(err: any) { assert(err, 'Did not receive expected error.'); assert(-1 !== err.message.indexOf('FAILED REFRESH'), 'Error message did not contain correct text'); compareInputAndCache(responses, memCache, numMRRTTokens, finalMrrt); done(); return; }); } else { done(err); return; } }); }); function removeResponse(collection: any, response: any) { return _.filter(collection, function(entry) { if (_.isEqual(response, entry)) { return false; } return true; }); } test('expired-access-token', function(done) { var 
numMRRTTokens = 5; fillCache(20, 5, true, function(err: any, testValues: any) { var responses = testValues.cachedResponses; var memCache = testValues.memCache; var fakeTokenRequest = testValues.fakeTokenRequest; var authority = testValues.authority; var expiredEntry = testValues.expiredEntry.cachedResponse; var finalMrrt = testValues.finalMrrt; var responseOptions = { resource : expiredEntry.resource, refreshedRefresh : true }; var refreshedResponse = util.createResponse(responseOptions); var refreshedRefreshToken = refreshedResponse.refreshToken; var refreshFunction = createRefreshFunction(expiredEntry['refreshToken'], refreshedResponse.decodedResponse); if (!err) { compareInputAndCache(responses, memCache, numMRRTTokens, finalMrrt); responses = removeResponse(responses, expiredEntry); responses.push(refreshedResponse.cachedResponse); var cacheDriver = new CacheDriver(fakeTokenRequest._callContext, authority, expiredEntry.resource, cp.clientId, memCache, refreshFunction); cacheDriver.find(null, function(err: any, entry: any) { if (!err) { assert(entry, 'Expected a matching entry, but none was returned.'); assert(entry.resource === expiredEntry.resource, 'Unexpected resource returned:' + entry.resource); assert(refreshedRefreshToken === entry['refreshToken'], 'Returned refresh token did not match expected'); compareInputAndCache(responses, memCache, numMRRTTokens, finalMrrt); // Now ensure that the refreshed token can be successfully found in the cache. 
var query = { userId : entry.userId, clientId : cp.clientId }; cacheDriver.find(query, function(err: any, recentlyCachedEntry: any) { if (!err) { assert(recentlyCachedEntry, 'Expected a returned entry but none was returned.'); assertEntriesEqual(entry, recentlyCachedEntry, 'Token returned from cache was not the same as the one that was recently cached.'); compareInputAndCache(responses, memCache, numMRRTTokens, finalMrrt); } done(err); return; }); } else { done(err); return; } }); } else { done(err); return; } }); }); test('expired-access-token-failed-refresh', function(done) { var numMRRTTokens = 5; fillCache(20, 5, true, function(err: any, testValues: any) { var responses = testValues.cachedResponses; var memCache = testValues.memCache; var fakeTokenRequest = testValues.fakeTokenRequest; var authority = testValues.authority; var expiredEntry = testValues.expiredEntry.cachedResponse; var finalMrrt = testValues.finalMrrt; var refreshFunction = function(entry: any, resource: any, callback: any) { entry; resource; callback(new Error('FAILED REFRESH')); }; if (!err) { compareInputAndCache(responses, memCache, numMRRTTokens, finalMrrt); var cacheDriver = new CacheDriver(fakeTokenRequest._callContext, authority, expiredEntry.resource, cp.clientId, memCache, refreshFunction); cacheDriver.find(null, function(err: any) { assert(err, 'Did not receive expected error about failed refresh.'); assert(-1 !== err.message.indexOf('FAILED REFRESH'), 'Error message did not contain correct text'); compareInputAndCache(responses, memCache, numMRRTTokens, finalMrrt); done(); return; }); } else { done(err); return; } }); }); });
the_stack
//@ts-check ///<reference path="devkit.d.ts" /> declare namespace DevKit { namespace FormBooking_Journal_Mobile { interface tab_f1tab_journalDetails_Sections { f1tab_journalDetails_section_3: DevKit.Controls.Section; f1tab_journalDetails_section_4: DevKit.Controls.Section; f1tab_journalDetails_section_journal_cost: DevKit.Controls.Section; f1tab_journalDetails_section_journal_details: DevKit.Controls.Section; } interface tab_fstab_general_Sections { fstab_general_section_2: DevKit.Controls.Section; fstab_general_section_3: DevKit.Controls.Section; fstab_general_section_general: DevKit.Controls.Section; } interface tab_fstab_other_Sections { tab_4_section_1: DevKit.Controls.Section; tab_4_section_2: DevKit.Controls.Section; tab_4_section_3: DevKit.Controls.Section; } interface tab_fstab_sub_grids_Sections { fstab_sub_grids_section: DevKit.Controls.Section; fstab_sub_grids_section_2: DevKit.Controls.Section; fstab_sub_grids_section_3: DevKit.Controls.Section; } interface tab_f1tab_journalDetails extends DevKit.Controls.ITab { Section: tab_f1tab_journalDetails_Sections; } interface tab_fstab_general extends DevKit.Controls.ITab { Section: tab_fstab_general_Sections; } interface tab_fstab_other extends DevKit.Controls.ITab { Section: tab_fstab_other_Sections; } interface tab_fstab_sub_grids extends DevKit.Controls.ITab { Section: tab_fstab_sub_grids_Sections; } interface Tabs { f1tab_journalDetails: tab_f1tab_journalDetails; fstab_general: tab_fstab_general; fstab_other: tab_fstab_other; fstab_sub_grids: tab_fstab_sub_grids; } interface Body { Tab: Tabs; /** Shows the additional cost associated with this journal, if any. (This amount is not multiplied by quantity) */ msdyn_AdditionalCost: DevKit.Controls.Money; /** Shows if this journal is billable. */ msdyn_Billable: DevKit.Controls.Boolean; /** This Resource Booking this journal pertains to */ msdyn_Booking: DevKit.Controls.Lookup; /** Enter the total duration of this journal record. 
*/ msdyn_Duration: DevKit.Controls.Integer; /** Enter the end time of this journal record. */ msdyn_EndTime: DevKit.Controls.DateTime; /** Enter the type of journal. */ msdyn_JournalType: DevKit.Controls.OptionSet; /** Enter the name of the custom entity. */ msdyn_name: DevKit.Controls.String; /** Unique identifier for Resource Pay Type associated with Booking Journal. */ msdyn_PayType: DevKit.Controls.Lookup; /** Enter the start time of this journal record. */ msdyn_StartTime: DevKit.Controls.DateTime; /** Shows the total cost company pays to resource. */ msdyn_TotalCost: DevKit.Controls.Money; /** Enter the hourly cost that company pays to the resource. */ msdyn_UnitCost: DevKit.Controls.Money; notescontrol: DevKit.Controls.Note; /** Owner Id */ OwnerId: DevKit.Controls.Lookup; } interface Navigation { navProcessSessions: DevKit.Controls.NavigationItem } } class FormBooking_Journal_Mobile extends DevKit.IForm { /** * DynamicsCrm.DevKit form Booking_Journal_Mobile * @param executionContext the execution context * @param defaultWebResourceName default resource name. E.g.: "devkit_/resources/Resource" */ constructor(executionContext: any, defaultWebResourceName?: string); /** Utility functions/methods/objects for Dynamics 365 form */ Utility: DevKit.Utility; /** The Body section of form Booking_Journal_Mobile */ Body: DevKit.FormBooking_Journal_Mobile.Body; /** The Navigation of form Booking_Journal_Mobile */ Navigation: DevKit.FormBooking_Journal_Mobile.Navigation; } namespace Formmsdyn_bookingjournal_Information { interface tab_f1tab_journalDetails_Sections { f1tab_journalDetails_section_2: DevKit.Controls.Section; tab_3_section_1: DevKit.Controls.Section; } interface tab_f1tab_journalDetails extends DevKit.Controls.ITab { Section: tab_f1tab_journalDetails_Sections; } interface Tabs { f1tab_journalDetails: tab_f1tab_journalDetails; } interface Body { Tab: Tabs; /** Shows the additional cost associated with this journal, if any. 
(This amount is not multiplied by quantity) */ msdyn_AdditionalCost: DevKit.Controls.Money; /** Shows if this journal is billable. */ msdyn_Billable: DevKit.Controls.Boolean; /** This Resource Booking this journal pertains to */ msdyn_Booking: DevKit.Controls.Lookup; /** Enter the total duration of this journal record. */ msdyn_Duration: DevKit.Controls.Integer; /** Enter the end time of this journal record. */ msdyn_EndTime: DevKit.Controls.DateTime; /** Enter the type of journal. */ msdyn_JournalType: DevKit.Controls.OptionSet; /** Enter the name of the custom entity. */ msdyn_name: DevKit.Controls.String; /** Unique identifier for Resource Pay Type associated with Booking Journal. */ msdyn_PayType: DevKit.Controls.Lookup; /** Enter the start time of this journal record. */ msdyn_StartTime: DevKit.Controls.DateTime; /** Shows the total cost company pays to resource. */ msdyn_TotalCost: DevKit.Controls.Money; /** Enter the hourly cost that company pays to the resource. */ msdyn_UnitCost: DevKit.Controls.Money; notescontrol: DevKit.Controls.Note; /** Owner Id */ OwnerId: DevKit.Controls.Lookup; } interface Footer extends DevKit.Controls.IFooter { /** Status of the Booking Journal */ statecode: DevKit.Controls.OptionSet; } interface Navigation { navProcessSessions: DevKit.Controls.NavigationItem } } class Formmsdyn_bookingjournal_Information extends DevKit.IForm { /** * DynamicsCrm.DevKit form msdyn_bookingjournal_Information * @param executionContext the execution context * @param defaultWebResourceName default resource name. 
E.g.: "devkit_/resources/Resource" */ constructor(executionContext: any, defaultWebResourceName?: string); /** Utility functions/methods/objects for Dynamics 365 form */ Utility: DevKit.Utility; /** The Body section of form msdyn_bookingjournal_Information */ Body: DevKit.Formmsdyn_bookingjournal_Information.Body; /** The Footer section of form msdyn_bookingjournal_Information */ Footer: DevKit.Formmsdyn_bookingjournal_Information.Footer; /** The Navigation of form msdyn_bookingjournal_Information */ Navigation: DevKit.Formmsdyn_bookingjournal_Information.Navigation; } class msdyn_bookingjournalApi { /** * DynamicsCrm.DevKit msdyn_bookingjournalApi * @param entity The entity object */ constructor(entity?: any); /** * Get the value of alias * @param alias the alias value * @param isMultiOptionSet true if the alias is multi OptionSet */ getAliasedValue(alias: string, isMultiOptionSet?: boolean): any; /** * Get the formatted value of alias * @param alias the alias value * @param isMultiOptionSet true if the alias is multi OptionSet */ getAliasedFormattedValue(alias: string, isMultiOptionSet?: boolean): string; /** The entity object */ Entity: any; /** The entity name */ EntityName: string; /** The entity collection name */ EntityCollectionName: string; /** The @odata.etag is then used to build a cache of the response that is dependant on the fields that are retrieved */ "@odata.etag": string; /** Unique identifier of the user who created the record. */ CreatedBy: DevKit.WebApi.LookupValueReadonly; /** Shows the date and time when the record was created. The date and time are displayed in the time zone selected in Microsoft Dynamics 365 options. */ CreatedOn_UtcDateAndTime: DevKit.WebApi.UtcDateAndTimeValueReadonly; /** Shows who created the record on behalf of another user. */ CreatedOnBehalfBy: DevKit.WebApi.LookupValueReadonly; /** Shows the exchange rate for the currency associated with the entity with respect to the base currency. 
*/ ExchangeRate: DevKit.WebApi.DecimalValueReadonly; /** Shows the sequence number of the import that created this record. */ ImportSequenceNumber: DevKit.WebApi.IntegerValue; /** Unique identifier of the user who modified the record. */ ModifiedBy: DevKit.WebApi.LookupValueReadonly; /** Shows the date and time when the record was last updated. The date and time are displayed in the time zone selected in Microsoft Dynamics 365 options. */ ModifiedOn_UtcDateAndTime: DevKit.WebApi.UtcDateAndTimeValueReadonly; /** Shows who last updated the record on behalf of another user. */ ModifiedOnBehalfBy: DevKit.WebApi.LookupValueReadonly; /** Shows the additional cost associated with this journal, if any. (This amount is not multiplied by quantity) */ msdyn_AdditionalCost: DevKit.WebApi.MoneyValue; /** Shows the value of the additional cost in the base currency. */ msdyn_additionalcost_Base: DevKit.WebApi.MoneyValueReadonly; /** Shows if this journal is billable. */ msdyn_Billable: DevKit.WebApi.BooleanValue; /** This Resource Booking this journal pertains to */ msdyn_Booking: DevKit.WebApi.LookupValue; /** Shows the entity instances. */ msdyn_bookingjournalId: DevKit.WebApi.GuidValue; /** Enter the total duration of this journal record. */ msdyn_Duration: DevKit.WebApi.IntegerValue; /** Enter the end time of this journal record. */ msdyn_EndTime_UtcDateAndTime: DevKit.WebApi.UtcDateAndTimeValue; /** Enter the type of journal. */ msdyn_JournalType: DevKit.WebApi.OptionSetValue; /** Enter the name of the custom entity. */ msdyn_name: DevKit.WebApi.StringValue; /** Unique identifier for Resource Pay Type associated with Booking Journal. */ msdyn_PayType: DevKit.WebApi.LookupValue; /** Enter the start time of this journal record. */ msdyn_StartTime_UtcDateAndTime: DevKit.WebApi.UtcDateAndTimeValue; /** Shows the total cost company pays to resource. */ msdyn_TotalCost: DevKit.WebApi.MoneyValue; /** Shows the value of the total cost in the base currency. 
*/ msdyn_totalcost_Base: DevKit.WebApi.MoneyValueReadonly; /** Enter the hourly cost that company pays to the resource. */ msdyn_UnitCost: DevKit.WebApi.MoneyValue; /** Shows the value of the unit cost in the base currency. */ msdyn_unitcost_Base: DevKit.WebApi.MoneyValueReadonly; /** Shows the date and time that the record was migrated. */ OverriddenCreatedOn_UtcDateOnly: DevKit.WebApi.UtcDateOnlyValue; /** Enter the user who is assigned to manage the record. This field is updated every time the record is assigned to a different user */ OwnerId_systemuser: DevKit.WebApi.LookupValue; /** Enter the team who is assigned to manage the record. This field is updated every time the record is assigned to a different team */ OwnerId_team: DevKit.WebApi.LookupValue; /** Unique identifier for the business unit that owns the record */ OwningBusinessUnit: DevKit.WebApi.LookupValueReadonly; /** Unique identifier for the team that owns the record. */ OwningTeam: DevKit.WebApi.LookupValueReadonly; /** Unique identifier for the user that owns the record. */ OwningUser: DevKit.WebApi.LookupValueReadonly; /** Status of the Booking Journal */ statecode: DevKit.WebApi.OptionSetValue; /** Reason for the status of the Booking Journal */ statuscode: DevKit.WebApi.OptionSetValue; /** For internal use only. */ TimeZoneRuleVersionNumber: DevKit.WebApi.IntegerValue; /** Unique identifier of the currency associated with the entity. */ TransactionCurrencyId: DevKit.WebApi.LookupValue; /** Shows the time zone code that was in use when the record was created. 
*/ UTCConversionTimeZoneCode: DevKit.WebApi.IntegerValue; /** Version Number */ VersionNumber: DevKit.WebApi.BigIntValueReadonly; } } declare namespace OptionSet { namespace msdyn_bookingjournal { enum msdyn_JournalType { /** 690970001 */ Break, /** 690970004 */ Business_Closure, /** 690970003 */ Overtime, /** 690970002 */ Travel, /** 690970000 */ Working_Hours } enum statecode { /** 0 */ Active, /** 1 */ Inactive } enum statuscode { /** 1 */ Active, /** 2 */ Inactive } enum RollupState { /** 0 - Attribute value is yet to be calculated */ NotCalculated, /** 1 - Attribute value has been calculated per the last update time in <AttributeSchemaName>_Date attribute */ Calculated, /** 2 - Attribute value calculation lead to overflow error */ OverflowError, /** 3 - Attribute value calculation failed due to an internal error, next run of calculation job will likely fix it */ OtherError, /** 4 - Attribute value calculation failed because the maximum number of retry attempts to calculate the value were exceeded likely due to high number of concurrency and locking conflicts */ RetryLimitExceeded, /** 5 - Attribute value calculation failed because maximum hierarchy depth limit for calculation was reached */ HierarchicalRecursionLimitReached, /** 6 - Attribute value calculation failed because a recursive loop was detected in the hierarchy of the record */ LoopDetected } } } //{'JsForm':['Booking Journal - Mobile','Information'],'JsWebApi':true,'IsDebugForm':true,'IsDebugWebApi':true,'Version':'2.12.31','JsFormVersion':'v2'}
the_stack
import { Component, QueryList, Input, HostListener, ContentChildren, AfterContentInit, ElementRef, TemplateRef, OnChanges, SimpleChanges, ChangeDetectorRef, ViewChild, OnInit } from "@angular/core"; import { Subscription } from "rxjs"; import { EventService } from "carbon-components-angular/utils"; import { TabHeader } from "./tab-header.component"; @Component({ selector: "ibm-tab-header-group", template: ` <nav class="bx--tabs bx--tabs--scrollable" [ngClass]="{ 'bx--skeleton': skeleton, 'bx--tabs--container bx--tabs--scrollable--container': type === 'container' }" role="navigation" [attr.aria-label]="ariaLabel" [attr.aria-labelledby]="ariaLabelledby"> <button #leftOverflowNavButton type="button" [ngClass]="{ 'bx--tab--overflow-nav-button': hasHorizontalOverflow, 'bx--tab--overflow-nav-button--hidden': leftOverflowNavButtonHidden }" (click)="handleOverflowNavClick(-1)" (mousedown)="handleOverflowNavMouseDown(-1)" (mouseup)="handleOverflowNavMouseUp()"> <svg focusable="false" preserveAspectRatio="xMidYMid meet" xmlns="http://www.w3.org/2000/svg" fill="currentColor" width="16" height="16" viewBox="0 0 16 16" aria-hidden="true"> <path d="M5 8L10 3 10.7 3.7 6.4 8 10.7 12.3 10 13z"></path> </svg> </button> <div *ngIf="!leftOverflowNavButtonHidden" class="bx--tabs__overflow-indicator--left"></div> <ul #tabList class="bx--tabs--scrollable__nav" role="tablist" (scroll)="handleScroll()"> <li role="presentation"> <ng-container *ngIf="contentBefore" [ngTemplateOutlet]="contentBefore"></ng-container> </li> <ng-content></ng-content> <li role="presentation"> <ng-container *ngIf="contentAfter" [ngTemplateOutlet]="contentAfter"></ng-container> </li> </ul> <div *ngIf="!rightOverflowNavButtonHidden" class="bx--tabs__overflow-indicator--right"></div> <button #rightOverflowNavButton type="button" [ngClass]="{ 'bx--tab--overflow-nav-button': hasHorizontalOverflow, 'bx--tab--overflow-nav-button--hidden': rightOverflowNavButtonHidden }" (click)="handleOverflowNavClick(1)" 
(mousedown)="handleOverflowNavMouseDown(1)" (mouseup)="handleOverflowNavMouseUp()"> <svg focusable="false" preserveAspectRatio="xMidYMid meet" xmlns="http://www.w3.org/2000/svg" fill="currentColor" width="16" height="16" viewBox="0 0 16 16" aria-hidden="true"> <path d="M11 8L6 13 5.3 12.3 9.6 8 5.3 3.7 6 3z"></path> </svg> </button> </nav> ` }) export class TabHeaderGroup implements AfterContentInit, OnChanges, OnInit { /** * Set to 'true' to have tabs automatically activated and have their content displayed when they receive focus. */ @Input() followFocus: boolean; /** * Set to `true` to put tabs in a loading state. */ @Input() skeleton = false; /** * Sets the aria label on the nav element. */ @Input() ariaLabel: string; /** * Sets the aria labelledby on the nav element. */ @Input() ariaLabelledby: string; @Input() contentAfter: TemplateRef<any>; @Input() contentBefore: TemplateRef<any>; /** * Set to 'true' to have all pane references associated with each tab header * in the tab header group cached and not reloaded on tab switching. */ @Input() cacheActive = false; @Input() isNavigation = false; @Input() type: "default" | "container" = "default"; /** * ContentChildren of all the tabHeaders. */ @ContentChildren(TabHeader) tabHeaderQuery: QueryList<TabHeader>; // @ts-ignore @ViewChild("tabList", { static: true }) headerContainer; // @ts-ignore @ViewChild("rightOverflowNavButton", { static: true }) rightOverflowNavButton; // @ts-ignore @ViewChild("leftOverflowNavButton", { static: true }) leftOverflowNavButton; /** * Keeps track of all the subscriptions to the tab header selection events. */ selectedSubscriptionTracker = new Subscription(); /** * Controls the manual focusing done by tabbing through headings. 
*/ public currentSelectedIndex = 0; public get hasHorizontalOverflow() { const tabList = this.headerContainer.nativeElement; return tabList.scrollWidth > tabList.clientWidth; } public get leftOverflowNavButtonHidden() { const tabList = this.headerContainer.nativeElement; return !this.hasHorizontalOverflow || !tabList.scrollLeft; } public get rightOverflowNavButtonHidden() { const tabList = this.headerContainer.nativeElement; return !this.hasHorizontalOverflow || (tabList.scrollLeft + tabList.clientWidth) === tabList.scrollWidth; } // width of the overflow buttons OVERFLOW_BUTTON_OFFSET = 40; private _cacheActive = false; private overflowNavInterval; constructor( protected elementRef: ElementRef, protected changeDetectorRef: ChangeDetectorRef, protected eventService: EventService ) { } // keyboard accessibility /** * Controls the keydown events used for tabbing through the headings. */ @HostListener("keydown", ["$event"]) keyboardInput(event) { let tabHeadersArray = Array.from<any>(this.tabHeaderQuery); if (event.key === "Right" || event.key === "ArrowRight") { if (this.currentSelectedIndex < tabHeadersArray.length - 1) { event.preventDefault(); if (this.followFocus && !tabHeadersArray[this.currentSelectedIndex + 1].disabled) { tabHeadersArray[this.currentSelectedIndex + 1].selectTab(); } else { tabHeadersArray[this.currentSelectedIndex + 1].tabItem.nativeElement.focus(); this.currentSelectedIndex++; } } else { event.preventDefault(); if (this.followFocus && !tabHeadersArray[0].disabled) { tabHeadersArray[0].selectTab(); } else { tabHeadersArray[0].tabItem.nativeElement.focus(); this.currentSelectedIndex = 0; } } } if (event.key === "Left" || event.key === "ArrowLeft") { if (this.currentSelectedIndex > 0) { event.preventDefault(); if (this.followFocus && !tabHeadersArray[this.currentSelectedIndex - 1].disabled) { tabHeadersArray[this.currentSelectedIndex - 1].selectTab(); } else { tabHeadersArray[this.currentSelectedIndex - 1].tabItem.nativeElement.focus(); 
this.currentSelectedIndex--; } } else { event.preventDefault(); if (this.followFocus && !tabHeadersArray[tabHeadersArray.length - 1].disabled) { tabHeadersArray[tabHeadersArray.length - 1].selectTab(); } else { tabHeadersArray[tabHeadersArray.length - 1].tabItem.nativeElement.focus(); this.currentSelectedIndex = tabHeadersArray.length - 1; } } } if (event.key === "Home") { event.preventDefault(); if (this.followFocus && !tabHeadersArray[0].disabled) { tabHeadersArray[0].selectTab(); } else { tabHeadersArray[0].tabItem.nativeElement.focus(); this.currentSelectedIndex = 0; } } if (event.key === "End") { event.preventDefault(); if (this.followFocus && !tabHeadersArray[tabHeadersArray.length - 1].disabled) { tabHeadersArray[tabHeadersArray.length - 1].selectTab(); } else { tabHeadersArray[tabHeadersArray.length - 1].tabItem.nativeElement.focus(); this.currentSelectedIndex = tabHeadersArray.length - 1; } } // `"Spacebar"` is IE11 specific value if ((event.key === " " || event.key === "Spacebar") && !this.followFocus) { tabHeadersArray[this.currentSelectedIndex].selectTab(); } } ngOnInit() { this.eventService.on(window as any, "resize", () => this.handleScroll()); } ngAfterContentInit() { this.selectedSubscriptionTracker.unsubscribe(); if (this.tabHeaderQuery) { this.tabHeaderQuery.toArray() .forEach(tabHeader => { tabHeader.cacheActive = this.cacheActive; tabHeader.paneTabIndex = this.isNavigation ? null : 0; }); } const selectedSubscriptions = this.tabHeaderQuery.toArray().forEach(tabHeader => { tabHeader.selected.subscribe(() => { this.currentSelectedIndex = this.tabHeaderQuery.toArray().indexOf(tabHeader); // The Filter takes the current selected tab out, then all other headers are // deactivated and their associated pane references are also deactivated. 
this.tabHeaderQuery.toArray().filter(header => header !== tabHeader) .forEach(filteredHeader => { filteredHeader.active = false; if (filteredHeader.paneReference) { filteredHeader.paneReference.active = false; } }); }); }); this.selectedSubscriptionTracker.add(selectedSubscriptions); setTimeout(() => this.tabHeaderQuery.toArray()[this.currentSelectedIndex].selectTab()); } ngOnChanges(changes: SimpleChanges) { if (this.tabHeaderQuery) { if (changes.cacheActive) { this.tabHeaderQuery.toArray().forEach(tabHeader => tabHeader.cacheActive = this.cacheActive); } if (changes.isNavigation) { this.tabHeaderQuery.toArray() .forEach(tabHeader => tabHeader.paneTabIndex = this.isNavigation ? null : 0); } } } public getSelectedTab(): any { const selected = this.tabHeaderQuery.toArray()[this.currentSelectedIndex]; if (selected) { return selected; } return { headingIsTemplate: false, heading: "" }; } public handleScroll() { this.changeDetectorRef.markForCheck(); } public handleOverflowNavClick(direction: number, multiplier = 15) { const tabList = this.headerContainer.nativeElement; const { clientWidth, scrollLeft, scrollWidth } = tabList; if (direction === 1 && !scrollLeft) { tabList.scrollLeft += this.OVERFLOW_BUTTON_OFFSET; } tabList.scrollLeft += direction * multiplier; const leftEdgeReached = direction === -1 && scrollLeft < this.OVERFLOW_BUTTON_OFFSET; const rightEdgeReached = direction === 1 && scrollLeft + clientWidth >= scrollWidth - this.OVERFLOW_BUTTON_OFFSET; if (leftEdgeReached) { this.rightOverflowNavButton.nativeElement.focus(); } if (rightEdgeReached) { this.leftOverflowNavButton.nativeElement.focus(); } } public handleOverflowNavMouseDown(direction: number) { const tabList = this.headerContainer.nativeElement; this.overflowNavInterval = setInterval(() => { const { clientWidth, scrollLeft, scrollWidth } = tabList; // clear interval if scroll reaches left or right edge const leftEdgeReached = direction === -1 && scrollLeft < this.OVERFLOW_BUTTON_OFFSET; const 
rightEdgeReached = direction === 1 && scrollLeft + clientWidth >= scrollWidth - this.OVERFLOW_BUTTON_OFFSET; if (leftEdgeReached || rightEdgeReached) { clearInterval(this.overflowNavInterval); } // account for overflow button appearing and causing tablist width change this.handleOverflowNavClick(direction); }); } public handleOverflowNavMouseUp() { clearInterval(this.overflowNavInterval); } }
the_stack
// NOTE(review): generated DynamicsCrm.DevKit type declarations for the Dynamics 365
// "Campaign Activity" forms and WebApi entity (see the generator footer comment on the
// last line). This file appears auto-generated — prefer regenerating with the DevKit
// tooling over hand-editing. Only the two directives below were moved onto their own
// lines (required once the source was whitespace-collapsed); declarations are unchanged.
//@ts-check
///<reference path="devkit.d.ts" />
declare namespace DevKit { namespace FormCampaign_Activity { interface Header extends DevKit.Controls.IHeader { /** Unique identifier of the user or team who owns the activity. */ OwnerId: DevKit.Controls.Lookup; /** Select the priority so that preferred customers or critical issues are handled quickly. */ PriorityCode: DevKit.Controls.OptionSet; /** Select the campaign activity's status. */ StatusCode: DevKit.Controls.OptionSet; } interface tab_audiences_tab_Sections { excluded_accounts_section: DevKit.Controls.Section; excluded_contacts_section: DevKit.Controls.Section; excluded_leads_section: DevKit.Controls.Section; selected_accounts_section: DevKit.Controls.Section; selected_contacts_section: DevKit.Controls.Section; selected_leads_section: DevKit.Controls.Section; } interface tab_Campaign_Activity_Sections { Anti_Spam: DevKit.Controls.Section; Financials: DevKit.Controls.Section; Marketing_list: DevKit.Controls.Section; Social_Pane: DevKit.Controls.Section; Summary: DevKit.Controls.Section; } interface tab_audiences_tab extends DevKit.Controls.ITab { Section: tab_audiences_tab_Sections; } interface tab_Campaign_Activity extends DevKit.Controls.ITab { Section: tab_Campaign_Activity_Sections; } interface Tabs { audiences_tab: tab_audiences_tab; Campaign_Activity: tab_Campaign_Activity; } interface Body { Tab: Tabs; /** Type the actual cost of the campaign activity. The value entered is rolled up to the related campaign in the total cost calculations. */ ActualCost: DevKit.Controls.Money; /** Enter the date when the campaign activity was actually completed. */ ActualEnd: DevKit.Controls.Date; /** Enter the actual start date and time for the campaign activity to determine if the campaign activity started on the scheduled time. */ ActualStart: DevKit.Controls.Date; /** Type the allocated budget of the campaign activity for estimated versus actual cost reporting. 
*/ BudgetedCost: DevKit.Controls.Money; /** Select how communications for this activity will be sent, such as phone, letter, fax, or email. */ ChannelTypeCode: DevKit.Controls.OptionSet; /** Type additional information to describe the campaign activity, such as key talking points, objectives, or details about the target audience. */ Description: DevKit.Controls.String; /** Limits the frequency (in days) of marketing activities directed at any contact. Contacts that have been contacted more recently than this will be excluded from new campaign activity distributions. Enter a value of zero to disable the limit. */ ExcludeIfContactedInXDays: DevKit.Controls.Integer; notescontrol: DevKit.Controls.Note; /** Outsource vendor with which activity is associated. */ Partners: DevKit.Controls.Lookup; /** Choose the parent campaign so that the campaign activity costs reflect in the correct campaign for reporting. */ RegardingObjectId: DevKit.Controls.Lookup; /** Enter the expected due date and time for the activity to be completed to provide details about the timing of the campaign activity. */ ScheduledEnd: DevKit.Controls.Date; /** Enter the expected start date and time for the activity to provide details about timing of the campaign activity. */ ScheduledStart: DevKit.Controls.Date; /** Select the campaign activity's status. */ StatusCode: DevKit.Controls.OptionSet; /** Type a short description about the objective or primary topic of the campaign activity. */ Subject: DevKit.Controls.String; /** Choose the local currency for the record to make sure budgets are reported in the correct currency. */ TransactionCurrencyId: DevKit.Controls.Lookup; /** Select the type of campaign activity to indicate the purpose of the activity. 
*/ TypeCode: DevKit.Controls.OptionSet; } interface Navigation { navActivities: DevKit.Controls.NavigationItem, navAsyncOperations: DevKit.Controls.NavigationItem, navProcessSessions: DevKit.Controls.NavigationItem, navRelationshipCABulkOperationLogs: DevKit.Controls.NavigationItem, navTargetLists: DevKit.Controls.NavigationItem } interface Grid { marketing_lists_grid: DevKit.Controls.Grid; selected_accounts: DevKit.Controls.Grid; excluded_accounts: DevKit.Controls.Grid; selected_contacts: DevKit.Controls.Grid; excluded_contacts: DevKit.Controls.Grid; selected_leads: DevKit.Controls.Grid; excluded_leads: DevKit.Controls.Grid; } } class FormCampaign_Activity extends DevKit.IForm { /** * DynamicsCrm.DevKit form Campaign_Activity * @param executionContext the execution context * @param defaultWebResourceName default resource name. E.g.: "devkit_/resources/Resource" */ constructor(executionContext: any, defaultWebResourceName?: string); /** Utility functions/methods/objects for Dynamics 365 form */ Utility: DevKit.Utility; /** The Body section of form Campaign_Activity */ Body: DevKit.FormCampaign_Activity.Body; /** The Header section of form Campaign_Activity */ Header: DevKit.FormCampaign_Activity.Header; /** The Navigation of form Campaign_Activity */ Navigation: DevKit.FormCampaign_Activity.Navigation; /** The Grid of form Campaign_Activity */ Grid: DevKit.FormCampaign_Activity.Grid; } namespace FormCampaign_Activity_deprecated { interface Header extends DevKit.Controls.IHeader { /** Unique identifier of the user or team who owns the activity. */ OwnerId: DevKit.Controls.Lookup; /** Select the priority so that preferred customers or critical issues are handled quickly. */ PriorityCode: DevKit.Controls.OptionSet; /** Select the campaign activity's status. 
*/ StatusCode: DevKit.Controls.OptionSet; } interface tab_Campaign_Activity_Sections { Anti_Spam: DevKit.Controls.Section; Financials: DevKit.Controls.Section; Marketing_list: DevKit.Controls.Section; Social_Pane: DevKit.Controls.Section; Summary: DevKit.Controls.Section; } interface tab_FailuresActivities_Sections { failures_activities_grid: DevKit.Controls.Section; } interface tab_Campaign_Activity extends DevKit.Controls.ITab { Section: tab_Campaign_Activity_Sections; } interface tab_FailuresActivities extends DevKit.Controls.ITab { Section: tab_FailuresActivities_Sections; } interface Tabs { Campaign_Activity: tab_Campaign_Activity; FailuresActivities: tab_FailuresActivities; } interface Body { Tab: Tabs; /** Type the actual cost of the campaign activity. The value entered is rolled up to the related campaign in the total cost calculations. */ ActualCost: DevKit.Controls.Money; /** Enter the date when the campaign activity was actually completed. */ ActualEnd: DevKit.Controls.Date; /** Enter the actual start date and time for the campaign activity to determine if the campaign activity started on the scheduled time. */ ActualStart: DevKit.Controls.Date; /** Type the allocated budget of the campaign activity for estimated versus actual cost reporting. */ BudgetedCost: DevKit.Controls.Money; /** Select how communications for this activity will be sent, such as phone, letter, fax, or email. */ ChannelTypeCode: DevKit.Controls.OptionSet; /** Type additional information to describe the campaign activity, such as key talking points, objectives, or details about the target audience. */ Description: DevKit.Controls.String; /** Limits the frequency (in days) of marketing activities directed at any contact. Contacts that have been contacted more recently than this will be excluded from new campaign activity distributions. Enter a value of zero to disable the limit. 
*/ ExcludeIfContactedInXDays: DevKit.Controls.Integer; notescontrol: DevKit.Controls.Note; /** Outsource vendor with which activity is associated. */ Partners: DevKit.Controls.Lookup; /** Choose the parent campaign so that the campaign activity costs reflect in the correct campaign for reporting. */ RegardingObjectId: DevKit.Controls.Lookup; /** Enter the expected due date and time for the activity to be completed to provide details about the timing of the campaign activity. */ ScheduledEnd: DevKit.Controls.Date; /** Enter the expected start date and time for the activity to provide details about timing of the campaign activity. */ ScheduledStart: DevKit.Controls.Date; /** Select the campaign activity's status. */ StatusCode: DevKit.Controls.OptionSet; /** Type a short description about the objective or primary topic of the campaign activity. */ Subject: DevKit.Controls.String; /** Choose the local currency for the record to make sure budgets are reported in the correct currency. */ TransactionCurrencyId: DevKit.Controls.Lookup; /** Select the type of campaign activity to indicate the purpose of the activity. */ TypeCode: DevKit.Controls.OptionSet; } interface Navigation { navActivities: DevKit.Controls.NavigationItem, navAsyncOperations: DevKit.Controls.NavigationItem, navProcessSessions: DevKit.Controls.NavigationItem, navTargetLists: DevKit.Controls.NavigationItem } interface Grid { marketing_lists_grid: DevKit.Controls.Grid; failuresGrid: DevKit.Controls.Grid; } } class FormCampaign_Activity_deprecated extends DevKit.IForm { /** * DynamicsCrm.DevKit form Campaign_Activity_deprecated * @param executionContext the execution context * @param defaultWebResourceName default resource name. 
E.g.: "devkit_/resources/Resource" */ constructor(executionContext: any, defaultWebResourceName?: string); /** Utility functions/methods/objects for Dynamics 365 form */ Utility: DevKit.Utility; /** The Body section of form Campaign_Activity_deprecated */ Body: DevKit.FormCampaign_Activity_deprecated.Body; /** The Header section of form Campaign_Activity_deprecated */ Header: DevKit.FormCampaign_Activity_deprecated.Header; /** The Navigation of form Campaign_Activity_deprecated */ Navigation: DevKit.FormCampaign_Activity_deprecated.Navigation; /** The Grid of form Campaign_Activity_deprecated */ Grid: DevKit.FormCampaign_Activity_deprecated.Grid; } class CampaignActivityApi { /** * DynamicsCrm.DevKit CampaignActivityApi * @param entity The entity object */ constructor(entity?: any); /** * Get the value of alias * @param alias the alias value * @param isMultiOptionSet true if the alias is multi OptionSet */ getAliasedValue(alias: string, isMultiOptionSet?: boolean): any; /** * Get the formatted value of alias * @param alias the alias value * @param isMultiOptionSet true if the alias is multi OptionSet */ getAliasedFormattedValue(alias: string, isMultiOptionSet?: boolean): string; /** The entity object */ Entity: any; /** The entity name */ EntityName: string; /** The entity collection name */ EntityCollectionName: string; /** The @odata.etag is then used to build a cache of the response that is dependant on the fields that are retrieved */ "@odata.etag": string; /** Additional information provided by the external application as JSON. For internal use only. */ ActivityAdditionalParams: DevKit.WebApi.StringValue; /** Unique identifier of the campaign activity. */ ActivityId: DevKit.WebApi.GuidValue; /** Type the actual cost of the campaign activity. The value entered is rolled up to the related campaign in the total cost calculations. */ ActualCost: DevKit.WebApi.MoneyValue; /** Value of the Actual Cost in base currency. 
*/ ActualCost_Base: DevKit.WebApi.MoneyValueReadonly; /** Shows the value selected in the Duration field on the campaign activity. The duration is used to report the time spent on the activity. */ ActualDurationMinutes: DevKit.WebApi.IntegerValue; /** Enter the date when the campaign activity was actually completed. */ ActualEnd_UtcDateOnly: DevKit.WebApi.UtcDateOnlyValue; /** Enter the actual start date and time for the campaign activity to determine if the campaign activity started on the scheduled time. */ ActualStart_UtcDateOnly: DevKit.WebApi.UtcDateOnlyValue; /** Type the allocated budget of the campaign activity for estimated versus actual cost reporting. */ BudgetedCost: DevKit.WebApi.MoneyValue; /** Value of the Budget Allocated in base currency. */ BudgetedCost_Base: DevKit.WebApi.MoneyValueReadonly; /** Type a category to identify the campaign activity type, such as new business development or customer retention, to tie the campaign activity to a business group or function. */ Category: DevKit.WebApi.StringValue; /** Select how communications for this activity will be sent, such as phone, letter, fax, or email. */ ChannelTypeCode: DevKit.WebApi.OptionSetValue; /** Shows how contact about the social activity originated, such as from Twitter or Facebook. This field is read-only. */ Community: DevKit.WebApi.OptionSetValue; /** Unique identifier of the user who created the activity. */ CreatedBy: DevKit.WebApi.LookupValueReadonly; /** Date and time when the activity was created. */ CreatedOn_UtcDateAndTime: DevKit.WebApi.UtcDateAndTimeValueReadonly; /** Unique identifier of the delegate user who created the activitypointer. */ CreatedOnBehalfBy: DevKit.WebApi.LookupValueReadonly; /** Date and time when the delivery of the activity was last attempted. */ DeliveryLastAttemptedOn_UtcDateAndTime: DevKit.WebApi.UtcDateAndTimeValueReadonly; /** Priority of delivery of the activity to the email server. 
*/ DeliveryPriorityCode: DevKit.WebApi.OptionSetValue; /** Type additional information to describe the campaign activity, such as key talking points, objectives, or details about the target audience. */ Description: DevKit.WebApi.StringValue; /** Select whether to override the opt-out settings on leads, contacts, and accounts for the members of the target marketing lists of the campaign activity. If No is selected, marketing materials will be sent to members who have opted out. */ DoNotSendOnOptOut: DevKit.WebApi.BooleanValue; /** The message id of activity which is returned from Exchange Server. */ ExchangeItemId: DevKit.WebApi.StringValue; /** Shows the conversion rate of the record's currency. The exchange rate is used to convert all money fields in the record from the local currency to the system's default currency. */ ExchangeRate: DevKit.WebApi.DecimalValueReadonly; /** Shows the web link of Activity of type email. */ ExchangeWebLink: DevKit.WebApi.StringValue; /** Limits the frequency (in days) of marketing activities directed at any contact. Contacts that have been contacted more recently than this will be excluded from new campaign activity distributions. Enter a value of zero to disable the limit. */ ExcludeIfContactedInXDays: DevKit.WebApi.IntegerValue; /** Select whether inactive marketing list members will be excluded from the campaign activity distribution. */ IgnoreInactiveListMembers: DevKit.WebApi.BooleanValue; /** Sequence number of the import that created this record. */ ImportSequenceNumber: DevKit.WebApi.IntegerValue; /** Type of instance of a recurring series. */ InstanceTypeCode: DevKit.WebApi.OptionSetValueReadonly; /** Information regarding whether the campaign activity was billed as part of resolving a case. */ IsBilled: DevKit.WebApi.BooleanValue; /** For internal use only. */ IsMapiPrivate: DevKit.WebApi.BooleanValue; /** Information regarding whether the activity is a regular activity type or event type. 
*/ IsRegularActivity: DevKit.WebApi.BooleanValueReadonly; /** Information about whether the campaign activity is created by a workflow rule. */ IsWorkflowCreated: DevKit.WebApi.BooleanValue; /** Contains the date and time stamp of the last on hold time. */ LastOnHoldTime_UtcDateAndTime: DevKit.WebApi.UtcDateAndTimeValue; /** Left the voice mail */ LeftVoiceMail: DevKit.WebApi.BooleanValue; /** Unique identifier of user who last modified the activity. */ ModifiedBy: DevKit.WebApi.LookupValueReadonly; /** Date and time when activity was last modified. */ ModifiedOn_UtcDateAndTime: DevKit.WebApi.UtcDateAndTimeValueReadonly; /** Unique identifier of the delegate user who last modified the activitypointer. */ ModifiedOnBehalfBy: DevKit.WebApi.LookupValueReadonly; /** Shows how long, in minutes, that the record was on hold. */ OnHoldTime: DevKit.WebApi.IntegerValueReadonly; /** Date and time that the record was migrated. */ OverriddenCreatedOn_UtcDateOnly: DevKit.WebApi.UtcDateOnlyValue; /** Enter the user who is assigned to manage the record. This field is updated every time the record is assigned to a different user */ OwnerId_systemuser: DevKit.WebApi.LookupValue; /** Enter the team who is assigned to manage the record. This field is updated every time the record is assigned to a different team */ OwnerId_team: DevKit.WebApi.LookupValue; /** Unique identifier of the business unit that owns the activity. */ OwningBusinessUnit: DevKit.WebApi.LookupValueReadonly; /** Unique identifier of the team that owns the activity. */ OwningTeam: DevKit.WebApi.LookupValueReadonly; /** Unique identifier of the user that owns the activity. */ OwningUser: DevKit.WebApi.LookupValueReadonly; /** For internal use only. */ PostponeActivityProcessingUntil_UtcDateAndTime: DevKit.WebApi.UtcDateAndTimeValueReadonly; /** Select the priority so that preferred customers or critical issues are handled quickly. */ PriorityCode: DevKit.WebApi.OptionSetValue; /** Unique identifier of the Process. 
*/ ProcessId: DevKit.WebApi.GuidValue; /** Choose the parent campaign so that the campaign activity costs reflect in the correct campaign for reporting. */ RegardingObjectId: DevKit.WebApi.LookupValue; /** Scheduled duration, specified in minutes, of the campaign activity. */ ScheduledDurationMinutes: DevKit.WebApi.IntegerValueReadonly; /** Enter the expected due date and time for the activity to be completed to provide details about the timing of the campaign activity. */ ScheduledEnd_UtcDateOnly: DevKit.WebApi.UtcDateOnlyValue; /** Enter the expected start date and time for the activity to provide details about timing of the campaign activity. */ ScheduledStart_UtcDateOnly: DevKit.WebApi.UtcDateOnlyValue; /** Unique identifier of the mailbox associated with the sender of the email message. */ SenderMailboxId: DevKit.WebApi.LookupValueReadonly; /** Date and time when the activity was sent. */ SentOn_UtcDateAndTime: DevKit.WebApi.UtcDateAndTimeValueReadonly; /** Uniqueidentifier specifying the id of recurring series of an instance. */ SeriesId: DevKit.WebApi.GuidValueReadonly; /** Unique identifier of the associated service. */ ServiceId: DevKit.WebApi.LookupValue; /** Choose the service level agreement (SLA) that you want to apply to the case record. */ SLAId: DevKit.WebApi.LookupValue; /** Last SLA that was applied to this case. This field is for internal use only. */ SLAInvokedId: DevKit.WebApi.LookupValueReadonly; SLAName: DevKit.WebApi.StringValueReadonly; /** Shows the date and time by which the activities are sorted. */ SortDate_UtcDateAndTime: DevKit.WebApi.UtcDateAndTimeValue; /** Unique identifier of the Stage. */ StageId: DevKit.WebApi.GuidValue; /** Shows whether the campaign activity is open, completed, or canceled. Completed and canceled campaign activities are read-only and can't be edited. */ StateCode: DevKit.WebApi.OptionSetValue; /** Select the campaign activity's status. 
*/ StatusCode: DevKit.WebApi.OptionSetValue; /** Type a subcategory to identify the campaign activity type and relate the activity to a specific product, sales region, business group, or other function. */ Subcategory: DevKit.WebApi.StringValue; /** Type a short description about the objective or primary topic of the campaign activity. */ Subject: DevKit.WebApi.StringValue; /** For internal use only. */ TimeZoneRuleVersionNumber: DevKit.WebApi.IntegerValue; /** Choose the local currency for the record to make sure budgets are reported in the correct currency. */ TransactionCurrencyId: DevKit.WebApi.LookupValueReadonly; /** For internal use only. */ TraversedPath: DevKit.WebApi.StringValue; /** Select the type of campaign activity to indicate the purpose of the activity. */ TypeCode: DevKit.WebApi.OptionSetValue; /** Time zone code that was in use when the record was created. */ UTCConversionTimeZoneCode: DevKit.WebApi.IntegerValue; /** Version number of the activity. */ VersionNumber: DevKit.WebApi.BigIntValueReadonly; /** The array of object that can cast object to ActivityPartyApi class */ ActivityParties: Array<any>; } }
// Option-set (picklist) enumerations for the CampaignActivity entity; numeric values
// are given in the /** n */ comment preceding each member.
declare namespace OptionSet { namespace CampaignActivity { enum ChannelTypeCode { /** 2 */ Appointment, /** 7 */ Email, /** 8 */ Email_via_Mail_Mergedeprecated, /** 5 */ Fax, /** 6 */ Fax_via_Mail_Mergedeprecated, /** 3 */ Letter, /** 4 */ Letter_via_Mail_Mergedeprecated, /** 9 */ Other, /** 1 */ Phone } enum Community { /** 5 */ Cortana, /** 6 */ Direct_Line, /** 8 */ Direct_Line_Speech, /** 9 */ Email, /** 1 */ Facebook, /** 10 */ GroupMe, /** 11 */ Kik, /** 3 */ Line, /** 7 */ Microsoft_Teams, /** 0 */ Other, /** 13 */ Skype, /** 14 */ Slack, /** 12 */ Telegram, /** 2 */ Twitter, /** 4 */ Wechat, /** 15 */ WhatsApp } enum DeliveryPriorityCode { /** 2 */ High, /** 0 */ Low, /** 1 */ Normal } enum InstanceTypeCode { /** 0 */ Not_Recurring, /** 3 */ Recurring_Exception, /** 4 */ Recurring_Future_Exception, /** 2 */ Recurring_Instance, /** 1 */ 
Recurring_Master } enum PriorityCode { /** 2 */ High, /** 0 */ Low, /** 1 */ Normal } enum StateCode { /** 2 */ Canceled, /** 1 */ Closed, /** 0 */ Open } enum StatusCode { /** 3 */ Canceled, /** 2 */ Closed, /** 6 */ Completed, /** 0 */ In_Progress, /** 4 */ Pending, /** 1 */ Proposed, /** 5 */ System_Aborted } enum TypeCode { /** 5 */ Content_Distribution, /** 2 */ Content_Preparation, /** 7 */ Direct_Follow_Up_Contact, /** 6 */ Direct_Initial_Contact, /** 4 */ Lead_Qualification, /** 8 */ Reminder_Distribution, /** 1 */ Research, /** 3 */ Target_Marketing_List_Creation } enum RollupState { /** 0 - Attribute value is yet to be calculated */ NotCalculated, /** 1 - Attribute value has been calculated per the last update time in <AttributeSchemaName>_Date attribute */ Calculated, /** 2 - Attribute value calculation lead to overflow error */ OverflowError, /** 3 - Attribute value calculation failed due to an internal error, next run of calculation job will likely fix it */ OtherError, /** 4 - Attribute value calculation failed because the maximum number of retry attempts to calculate the value were exceeded likely due to high number of concurrency and locking conflicts */ RetryLimitExceeded, /** 5 - Attribute value calculation failed because maximum hierarchy depth limit for calculation was reached */ HierarchicalRecursionLimitReached, /** 6 - Attribute value calculation failed because a recursive loop was detected in the hierarchy of the record */ LoopDetected } } } //{'JsForm':['Campaign Activity','Campaign Activity (deprecated)'],'JsWebApi':true,'IsDebugForm':true,'IsDebugWebApi':true,'Version':'2.12.31','JsFormVersion':'v2'}
the_stack
import { IMathematicsHelper } from "./IMathematicsHelper"; import { Utility } from "../../utility/Utility"; export class MathematicsHelper implements IMathematicsHelper { public static epsilon: number = Utility.epsilon; public static epsilonUp: number = 1 - MathematicsHelper.epsilon; public static epsilonMinute: number = 0.000000001; public static epsilonMinuteUp: number = 1 - MathematicsHelper.epsilonMinute; public static epsilonMinuteNegative: number = -MathematicsHelper.epsilonMinute; public static readonly mathematicsHelperObject: IMathematicsHelper = new MathematicsHelper(); public static GetMathematicsHelperObject(): IMathematicsHelper { return MathematicsHelper.mathematicsHelperObject; } // ---- NOTE-REFERENCE ---- https://en.wikipedia.org/wiki/Softmax_function public softmaxSingleFunction(inputs: number[], index: number): number { const max: number = this.getIndexOnFirstMaxEntry(inputs).max; const inputShifts: number[] = inputs.map((entry: number) => entry - max); const inputShiftExps: number[] = inputShifts.map((entry: number) => Math.exp(entry)); const inputShiftExpsSum: number = inputShiftExps.reduce( (accumulation: number, entry: number) => accumulation + entry, 0); return inputShiftExps[index] / inputShiftExpsSum; } public smoothArgmaxApproximationSingleFunction(inputs: number[], index: number): number { return this.softmaxSingleFunction(inputs, index); } // ---- NOTE-REFERENCE ---- https://en.wikipedia.org/wiki/Softmax_function public softmaxFunction(inputs: number[]): number[] { const max: number = this.getIndexOnFirstMaxEntry(inputs).max; const inputShifts: number[] = inputs.map((entry: number) => entry - max); const inputShiftExps: number[] = inputShifts.map((entry: number) => Math.exp(entry)); const inputShiftExpsSum: number = inputShiftExps.reduce( (accumulation: number, entry: number) => accumulation + entry, 0); return inputShiftExps.map((entry: number) => entry / inputShiftExpsSum); } public smoothArgmaxApproximationFunction(inputs: number[]): 
number[] { return this.softmaxFunction(inputs); } // ---- NOTE-REFERENCE ---- https://en.wikipedia.org/wiki/LogSumExp public logsumexpStrictConvexSingleFunction(inputs: number[]): number { let max: number = this.getIndexOnFirstMaxEntry(inputs).max; if (max < 0) { max = 0; } const inputShifts: number[] = inputs.map((entry: number) => entry - max); inputShifts.push(-max); const inputShiftExps: number[] = inputShifts.map((entry: number) => Math.exp(entry)); const inputShiftExpsSum: number = inputShiftExps.reduce( (accumulation: number, entry: number) => accumulation + entry, 0); return max + this.safeLog(inputShiftExpsSum); } public smoothMaxApproximationStrictConvexFunction(inputs: number[]): number { return this.logsumexpStrictConvexSingleFunction(inputs); } // ---- NOTE-REFERENCE ---- https://en.wikipedia.org/wiki/LogSumExp public logsumexpSingleFunction(inputs: number[]): number { const max: number = this.getIndexOnFirstMaxEntry(inputs).max; const inputShifts: number[] = inputs.map((entry: number) => entry - max); const inputShiftExps: number[] = inputShifts.map((entry: number) => Math.exp(entry)); const inputShiftExpsSum: number = inputShiftExps.reduce( (accumulation: number, entry: number) => accumulation + entry, 0); return max + this.safeLog(inputShiftExpsSum); } public smoothMaxApproximationFunction(inputs: number[]): number { return this.logsumexpSingleFunction(inputs); } public sigmoidLogisticGradientFunction(input: number): number { const logisticValue: number = this.sigmoidLogisticFunction(input); return logisticValue * (1 - logisticValue); } public sigmoidLogisticFunction(input: number): number { return 1 / (1 + Math.exp(-input)); } public sigmoidHyperbolicTangentFunction(input: number): number { const exponent: number = Math.exp(input); const exponentNegative: number = Math.exp(-input); return (exponent - exponentNegative) / (exponent + exponentNegative); } public sigmoidArctangentFunction(input: number): number { return Math.atan(input); } public 
sigmoidGudermannianFunction(input: number): number { return 2 * Math.atan(Math.tanh(input / 2)); } public sigmoidGeneralizedLogisticFunction(input: number, alpha: number = 1): number { return Math.pow(1 + Math.exp(-input), -alpha); } public sigmoidAlgebraicFunction(input: number): number { return input / Math.sqrt(1 + input * input); } public getL1Regularized(weight: number, l1Regularization: number): number { if (weight > 0) { return l1Regularization; } if (weight < 0) { return -l1Regularization; } return 0; } public getL2Regularized(weight: number, l2Regularization: number): number { return (weight * l2Regularization); } public getL1l2RegularizedWeightOptimizedSparse( weight: number, l1Regularization: number, l2Regularization: number): number { if (weight === 0) { return 0; // ---- NOTE: most sparse weights would remain zero, this check is to optimize the branches. } let regularized = weight * l2Regularization; if (weight > 0) { regularized += l1Regularization; if (weight <= regularized) { return 0; // ---- NOTE: cap weight at 0. } } else if (weight < 0) { regularized -= l1Regularization; if (weight >= regularized) { return 0; // ---- NOTE: cap weight at 0. } } weight -= regularized; return weight; } public getL1l2RegularizedWeightOptimizedDense( weight: number, l1Regularization: number, l2Regularization: number): number { let regularized = weight * l2Regularization; if (weight > 0) { regularized += l1Regularization; if (weight <= regularized) { return 0; // ---- NOTE: cap weight at 0. } } else if (weight < 0) { regularized -= l1Regularization; if (weight >= regularized) { return 0; // ---- NOTE: cap weight at 0. } } weight -= regularized; return weight; } /* * return: * softmaxVectors: number[][] * update: * matrixWeightDenseArrays * biasVectorDenseValueArray * input: * instanceGroundTruthPositiveLabelIndexes: * Each element is a label index. * Dimension: N, N: #instances. 
* instanceFeatureVectorSparseIndexArrays: * Each row represents a sparse feature, one-hot-encoder vector for an input instance. * #rows is the number of input instances * #columns is the number of space feature index for that row/instance. * There is no limit to the length of each row, as long as the elements, feature indexes, * fall in the feature range [0, #features). * Dimension: N X iF, N: #instances, iF: indefinite #features. * matrixWeightDenseArrays: * Each row represents a dense feature, floating-point weight vector for a label. * Row length is equal to #features. * Dimension: L x F, L: #labels, F: #features. * biasVectorDenseValueArray: * A bias vector, each element is for a label. * #biases is equal to #labels. * Dimension: L, L: #labels. * learningRate: * learning rate for SGD. * l1Regularization: * l1 regularization coefficient. * l2Regularization: * l2 regularization coefficient. * instanceFeatureVectorIndexBegin: * The begin index for a mini batch. * instanceFeatureVectorIndexEnd: * The end index for a mini batch. * internal data structure: * matrixWeightGradientDenseArrays: * Each row represents a dense feature gradient vector for a label. * Row length is equal to #features. * Dimension: L x F, L: #labels, F: #features. * biasVectorGradientDenseValueArray: * Each element represents a bias-term gradient for a label. * #biases is equal to #labels. * Dimension: L, L: #labels. 
*/ public softmaxLogLossGradientUpdate( instanceGroundTruthPositiveLabelIndexes: number[], instanceFeatureVectorSparseIndexArrays: number[][], matrixWeightDenseArrays: number[][], biasVectorDenseValueArray: number[], learningRate: number = 0.1, l1Regularization: number = 0.01, l2Regularization: number = 0.01, instanceFeatureVectorIndexBegin: number = 0, instanceFeatureVectorIndexEnd: number = 0): number[][] { // Utility.debuggingLog( // `instanceFeatureVectorIndexBegin=${instanceFeatureVectorIndexBegin}`); // Utility.debuggingLog( // `instanceFeatureVectorIndexEnd=${instanceFeatureVectorIndexEnd}`); if ((!instanceGroundTruthPositiveLabelIndexes) || (instanceGroundTruthPositiveLabelIndexes.length <= 0)) { Utility.debuggingThrow( `instanceGroundTruthPositiveLabelIndexes is empty`); } if ((!instanceFeatureVectorSparseIndexArrays) || (instanceFeatureVectorSparseIndexArrays.length <= 0)) { Utility.debuggingThrow( `instanceFeatureVectorSparseIndexArrays is empty`); } if ((!matrixWeightDenseArrays) || (matrixWeightDenseArrays.length <= 0)) { Utility.debuggingThrow( `matrixWeightDenseArrays is empty`); } if ((!biasVectorDenseValueArray) || (biasVectorDenseValueArray.length <= 0)) { Utility.debuggingThrow( `biasVectorDenseValueArray is empty`); } const softmaxVectors: number[][] = this.matrixVectorProductSoftmaxSparseIndexes( instanceFeatureVectorSparseIndexArrays, matrixWeightDenseArrays, biasVectorDenseValueArray, instanceFeatureVectorIndexBegin, instanceFeatureVectorIndexEnd); const numberInstances: number = softmaxVectors.length; const numberLabels: number = matrixWeightDenseArrays.length; // const numberFeatures: number = matrixWeightDenseArrays[0].length; const matrixWeightGradientDenseArrays: number[][] = this.matrixNewLikeWithZeroCells(matrixWeightDenseArrays); const biasVectorGradientDenseValueArray: number[] = this.vectorNewLikeWithZeroElements(biasVectorDenseValueArray); let instanceFeatureVectorIndex = instanceFeatureVectorIndexBegin; for (let instance: number = 
0; instance < numberInstances; instance++) { const instanceFeatureVectorSparseIndexArray: number[] = instanceFeatureVectorSparseIndexArrays[instanceFeatureVectorIndex]; const instanceLabel: number = instanceGroundTruthPositiveLabelIndexes[instanceFeatureVectorIndex]; const softmaxVector: number[] = softmaxVectors[instance]; /** ---- NOTE-FOR-REFERENCE ---- the loop for calculating gradients. * for (let label: number = 0; label < numberLabels; label++) { * const probability: number = softmaxVector[label]; * const labelWeightGradientDenseArray: number[] = * matrixWeightGradientDenseArrays[label]; * { * const gradient: number = probability; * if (label == instanceLabel) { * gradient = probability - 1; * } * { * for (let featureIndex of instanceFeatureVectorSparseIndexArray) { * labelWeightGradientDenseArray[featureIndex] += gradient; * } * { * biasVectorGradientDenseValueArray[label] += gradient; * } * } * } * } */ { { // ---- NOTE: un-looping for optimizing the gradient computation process. const probability: number = softmaxVector[instanceLabel]; const labelWeightGradientDenseArray: number[] = matrixWeightGradientDenseArrays[instanceLabel]; { const gradient: number = probability - 1; /** ---- NOTE-FOR-REFERENCE ---- * if (label == instanceLabel) { * gradient = probability - 1; * } */ { for (const featureIndex of instanceFeatureVectorSparseIndexArray) { labelWeightGradientDenseArray[featureIndex] += gradient; } { biasVectorGradientDenseValueArray[instanceLabel] += gradient; } } } } for (let label: number = 0; label < instanceLabel; label++) { // ---- NOTE: un-looping for optimizing the gradient computation process. 
const probability: number = softmaxVector[label]; const labelWeightGradientDenseArray: number[] = matrixWeightGradientDenseArrays[label]; { const gradient: number = probability; /** ---- NOTE-FOR-REFERENCE ---- * if (label == instanceLabel) { * gradient = probability ; * } */ { for (const featureIndex of instanceFeatureVectorSparseIndexArray) { labelWeightGradientDenseArray[featureIndex] += gradient; } { biasVectorGradientDenseValueArray[label] += gradient; } } } } for (let label: number = instanceLabel + 1; label < numberLabels; label++) { // ---- NOTE: un-looping for optimizing the gradient computation process. const probability: number = softmaxVector[label]; const labelWeightGradientDenseArray: number[] = matrixWeightGradientDenseArrays[label]; { const gradient: number = probability; /** ---- NOTE-FOR-REFERENCE ---- * if (label == instanceLabel) { * gradient = probability; * } */ { for (const featureIndex of instanceFeatureVectorSparseIndexArray) { labelWeightGradientDenseArray[featureIndex] += gradient; } { biasVectorGradientDenseValueArray[label] += gradient; } } } } } instanceFeatureVectorIndex++; } const factorConstant: number = learningRate / numberInstances; if ((l1Regularization > 0) || (l2Regularization > 0)) { this.matrixDenseSubtractScaledFromAndL1l2RegularizedSparseTo( matrixWeightDenseArrays, matrixWeightGradientDenseArrays, factorConstant, l1Regularization, l2Regularization); this.vectorDenseSubtractScaledFromAndL1l2RegularizedSparseTo( biasVectorDenseValueArray, biasVectorGradientDenseValueArray, factorConstant, l1Regularization, l2Regularization); } else { this.matrixDenseSubtractScaledFrom( matrixWeightDenseArrays, matrixWeightGradientDenseArrays, factorConstant); this.vectorDenseSubtractScaledFrom( biasVectorDenseValueArray, biasVectorGradientDenseValueArray, factorConstant); } return softmaxVectors; } public logLoss( probabilityVector: number[], instanceGroundTruthPositiveLabelIndex: number): number { let probability = 
probabilityVector[instanceGroundTruthPositiveLabelIndex];
        probability = this.clipValue(probability);
        if (!probability || (probability <= 0)) {
            Utility.debuggingThrow(
                `probability=${probability}, instanceGroundTruthPositiveLabelIndex=${instanceGroundTruthPositiveLabelIndex}`);
        }
        const loss: number = Math.log(probability);
        return -loss;
    }
    /**
     * Generic log loss between a probability vector and a (possibly soft)
     * label vector: -sum_i labelVector[i] * log(probabilityVector[i]),
     * skipping entries whose label is not positive.
     * Probabilities are clipped (this.clipValue) before taking the log;
     * a non-positive clipped probability triggers debuggingThrow.
     */
    public logLossGeneric(
        probabilityVector: number[],
        labelVector: number[]): number {
        let loss: number = 0;
        for (let i: number = 0; i < labelVector.length; i++) {
            const label = labelVector[i];
            let probability = probabilityVector[i];
            probability = this.clipValue(probability);
            if (!probability || (probability <= 0)) {
                Utility.debuggingThrow(
                    `probability=${probability}, labelVector=${labelVector}, probabilityVector=${probabilityVector}`);
            }
            if (label > 0) {
                loss += label * Math.log(probability);
            }
        }
        return -loss;
    }
    /** Mean log loss over a batch of softmax vectors with hard label indexes. */
    public softmaxLogLoss(
        softmaxVectors: number[][],
        instanceGroundTruthPositiveLabelIndexes: number[]): number {
        let softmaxLogLossSum: number = 0;
        for (let i: number = 0; i < softmaxVectors.length; i++) {
            softmaxLogLossSum += this.logLoss(softmaxVectors[i], instanceGroundTruthPositiveLabelIndexes[i]);
        }
        return softmaxLogLossSum / softmaxVectors.length;
    }
    /** Mean generic log loss over a batch of softmax vectors with (soft) label vectors. */
    public softmaxLogLossGeneric(
        softmaxVectors: number[][],
        labelVectors: number[][]): number {
        let softmaxLogLossSum: number = 0;
        for (let i: number = 0; i < softmaxVectors.length; i++) {
            softmaxLogLossSum += this.logLossGeneric(softmaxVectors[i], labelVectors[i]);
        }
        return softmaxLogLossSum / softmaxVectors.length;
    }
    /**
     * Compute softmax vectors for a mini batch of instances given as paired
     * sparse feature-index / feature-value arrays.
     * Returns one softmax vector (length = #labels) per instance in
     * [instanceFeatureVectorIndexBegin, instanceFeatureVectorIndexEnd);
     * out-of-range/unset bounds are clamped to [0, #instances].
     *
     * Fix: the original performed a standalone emptiness check on
     * instanceFeatureVectorSparseValueArrays that was fully subsumed by the
     * combined emptiness/length-mismatch check below; the duplicate is removed.
     */
    public matrixVectorProductSoftmaxSparseIndexesValues(
        instanceFeatureVectorSparseIndexArrays: number[][],
        instanceFeatureVectorSparseValueArrays: number[][],
        matrixWeightDenseArrays: number[][],
        biasVectorDenseValueArray: number[],
        instanceFeatureVectorIndexBegin: number = 0,
        instanceFeatureVectorIndexEnd: number = 0): number[][] {
        if (Utility.isEmptyNumberArrays(instanceFeatureVectorSparseIndexArrays)) {
            Utility.debuggingThrow(
                "instanceFeatureVectorSparseIndexArrays is empty.");
        }
        if (Utility.isEmptyNumberArrays(matrixWeightDenseArrays)) {
            Utility.debuggingThrow(
                "matrixWeightDenseArrays is empty.");
        }
        const numberVectors = instanceFeatureVectorSparseIndexArrays.length;
        if (Utility.isEmptyNumberArrays(instanceFeatureVectorSparseValueArrays)
            || (numberVectors > instanceFeatureVectorSparseValueArrays.length)) {
            Utility.debuggingThrow(
                "instanceFeatureVectorSparseValueArrays is empty or " +
                "does not have enough entries to match instanceFeatureVectorSparseIndexArrays.");
        }
        if (numberVectors <= 0) {
            Utility.debuggingThrow(
                `numberVectors is empty`);
        }
        // Clamp the mini-batch window into [0, numberVectors].
        if (instanceFeatureVectorIndexBegin < 0) {
            instanceFeatureVectorIndexBegin = 0;
        }
        if (instanceFeatureVectorIndexEnd <= 0) {
            instanceFeatureVectorIndexEnd = numberVectors;
        }
        if (instanceFeatureVectorIndexEnd > numberVectors) {
            instanceFeatureVectorIndexEnd = numberVectors;
        }
        const numberSoftmaxVectors: number =
            instanceFeatureVectorIndexEnd - instanceFeatureVectorIndexBegin;
        if (numberSoftmaxVectors <= 0) {
            Utility.debuggingThrow(
                `numberSoftmaxVectors is empty`);
        }
        const softmaxVectors: number[][] = new Array<number[]>(numberSoftmaxVectors);
        let indexSoftmaxVectors: number = 0;
        for (let i: number = instanceFeatureVectorIndexBegin; i < instanceFeatureVectorIndexEnd; i++) {
            const instanceFeatureVectorSparseIndexArray: number[] =
                instanceFeatureVectorSparseIndexArrays[i];
            const instanceFeatureVectorSparseValueArray: number[] =
                instanceFeatureVectorSparseValueArrays[i];
            const matrixVectorProduct: number[] =
                this.matrixVectorProductSparseIndexesValues(
                    instanceFeatureVectorSparseIndexArray,
                    instanceFeatureVectorSparseValueArray,
                    matrixWeightDenseArrays,
                    biasVectorDenseValueArray);
            // NOTE: these per-instance logs serialize whole matrices; they are
            // kept for behavioral parity but are expensive on large models.
            Utility.debuggingLog(
                `i=${i}, instanceFeatureVectorSparseIndexArray=${instanceFeatureVectorSparseIndexArray}`);
            Utility.debuggingLog(
                `i=${i}, instanceFeatureVectorSparseValueArray=${instanceFeatureVectorSparseValueArray}`);
            Utility.debuggingLog(
                `i=${i}, matrixWeightDenseArrays=${matrixWeightDenseArrays}`);
            Utility.debuggingLog(
                `i=${i}, biasVectorDenseValueArray=${biasVectorDenseValueArray}`);
            Utility.debuggingLog(
                `i=${i}, matrixVectorProduct=${matrixVectorProduct}`);
            const softmaxVector: number[] = this.softmaxFunction(matrixVectorProduct);
            softmaxVectors[indexSoftmaxVectors++] = softmaxVector;
        }
        return softmaxVectors;
    }
    /*
     * return:
     *   softmaxVectors:
     *     Each row is a softmax vector for an input instance.
     *     #rows is equal to #instances (one row per instance).
     *     Dimension: N X L, N: #instances, L: #labels.
     * inputs:
     *   instanceFeatureVectorSparseIndexArrays:
     *     Each row represents a sparse feature, one-hot-encoder vector for an input instance.
     *     #rows is the number of input instances.
     *     #columns is the number of sparse feature indexes for that row/instance.
     *     There is no limit to the length of each row, as long as the elements, feature indexes,
     *     fall in the feature range [0, #features).
     *     Dimension: N X iF, N: #instances, iF: indefinite #features.
     *   matrixWeightDenseArrays:
     *     Each row represents a dense feature, floating-point weight vector for a label.
     *     Row length is equal to #features.
     *     Dimension: L x F, L: #labels, F: #features.
     *   biasVectorDenseValueArray:
     *     A bias vector, each element is for a label.
     *     #biases is equal to #labels.
     *     Dimension: L, L: #labels.
     *   instanceFeatureVectorIndexBegin:
     *     The begin index for a mini batch.
     *   instanceFeatureVectorIndexEnd:
     *     The end index for a mini batch.
*/
    public matrixVectorProductSoftmaxSparseIndexes(
        instanceFeatureVectorSparseIndexArrays: number[][],
        matrixWeightDenseArrays: number[][],
        biasVectorDenseValueArray: number[],
        instanceFeatureVectorIndexBegin: number = 0,
        instanceFeatureVectorIndexEnd: number = 0): number[][] {
        if (Utility.isEmptyNumberArrays(instanceFeatureVectorSparseIndexArrays)) {
            Utility.debuggingThrow(
                "instanceFeatureVectorSparseIndexArrays is empty.");
        }
        if (Utility.isEmptyNumberArrays(matrixWeightDenseArrays)) {
            Utility.debuggingThrow(
                "matrixWeightDenseArrays is empty.");
        }
        const numberVectors = instanceFeatureVectorSparseIndexArrays.length;
        if (numberVectors <= 0) {
            Utility.debuggingThrow(
                `numberVectors is empty`);
        }
        // Clamp the mini-batch window into [0, numberVectors].
        if (instanceFeatureVectorIndexBegin < 0) {
            instanceFeatureVectorIndexBegin = 0;
        }
        if (instanceFeatureVectorIndexEnd <= 0) {
            instanceFeatureVectorIndexEnd = numberVectors;
        }
        if (instanceFeatureVectorIndexEnd > numberVectors) {
            instanceFeatureVectorIndexEnd = numberVectors;
        }
        const numberSoftmaxVectors: number =
            instanceFeatureVectorIndexEnd - instanceFeatureVectorIndexBegin;
        if (numberSoftmaxVectors <= 0) {
            Utility.debuggingThrow(
                `numberSoftmaxVectors is empty`);
        }
        const softmaxVectors: number[][] = new Array<number[]>(numberSoftmaxVectors);
        let outputIndex: number = 0;
        for (let i: number = instanceFeatureVectorIndexBegin; i < instanceFeatureVectorIndexEnd; i++) {
            // Logits for one instance, then normalized through softmax.
            const logits: number[] =
                this.matrixVectorProductSparseIndexes(
                    instanceFeatureVectorSparseIndexArrays[i],
                    matrixWeightDenseArrays,
                    biasVectorDenseValueArray);
            softmaxVectors[outputIndex++] = this.softmaxFunction(logits);
        }
        return softmaxVectors;
    }
    /*
     * return:
     *   softmaxVectors:
     *     Each row is a softmax vector for an input instance.
     *     #rows is equal to #instances (one row per instance).
     *     Dimension: N X L, N: #instances, L: #labels.
     * inputs:
     *   instanceFeatureVectorDenseValueArrays:
     *     Each row represents a dense feature value vector for an input instance.
     *     #rows is the number of input instances.
     *     The length of each row should be equal to the number of features.
     *     Dimension: N X F, N: #instances, F: #features.
     *   matrixWeightDenseArrays:
     *     Each row represents a dense feature, floating-point weight vector for a label.
     *     Row length is equal to #features.
     *     Dimension: L x F, L: #labels, F: #features.
     *   biasVectorDenseValueArray:
     *     A bias vector, each element is for a label.
     *     #biases is equal to #labels.
     *     Dimension: L, L: #labels.
     *   instanceFeatureVectorIndexBegin:
     *     The begin index for a mini batch.
     *   instanceFeatureVectorIndexEnd:
     *     The end index for a mini batch.
     */
    public matrixVectorProductSoftmaxDenseValues(
        instanceFeatureVectorDenseValueArrays: number[][],
        matrixWeightDenseArrays: number[][],
        biasVectorDenseValueArray: number[],
        instanceFeatureVectorIndexBegin: number = 0,
        instanceFeatureVectorIndexEnd: number = 0): number[][] {
        if (Utility.isEmptyNumberArrays(instanceFeatureVectorDenseValueArrays)) {
            Utility.debuggingThrow(
                "instanceFeatureVectorDenseValueArrays is empty.");
        }
        if (Utility.isEmptyNumberArrays(matrixWeightDenseArrays)) {
            Utility.debuggingThrow(
                "matrixWeightDenseArrays is empty.");
        }
        const numberVectors = instanceFeatureVectorDenseValueArrays.length;
        if (numberVectors <= 0) {
            Utility.debuggingThrow(
                `numberVectors is empty`);
        }
        // Clamp the mini-batch window into [0, numberVectors].
        if (instanceFeatureVectorIndexBegin < 0) {
            instanceFeatureVectorIndexBegin = 0;
        }
        if (instanceFeatureVectorIndexEnd <= 0) {
            instanceFeatureVectorIndexEnd = numberVectors;
        }
        if (instanceFeatureVectorIndexEnd > numberVectors) {
            instanceFeatureVectorIndexEnd = numberVectors;
        }
        const numberSoftmaxVectors: number =
            instanceFeatureVectorIndexEnd - instanceFeatureVectorIndexBegin;
        if (numberSoftmaxVectors <= 0) {
            Utility.debuggingThrow(
                `numberSoftmaxVectors is empty`);
        }
        const softmaxVectors: number[][] = new Array<number[]>(numberSoftmaxVectors);
        let outputIndex: number = 0;
        for (let i: number = instanceFeatureVectorIndexBegin; i < instanceFeatureVectorIndexEnd; i++) {
            const logits: number[] =
                this.matrixVectorProductDenseValues(
                    instanceFeatureVectorDenseValueArrays[i],
                    matrixWeightDenseArrays,
                    biasVectorDenseValueArray);
            softmaxVectors[outputIndex++] = this.softmaxFunction(logits);
        }
        return softmaxVectors;
    }
    /** Matrix-vector product (one entry per weight row) for a sparse index/value feature vector. */
    public matrixVectorProductSparseIndexesValues(
        instanceFeatureVectorSparseIndexArray: number[],
        instanceFeatureVectorSparseValueArray: number[],
        matrixWeightDenseArrays: number[][],
        biasVectorDenseValueArray: number[]): number[] {
        const numberRows: number = matrixWeightDenseArrays.length;
        const product: number[] = this.vectorNewWithZeroElements(numberRows);
        return this.matrixVectorProductSparseIndexesValuesTo(
            product,
            instanceFeatureVectorSparseIndexArray,
            instanceFeatureVectorSparseValueArray,
            matrixWeightDenseArrays,
            biasVectorDenseValueArray);
    }
    /** Matrix-vector product for a sparse one-hot (index-only) feature vector. */
    public matrixVectorProductSparseIndexes(
        instanceFeatureVectorSparseIndexArray: number[],
        matrixWeightDenseArrays: number[][],
        biasVectorDenseValueArray: number[]): number[] {
        const numberRows: number = matrixWeightDenseArrays.length;
        const product: number[] = this.vectorNewWithZeroElements(numberRows);
        return this.matrixVectorProductSparseIndexesTo(
            product,
            instanceFeatureVectorSparseIndexArray,
            matrixWeightDenseArrays,
            biasVectorDenseValueArray);
    }
    public matrixVectorProductDenseValues(
        vectorDenseValueArray: number[],
        matrixWeightDenseArrays: number[][],
        biasVectorDenseValueArray: number[]): number[] {
        const lengthRowMatrix: number = matrixWeightDenseArrays.length;
        const matrixVectorProduct: number[] =
this.vectorNewWithZeroElements(lengthRowMatrix);
        return this.matrixVectorProductDenseValuesTo(
            matrixVectorProduct,
            vectorDenseValueArray,
            matrixWeightDenseArrays,
            biasVectorDenseValueArray);
    }
    /**
     * In place: matrixVectorProduct[row] = <weights[row], sparse vector> + bias[row].
     * The bias is used only when biasVectorDenseValueArray is non-empty.
     */
    public matrixVectorProductSparseIndexesValuesTo(
        matrixVectorProduct: number[],
        instanceFeatureVectorSparseIndexArray: number[],
        instanceFeatureVectorSparseValueArray: number[],
        matrixWeightDenseArrays: number[][],
        biasVectorDenseValueArray: number[]): number[] {
        const lengthRowMatrix: number = matrixWeightDenseArrays.length;
        for (let row: number = 0; row < lengthRowMatrix; row++) {
            let biasVectorDenseValue: number = 0;
            if (!Utility.isEmptyNumberArray(biasVectorDenseValueArray)) {
                biasVectorDenseValue = biasVectorDenseValueArray[row];
            }
            matrixVectorProduct[row] = this.dotProductSparseIndexesValues(
                instanceFeatureVectorSparseIndexArray,
                instanceFeatureVectorSparseValueArray,
                matrixWeightDenseArrays[row],
                biasVectorDenseValue);
        }
        return matrixVectorProduct;
    }
    /** In place: one dot product per weight row, sparse one-hot (index-only) features. */
    public matrixVectorProductSparseIndexesTo(
        matrixVectorProduct: number[],
        instanceFeatureVectorSparseIndexArray: number[],
        matrixWeightDenseArrays: number[][],
        biasVectorDenseValueArray: number[]): number[] {
        const lengthRowMatrix: number = matrixWeightDenseArrays.length;
        for (let row: number = 0; row < lengthRowMatrix; row++) {
            let biasVectorDenseValue: number = 0;
            if (!Utility.isEmptyNumberArray(biasVectorDenseValueArray)) {
                biasVectorDenseValue = biasVectorDenseValueArray[row];
            }
            matrixVectorProduct[row] = this.dotProductSparseIndexes(
                instanceFeatureVectorSparseIndexArray,
                matrixWeightDenseArrays[row],
                biasVectorDenseValue);
        }
        return matrixVectorProduct;
    }
    /** In place: one dot product per weight row, dense feature values. */
    public matrixVectorProductDenseValuesTo(
        matrixVectorProduct: number[],
        vectorDenseValueArray: number[],
        matrixWeightDenseArrays: number[][],
        biasVectorDenseValueArray: number[]): number[] {
        const lengthRowMatrix: number = matrixWeightDenseArrays.length;
        for (let row: number = 0; row < lengthRowMatrix; row++) {
            let biasVectorDenseValue: number = 0;
            if (!Utility.isEmptyNumberArray(biasVectorDenseValueArray)) {
                biasVectorDenseValue = biasVectorDenseValueArray[row];
            }
            matrixVectorProduct[row] = this.dotProductDenseValues(
                vectorDenseValueArray,
                matrixWeightDenseArrays[row],
                biasVectorDenseValue);
        }
        return matrixVectorProduct;
    }
    /** Vector-matrix product (row-combination of the matrix) for sparse index/value features. */
    public vectorMatrixProductSparseIndexesValues(
        instanceFeatureVectorSparseIndexArray: number[],
        instanceFeatureVectorSparseValueArray: number[],
        matrixWeightDenseArrays: number[][],
        biasVectorDenseValueArray: number[]): number[] {
        const lengthColumnMatrix: number =
            matrixWeightDenseArrays[0].length;
        const vectorMatrixProduct: number[] =
            this.vectorNewWithZeroElements(lengthColumnMatrix);
        return this.vectorMatrixProductSparseIndexesValuesTo(
            vectorMatrixProduct,
            instanceFeatureVectorSparseIndexArray,
            instanceFeatureVectorSparseValueArray,
            matrixWeightDenseArrays,
            biasVectorDenseValueArray);
    }
    /** Vector-matrix product for sparse one-hot (index-only) features. */
    public vectorMatrixProductSparseIndexes(
        instanceFeatureVectorSparseIndexArray: number[],
        matrixWeightDenseArrays: number[][],
        biasVectorDenseValueArray: number[]): number[] {
        const lengthColumnMatrix: number =
            matrixWeightDenseArrays[0].length;
        const vectorMatrixProduct: number[] =
            this.vectorNewWithZeroElements(lengthColumnMatrix);
        return this.vectorMatrixProductSparseIndexesTo(
            vectorMatrixProduct,
            instanceFeatureVectorSparseIndexArray,
            matrixWeightDenseArrays,
            biasVectorDenseValueArray);
    }
    /**
     * Vector-matrix product for dense values.
     * Fix: removed an unused local (`lengthRowMatrix`) present in the original.
     */
    public vectorMatrixProductDenseValues(
        vectorDenseValueArray: number[],
        matrixWeightDenseArrays: number[][],
        biasVectorDenseValueArray: number[]): number[] {
        const lengthColumnMatrix: number =
            matrixWeightDenseArrays[0].length;
        const vectorMatrixProduct: number[] =
            this.vectorNewWithZeroElements(lengthColumnMatrix);
        return this.vectorMatrixProductDenseValuesTo(
            vectorMatrixProduct,
            vectorDenseValueArray,
            matrixWeightDenseArrays,
            biasVectorDenseValueArray);
    }
    /** In place: accumulate value-scaled matrix rows selected by sparse indexes, then add bias. */
    public vectorMatrixProductSparseIndexesValuesTo(
        vectorMatrixProduct: number[],
        instanceFeatureVectorSparseIndexArray: number[],
        instanceFeatureVectorSparseValueArray: number[],
        matrixWeightDenseArrays: number[][],
        biasVectorDenseValueArray: number[]): number[] {
        for (let i: number = 0; i < instanceFeatureVectorSparseIndexArray.length; i++) {
            const vectorSparseIndex: number = instanceFeatureVectorSparseIndexArray[i];
            const vectorSparseValue: number = instanceFeatureVectorSparseValueArray[i];
            this.vectorDenseAddScaledTo(
                vectorMatrixProduct,
                matrixWeightDenseArrays[vectorSparseIndex],
                vectorSparseValue);
        }
        if (!Utility.isEmptyNumberArray(biasVectorDenseValueArray)) {
            this.vectorDenseAddTo(
                vectorMatrixProduct,
                biasVectorDenseValueArray);
        }
        return vectorMatrixProduct;
    }
    public vectorMatrixProductSparseIndexesTo(
        vectorMatrixProduct: number[],
        instanceFeatureVectorSparseIndexArray: number[],
        matrixWeightDenseArrays: number[][],
        biasVectorDenseValueArray: number[]): number[] {
        for (const vectorSparseIndex of instanceFeatureVectorSparseIndexArray) {
            const vectorSparseValue: number = 1;
            const matrixWeightDenseArray: number[] =
                matrixWeightDenseArrays[vectorSparseIndex];
            this.vectorDenseAddScaledTo(
                vectorMatrixProduct,
                matrixWeightDenseArray,
                vectorSparseValue);
        }
if (!Utility.isEmptyNumberArray(biasVectorDenseValueArray)) {
            this.vectorDenseAddTo(
                vectorMatrixProduct,
                biasVectorDenseValueArray);
        }
        return vectorMatrixProduct;
    }
    /** In place: accumulate value-scaled matrix rows (dense values), then add bias. */
    public vectorMatrixProductDenseValuesTo(
        vectorMatrixProduct: number[],
        vectorDenseValueArray: number[],
        matrixWeightDenseArrays: number[][],
        biasVectorDenseValueArray: number[]): number[] {
        const lengthRowMatrix: number = matrixWeightDenseArrays.length;
        for (let row: number = 0; row < lengthRowMatrix; row++) {
            this.vectorDenseAddScaledTo(
                vectorMatrixProduct,
                matrixWeightDenseArrays[row],
                vectorDenseValueArray[row]);
        }
        if (!Utility.isEmptyNumberArray(biasVectorDenseValueArray)) {
            this.vectorDenseAddTo(
                vectorMatrixProduct,
                biasVectorDenseValueArray);
        }
        return vectorMatrixProduct;
    }
    // NOTE(review): the offset/length parameters below are currently IGNORED
    // (original marked "NOTE-TODO"); the body is identical to
    // vectorMatrixProductDenseValuesTo. Confirm intended partial semantics
    // before relying on the offsets.
    public vectorMatrixProductPartialDenseValuesTo( // ---- NOTE-TODO ----
        vectorMatrixProduct: number[],
        vectorDenseValueArray: number[],
        matrixWeightDenseArrays: number[][],
        offsetDenseValueArray: number = 0,
        offsetMatrixWeightDenseArrays: number = 0,
        length: number = 0,
        biasVectorDenseValueArray: number[] = []): number[] {
        const lengthRowMatrix: number = matrixWeightDenseArrays.length;
        for (let row: number = 0; row < lengthRowMatrix; row++) {
            this.vectorDenseAddScaledTo(
                vectorMatrixProduct,
                matrixWeightDenseArrays[row],
                vectorDenseValueArray[row]);
        }
        if (!Utility.isEmptyNumberArray(biasVectorDenseValueArray)) {
            this.vectorDenseAddTo(
                vectorMatrixProduct,
                biasVectorDenseValueArray);
        }
        return vectorMatrixProduct;
    }
    /** Dot product of a sparse (index, value) vector with a dense weight vector, plus bias. */
    public dotProductSparseIndexesValues(
        sparseIndexArray: number[],
        sparseValueArray: number[],
        weights: number[],
        weightBias: number = 0): number {
        let dotProduct: number = 0;
        for (let i: number = 0; i < sparseIndexArray.length; i++) {
            dotProduct += sparseValueArray[i] * weights[sparseIndexArray[i]];
        }
        return (dotProduct + weightBias);
    }
    /** Dot product of a sparse one-hot vector (all values 1) with a dense weight vector, plus bias. */
    public dotProductSparseIndexes(
        sparseIndexArray: number[],
        weights: number[],
        weightBias: number = 0): number {
        let dotProduct: number = 0;
        for (const sparseIndex of sparseIndexArray) {
            dotProduct += weights[sparseIndex];
        }
        return (dotProduct + weightBias);
    }
    /** Dense dot product over weights.length elements, plus bias. */
    public dotProductDenseValues(
        denseValueArray: number[],
        weights: number[],
        weightBias: number = 0): number {
        let dotProduct: number = 0;
        for (let i: number = 0; i < weights.length; i++) {
            dotProduct += denseValueArray[i] * weights[i];
        }
        return (dotProduct + weightBias);
    }
    // NOTE(review): when `length` defaults to denseValueArray.length, nonzero
    // offsets can read past the end of either array — confirm intended usage.
    public dotProductPartialDenseValues( // ---- NOTE-TODO
        denseValueArray: number[],
        weights: number[],
        offsetDenseValueArray: number = 0,
        offsetWeights: number = 0,
        length: number = 0,
        weightBias: number = 0): number {
        if (length === 0) {
            length = denseValueArray.length;
        }
        let dotProduct: number = 0;
        let indexDenseValueArray: number = offsetDenseValueArray;
        let indexWeights: number = offsetWeights;
        for (let i: number = 0; i < length; i++) {
            dotProduct += denseValueArray[indexDenseValueArray] * weights[indexWeights];
            indexDenseValueArray++;
            indexWeights++;
        }
        return (dotProduct + weightBias);
    }
    /**
     * In place: array0 -= constant * array1, then lazily ("sparse" style)
     * L1/L2-regularize only the cells whose adjustment was nonzero.
     */
    public matrixDenseSubtractScaledFromAndL1l2RegularizedSparseTo(
        denseValueArray0: number[][],
        denseValueArray1: number[][],
        constant: number,
        l1Regularization: number,
        l2Regularization: number): number[][] {
        const rows: number = denseValueArray0.length;
        const columns: number = denseValueArray0[0].length;
        for (let row: number = 0; row < rows; row++) {
            const targetRow: number[] = denseValueArray0[row];
            const adjustmentRow: number[] = denseValueArray1[row];
            for (let column: number = 0; column < columns; column++) {
                const adjustment: number = adjustmentRow[column];
                targetRow[column] -= (constant * adjustment);
                if (adjustment !== 0) {
                    targetRow[column] =
                        this.getL1l2RegularizedWeightOptimizedSparse(
                            targetRow[column],
                            l1Regularization,
                            l2Regularization);
                }
            }
        }
        return denseValueArray0;
    }
    /** In place: array0 -= constant * array1, then dense-style L1/L2 regularization on touched cells. */
    public matrixDenseSubtractScaledFromAndL1l2RegularizedDenseTo(
        denseValueArray0: number[][],
        denseValueArray1: number[][],
        constant: number,
        l1Regularization: number,
        l2Regularization: number): number[][] {
        const rows: number = denseValueArray0.length;
        const columns: number = denseValueArray0[0].length;
        for (let row: number = 0; row < rows; row++) {
            const targetRow: number[] = denseValueArray0[row];
            const adjustmentRow: number[] = denseValueArray1[row];
            for (let column: number = 0; column < columns; column++) {
                const adjustment: number = adjustmentRow[column];
                targetRow[column] -= (constant * adjustment);
                if (adjustment !== 0) {
                    targetRow[column] =
                        this.getL1l2RegularizedWeightOptimizedDense(
                            targetRow[column],
                            l1Regularization,
                            l2Regularization);
                }
            }
        }
        return denseValueArray0;
    }
    /** Vector form of the scaled-subtract + sparse-regularize update. */
    public vectorDenseSubtractScaledFromAndL1l2RegularizedSparseTo(
        denseValueArray0: number[],
        denseValueArray1: number[],
        constant: number,
        l1Regularization: number,
        l2Regularization: number): number[] {
        for (let i: number = 0; i < denseValueArray0.length; i++) {
            const adjustment: number = denseValueArray1[i];
            denseValueArray0[i] -= (constant * adjustment);
            if (adjustment !== 0) {
                denseValueArray0[i] =
                    this.getL1l2RegularizedWeightOptimizedSparse(
                        denseValueArray0[i],
                        l1Regularization,
                        l2Regularization);
            }
        }
        return denseValueArray0;
    }
    /** Vector form of the scaled-subtract + dense-regularize update. */
    public vectorDenseSubtractScaledFromAndL1l2RegularizedDenseTo(
        denseValueArray0: number[],
        denseValueArray1: number[],
        constant: number,
        l1Regularization: number,
        l2Regularization: number): number[] {
        for (let i: number = 0; i < denseValueArray0.length; i++) {
            const adjustment: number = denseValueArray1[i];
            denseValueArray0[i] -= (constant * adjustment);
            if (adjustment !== 0) {
                denseValueArray0[i] =
                    this.getL1l2RegularizedWeightOptimizedDense(
                        denseValueArray0[i],
                        l1Regularization,
                        l2Regularization);
            }
        }
        return denseValueArray0;
    }
    public matrixDenseL1l2RegularizedSparseTo(
        denseValueArray: number[][],
        l1Regularization:
number = 0.01, l2Regularization: number = 0.01): number[][] { const rows: number = denseValueArray.length; const columns: number = denseValueArray[0].length; for (let row: number = 0; row < rows; row++) { for (let column: number = 0; column < columns; column++) { denseValueArray[row][column] = this.getL1l2RegularizedWeightOptimizedSparse( denseValueArray[row][column], l1Regularization, l2Regularization); } } return denseValueArray; } public matrixDenseL1l2RegularizedDenseTo( denseValueArray: number[][], l1Regularization: number = 0.01, l2Regularization: number = 0.01): number[][] { const rows: number = denseValueArray.length; const columns: number = denseValueArray[0].length; for (let row: number = 0; row < rows; row++) { for (let column: number = 0; column < columns; column++) { denseValueArray[row][column] = this.getL1l2RegularizedWeightOptimizedDense( denseValueArray[row][column], l1Regularization, l2Regularization); } } return denseValueArray; } public vectorDenseL1l2RegularizedSparseTo( denseValueArray: number[], l1Regularization: number = 0.01, l2Regularization: number = 0.01): number[] { for (let i: number = 0; i < denseValueArray.length; i++) { denseValueArray[i] = this.getL1l2RegularizedWeightOptimizedSparse( denseValueArray[i], l1Regularization, l2Regularization); } return denseValueArray; } public vectorDenseL1l2RegularizedDenseTo( denseValueArray: number[], l1Regularization: number = 0.01, l2Regularization: number = 0.01): number[] { for (let i: number = 0; i < denseValueArray.length; i++) { denseValueArray[i] = this.getL1l2RegularizedWeightOptimizedDense( denseValueArray[i], l1Regularization, l2Regularization); } return denseValueArray; } public tensor4dDenseAssignRandomTo( denseValueArray0: number[][][][]): number[][][][] { const rows: number = denseValueArray0.length; for (let row: number = 0; row < rows; row++) { this.tensor3dDenseAssignRandomTo( denseValueArray0[row]); } return denseValueArray0; } public tensor4dDenseAssignConstantTo( 
denseValueArray0: number[][][][], constant: number): number[][][][] { const rows: number = denseValueArray0.length; for (let row: number = 0; row < rows; row++) { this.tensor3dDenseAssignConstantTo( denseValueArray0[row], constant); } return denseValueArray0; } public tensor4dDenseAddConstantTo( denseValueArray0: number[][][][], constant: number): number[][][][] { const rows: number = denseValueArray0.length; for (let row: number = 0; row < rows; row++) { this.tensor3dDenseAddConstantTo( denseValueArray0[row], constant); } return denseValueArray0; } public tensor4dDenseMultiplyConstantTo( denseValueArray0: number[][][][], constant: number): number[][][][] { const rows: number = denseValueArray0.length; for (let row: number = 0; row < rows; row++) { this.tensor3dDenseMultiplyConstantTo( denseValueArray0[row], constant); } return denseValueArray0; } public tensor4dDenseSubtractConstantFrom( denseValueArray0: number[][][][], constant: number): number[][][][] { const rows: number = denseValueArray0.length; for (let row: number = 0; row < rows; row++) { this.tensor3dDenseSubtractConstantFrom( denseValueArray0[row], constant); } return denseValueArray0; } public tensor4dDenseDivideConstantFrom( denseValueArray0: number[][][][], constant: number): number[][][][] { const rows: number = denseValueArray0.length; for (let row: number = 0; row < rows; row++) { this.tensor3dDenseDivideConstantFrom( denseValueArray0[row], constant); } return denseValueArray0; } public tensor4dDenseAssignTo( denseValueArray0: number[][][][], denseValueArray1: number[][][][]): number[][][][] { const rows: number = denseValueArray0.length; for (let row: number = 0; row < rows; row++) { this.tensor3dDenseAssignTo( denseValueArray0[row], denseValueArray1[row]); } return denseValueArray0; } public tensor4dDenseAddTo( denseValueArray0: number[][][][], denseValueArray1: number[][][][]): number[][][][] { const rows: number = denseValueArray0.length; for (let row: number = 0; row < rows; row++) { 
this.tensor3dDenseAddTo(denseValueArray0[row], denseValueArray1[row]);
    }
    return denseValueArray0;
}
// ---- 4D tensor operations: each applies the matching 3D operation to every
// ---- outermost slice of denseValueArray0 in place and returns denseValueArray0.
// Element-wise multiply tensor0 by tensor1, in place.
public tensor4dDenseMultiplyTo(
    denseValueArray0: number[][][][],
    denseValueArray1: number[][][][]): number[][][][] {
    const rows: number = denseValueArray0.length;
    for (let row: number = 0; row < rows; row++) {
        this.tensor3dDenseMultiplyTo(denseValueArray0[row], denseValueArray1[row]);
    }
    return denseValueArray0;
}
// Element-wise subtract tensor1 from tensor0, in place.
public tensor4dDenseSubtractFrom(
    denseValueArray0: number[][][][],
    denseValueArray1: number[][][][]): number[][][][] {
    const rows: number = denseValueArray0.length;
    for (let row: number = 0; row < rows; row++) {
        this.tensor3dDenseSubtractFrom(denseValueArray0[row], denseValueArray1[row]);
    }
    return denseValueArray0;
}
// Element-wise divide tensor0 by tensor1, in place.
public tensor4dDenseDivideFrom(
    denseValueArray0: number[][][][],
    denseValueArray1: number[][][][]): number[][][][] {
    const rows: number = denseValueArray0.length;
    for (let row: number = 0; row < rows; row++) {
        this.tensor3dDenseDivideFrom(denseValueArray0[row], denseValueArray1[row]);
    }
    return denseValueArray0;
}
// Assign constant * tensor1 into tensor0 element-wise, in place.
public tensor4dDenseAssignScaledTo(
    denseValueArray0: number[][][][],
    denseValueArray1: number[][][][],
    constant: number): number[][][][] {
    const rows: number = denseValueArray0.length;
    for (let row: number = 0; row < rows; row++) {
        this.tensor3dDenseAssignScaledTo(denseValueArray0[row], denseValueArray1[row], constant);
    }
    return denseValueArray0;
}
// Add constant * tensor1 to tensor0 element-wise, in place.
public tensor4dDenseAddScaledTo(
    denseValueArray0: number[][][][],
    denseValueArray1: number[][][][],
    constant: number): number[][][][] {
    const rows: number = denseValueArray0.length;
    for (let row: number = 0; row < rows; row++) {
        this.tensor3dDenseAddScaledTo(denseValueArray0[row], denseValueArray1[row], constant);
    }
    return denseValueArray0;
}
// Multiply tensor0 by constant * tensor1 element-wise, in place.
public tensor4dDenseMultiplyScaledTo(
    denseValueArray0: number[][][][],
    denseValueArray1: number[][][][],
    constant: number): number[][][][] {
    const rows: number = denseValueArray0.length;
    for (let row: number = 0; row < rows; row++) {
        this.tensor3dDenseMultiplyScaledTo(denseValueArray0[row], denseValueArray1[row], constant);
    }
    return denseValueArray0;
}
// Subtract constant * tensor1 from tensor0 element-wise, in place.
public tensor4dDenseSubtractScaledFrom(
    denseValueArray0: number[][][][],
    denseValueArray1: number[][][][],
    constant: number): number[][][][] {
    const rows: number = denseValueArray0.length;
    for (let row: number = 0; row < rows; row++) {
        this.tensor3dDenseSubtractScaledFrom(denseValueArray0[row], denseValueArray1[row], constant);
    }
    return denseValueArray0;
}
// Divide tensor0 by constant * tensor1 element-wise, in place.
public tensor4dDenseDivideScaledFrom(
    denseValueArray0: number[][][][],
    denseValueArray1: number[][][][],
    constant: number): number[][][][] {
    const rows: number = denseValueArray0.length;
    for (let row: number = 0; row < rows; row++) {
        this.tensor3dDenseDivideScaledFrom(denseValueArray0[row], denseValueArray1[row], constant);
    }
    return denseValueArray0;
}
// ---- 3D tensor operations: each delegates row-by-row to the matrix variant.
// Fill every element of tensor0 with a fresh random number.
// FIX: removed an unused `columns` local (`denseValueArray0[0].length`) that
// also threw a TypeError on an empty outer array.
public tensor3dDenseAssignRandomTo(
    denseValueArray0: number[][][]): number[][][] {
    const rows: number = denseValueArray0.length;
    for (let row: number = 0; row < rows; row++) {
        this.matrixDenseAssignRandomTo(denseValueArray0[row]);
    }
    return denseValueArray0;
}
// Set every element of tensor0 to constant.
public tensor3dDenseAssignConstantTo(
    denseValueArray0: number[][][],
    constant: number): number[][][] {
    const rows: number = denseValueArray0.length;
    for (let row: number = 0; row < rows; row++) {
        this.matrixDenseAssignConstantTo(denseValueArray0[row], constant);
    }
    return denseValueArray0;
}
// Add constant to every element of tensor0.
public tensor3dDenseAddConstantTo(
    denseValueArray0: number[][][],
    constant: number): number[][][] {
    const rows: number = denseValueArray0.length;
    for (let row: number = 0; row < rows; row++) {
        this.matrixDenseAddConstantTo(denseValueArray0[row], constant);
    }
    return denseValueArray0;
}
// Multiply every element of tensor0 by constant.
public tensor3dDenseMultiplyConstantTo(
    denseValueArray0: number[][][],
    constant: number): number[][][] {
    const rows: number = denseValueArray0.length;
    for (let row: number = 0; row < rows; row++) {
        this.matrixDenseMultiplyConstantTo(denseValueArray0[row], constant);
    }
    return denseValueArray0;
}
// Subtract constant from every element of tensor0.
public tensor3dDenseSubtractConstantFrom(
    denseValueArray0: number[][][],
    constant: number): number[][][] {
    const rows: number = denseValueArray0.length;
    for (let row: number = 0; row < rows; row++) {
        this.matrixDenseSubtractConstantFrom(denseValueArray0[row], constant);
    }
    return denseValueArray0;
}
// Divide every element of tensor0 by constant.
public tensor3dDenseDivideConstantFrom(
    denseValueArray0: number[][][],
    constant: number): number[][][] {
    const rows: number = denseValueArray0.length;
    for (let row: number = 0; row < rows; row++) {
        this.matrixDenseDivideConstantFrom(denseValueArray0[row], constant);
    }
    return denseValueArray0;
}
// Copy tensor1's elements into tensor0, in place.
// FIX: removed an unused `columns` local (`denseValueArray0[0].length`) that
// also threw a TypeError on an empty outer array.
public tensor3dDenseAssignTo(
    denseValueArray0: number[][][],
    denseValueArray1: number[][][]): number[][][] {
    const rows: number = denseValueArray0.length;
    for (let row: number = 0; row < rows; row++) {
        this.matrixDenseAssignTo(denseValueArray0[row], denseValueArray1[row]);
    }
    return denseValueArray0;
}
// Element-wise add tensor1 to tensor0, in place.
public tensor3dDenseAddTo(
    denseValueArray0: number[][][],
    denseValueArray1: number[][][]): number[][][] {
    const rows: number = denseValueArray0.length;
    for (let row: number = 0; row < rows; row++) {
        this.matrixDenseAddTo(denseValueArray0[row], denseValueArray1[row]);
    }
    return denseValueArray0;
}
// Element-wise multiply tensor0 by tensor1, in place.
public tensor3dDenseMultiplyTo(
    denseValueArray0: number[][][],
    denseValueArray1: number[][][]): number[][][] {
    const rows: number = denseValueArray0.length;
    for (let row: number = 0; row < rows; row++) {
        this.matrixDenseMultiplyTo(denseValueArray0[row], denseValueArray1[row]);
    }
    return denseValueArray0;
}
// Element-wise subtract tensor1 from tensor0, in place.
public tensor3dDenseSubtractFrom(
    denseValueArray0: number[][][],
    denseValueArray1: number[][][]): number[][][] {
    const rows: number = denseValueArray0.length;
    for (let row: number = 0; row < rows; row++) {
        this.matrixDenseSubtractFrom(denseValueArray0[row], denseValueArray1[row]);
    }
    return denseValueArray0;
}
// Element-wise divide tensor0 by tensor1, in place.
public tensor3dDenseDivideFrom(
    denseValueArray0: number[][][],
    denseValueArray1: number[][][]): number[][][] {
    const rows: number = denseValueArray0.length;
    for (let row: number = 0; row < rows; row++) {
        this.matrixDenseDivideFrom(denseValueArray0[row], denseValueArray1[row]);
    }
    return denseValueArray0;
}
// Assign constant * tensor1 into tensor0 element-wise, in place.
public tensor3dDenseAssignScaledTo(
    denseValueArray0: number[][][],
    denseValueArray1: number[][][],
    constant: number): number[][][] {
    const rows: number = denseValueArray0.length;
    for (let row: number = 0; row < rows; row++) {
        this.matrixDenseAssignScaledTo(denseValueArray0[row], denseValueArray1[row], constant);
    }
    return denseValueArray0;
}
// Add constant * tensor1 to tensor0 element-wise, in place.
public tensor3dDenseAddScaledTo(
    denseValueArray0: number[][][],
    denseValueArray1: number[][][],
    constant: number): number[][][] {
    const rows: number = denseValueArray0.length;
    for (let row: number = 0; row < rows; row++) {
        this.matrixDenseAddScaledTo(denseValueArray0[row], denseValueArray1[row], constant);
    }
    return denseValueArray0;
}
// Multiply tensor0 by constant * tensor1 element-wise, in place.
public tensor3dDenseMultiplyScaledTo(
    denseValueArray0: number[][][],
    denseValueArray1: number[][][],
    constant: number): number[][][] {
    const rows: number = denseValueArray0.length;
    for (let row: number = 0; row < rows; row++) {
        this.matrixDenseMultiplyScaledTo(denseValueArray0[row], denseValueArray1[row], constant);
    }
    return denseValueArray0;
}
// Subtract constant * tensor1 from tensor0 element-wise, in place.
public tensor3dDenseSubtractScaledFrom(
    denseValueArray0: number[][][],
    denseValueArray1: number[][][],
    constant: number): number[][][] {
    const rows: number = denseValueArray0.length;
    for (let row: number = 0; row < rows; row++) {
        this.matrixDenseSubtractScaledFrom(denseValueArray0[row], denseValueArray1[row], constant);
    }
    return denseValueArray0;
}
// Divide tensor0 by constant * tensor1 element-wise, in place.
public tensor3dDenseDivideScaledFrom(
    denseValueArray0: number[][][],
    denseValueArray1: number[][][],
    constant: number): number[][][] {
    const rows: number = denseValueArray0.length;
    for (let row: number = 0; row < rows; row++) {
        this.matrixDenseDivideScaledFrom(denseValueArray0[row], denseValueArray1[row], constant);
    }
    return denseValueArray0;
}
// Fill every element of the matrix with a fresh random number.
public matrixDenseAssignRandomTo(
denseValueArray0: number[][]): number[][] { const rows: number = denseValueArray0.length; for (let row: number = 0; row < rows; row++) { this.vectorDenseAssignRandomTo( denseValueArray0[row]); } return denseValueArray0; } public matrixDenseAssignConstantTo( denseValueArray0: number[][], constant: number): number[][] { const rows: number = denseValueArray0.length; for (let row: number = 0; row < rows; row++) { this.vectorDenseAssignConstantTo( denseValueArray0[row], constant); } return denseValueArray0; } public matrixDenseAddConstantTo( denseValueArray0: number[][], constant: number): number[][] { const rows: number = denseValueArray0.length; for (let row: number = 0; row < rows; row++) { this.vectorDenseAddConstantTo( denseValueArray0[row], constant); } return denseValueArray0; } public matrixDenseMultiplyConstantTo( denseValueArray0: number[][], constant: number): number[][] { const rows: number = denseValueArray0.length; for (let row: number = 0; row < rows; row++) { this.vectorDenseMultiplyConstantTo( denseValueArray0[row], constant); } return denseValueArray0; } public matrixDenseSubtractConstantFrom( denseValueArray0: number[][], constant: number): number[][] { const rows: number = denseValueArray0.length; for (let row: number = 0; row < rows; row++) { this.vectorDenseSubtractConstantFrom( denseValueArray0[row], constant); } return denseValueArray0; } public matrixDenseDivideConstantFrom( denseValueArray0: number[][], constant: number): number[][] { const rows: number = denseValueArray0.length; for (let row: number = 0; row < rows; row++) { this.vectorDenseDivideConstantFrom( denseValueArray0[row], constant); } return denseValueArray0; } public matrixDenseAssignTo( denseValueArray0: number[][], denseValueArray1: number[][]): number[][] { const rows: number = denseValueArray0.length; for (let row: number = 0; row < rows; row++) { this.vectorDenseAssignTo( denseValueArray0[row], denseValueArray1[row]); } return denseValueArray0; } public matrixDenseAddTo( 
denseValueArray0: number[][], denseValueArray1: number[][]): number[][] { const rows: number = denseValueArray0.length; for (let row: number = 0; row < rows; row++) { this.vectorDenseAddTo( denseValueArray0[row], denseValueArray1[row]); } return denseValueArray0; } public matrixDenseMultiplyTo( denseValueArray0: number[][], denseValueArray1: number[][]): number[][] { const rows: number = denseValueArray0.length; for (let row: number = 0; row < rows; row++) { this.vectorDenseMultiplyTo( denseValueArray0[row], denseValueArray1[row]); } return denseValueArray0; } public matrixDenseSubtractFrom( denseValueArray0: number[][], denseValueArray1: number[][]): number[][] { const rows: number = denseValueArray0.length; for (let row: number = 0; row < rows; row++) { this.vectorDenseSubtractFrom( denseValueArray0[row], denseValueArray1[row]); } return denseValueArray0; } public matrixDenseDivideFrom( denseValueArray0: number[][], denseValueArray1: number[][]): number[][] { const rows: number = denseValueArray0.length; for (let row: number = 0; row < rows; row++) { this.vectorDenseDivideFrom( denseValueArray0[row], denseValueArray1[row]); } return denseValueArray0; } public matrixDenseAssignScaledTo( denseValueArray0: number[][], denseValueArray1: number[][], constant: number): number[][] { const rows: number = denseValueArray0.length; for (let row: number = 0; row < rows; row++) { this.vectorDenseAssignScaledTo( denseValueArray0[row], denseValueArray1[row], constant); } return denseValueArray0; } public matrixDenseAddScaledTo( denseValueArray0: number[][], denseValueArray1: number[][], constant: number): number[][] { const rows: number = denseValueArray0.length; for (let row: number = 0; row < rows; row++) { this.vectorDenseAddScaledTo( denseValueArray0[row], denseValueArray1[row], constant); } return denseValueArray0; } public matrixDenseMultiplyScaledTo( denseValueArray0: number[][], denseValueArray1: number[][], constant: number): number[][] { const rows: number = 
denseValueArray0.length; for (let row: number = 0; row < rows; row++) { this.vectorDenseMultiplyScaledTo( denseValueArray0[row], denseValueArray1[row], constant); } return denseValueArray0; } public matrixDenseSubtractScaledFrom( denseValueArray0: number[][], denseValueArray1: number[][], constant: number): number[][] { const rows: number = denseValueArray0.length; for (let row: number = 0; row < rows; row++) { this.vectorDenseSubtractScaledFrom( denseValueArray0[row], denseValueArray1[row], constant); } return denseValueArray0; } public matrixDenseDivideScaledFrom( denseValueArray0: number[][], denseValueArray1: number[][], constant: number): number[][] { const rows: number = denseValueArray0.length; for (let row: number = 0; row < rows; row++) { this.vectorDenseDivideScaledFrom( denseValueArray0[row], denseValueArray1[row], constant); } return denseValueArray0; } public vectorDenseAssignRandomTo( denseValueArray0: number[]): number[] { for (let i: number = 0; i < denseValueArray0.length; i++) { denseValueArray0[i] = Utility.getRandomNumber(); } return denseValueArray0; } public vectorDenseAssignConstantTo( denseValueArray0: number[], constant: number): number[] { for (let i: number = 0; i < denseValueArray0.length; i++) { denseValueArray0[i] = constant; } return denseValueArray0; } public vectorDenseAddConstantTo( denseValueArray0: number[], constant: number): number[] { for (let i: number = 0; i < denseValueArray0.length; i++) { denseValueArray0[i] += constant; } return denseValueArray0; } public vectorDenseMultiplyConstantTo( denseValueArray0: number[], constant: number): number[] { for (let i: number = 0; i < denseValueArray0.length; i++) { denseValueArray0[i] *= constant; } return denseValueArray0; } public vectorDenseSubtractConstantFrom( denseValueArray0: number[], constant: number): number[] { for (let i: number = 0; i < denseValueArray0.length; i++) { denseValueArray0[i] -= constant; } return denseValueArray0; } public vectorDenseDivideConstantFrom( 
denseValueArray0: number[], constant: number): number[] { for (let i: number = 0; i < denseValueArray0.length; i++) { denseValueArray0[i] /= constant; } return denseValueArray0; } public vectorDenseAssignTo( denseValueArray0: number[], denseValueArray1: number[]): number[] { for (let i: number = 0; i < denseValueArray0.length; i++) { denseValueArray0[i] = denseValueArray1[i]; } return denseValueArray0; } public vectorDenseAddTo( denseValueArray0: number[], denseValueArray1: number[]): number[] { for (let i: number = 0; i < denseValueArray0.length; i++) { denseValueArray0[i] += denseValueArray1[i]; } return denseValueArray0; } public vectorDenseMultiplyTo( denseValueArray0: number[], denseValueArray1: number[]): number[] { for (let i: number = 0; i < denseValueArray0.length; i++) { denseValueArray0[i] *= denseValueArray1[i]; } return denseValueArray0; } public vectorDenseSubtractFrom( denseValueArray0: number[], denseValueArray1: number[]): number[] { for (let i: number = 0; i < denseValueArray0.length; i++) { denseValueArray0[i] -= denseValueArray1[i]; } return denseValueArray0; } public vectorDenseDivideFrom( denseValueArray0: number[], denseValueArray1: number[]): number[] { for (let i: number = 0; i < denseValueArray0.length; i++) { denseValueArray0[i] /= denseValueArray1[i]; } return denseValueArray0; } public vectorDenseAssignScaledTo( denseValueArray0: number[], denseValueArray1: number[], constant: number): number[] { for (let i: number = 0; i < denseValueArray0.length; i++) { denseValueArray0[i] = (constant * denseValueArray1[i]); } return denseValueArray0; } public vectorDenseAddScaledTo( denseValueArray0: number[], denseValueArray1: number[], constant: number): number[] { for (let i: number = 0; i < denseValueArray0.length; i++) { denseValueArray0[i] += (constant * denseValueArray1[i]); } return denseValueArray0; } public vectorDenseMultiplyScaledTo( denseValueArray0: number[], denseValueArray1: number[], constant: number): number[] { for (let i: number = 
0; i < denseValueArray0.length; i++) { denseValueArray0[i] *= (constant * denseValueArray1[i]); } return denseValueArray0; } public vectorDenseSubtractScaledFrom( denseValueArray0: number[], denseValueArray1: number[], constant: number): number[] { for (let i: number = 0; i < denseValueArray0.length; i++) { denseValueArray0[i] -= (constant * denseValueArray1[i]); } return denseValueArray0; } public vectorDenseDivideScaledFrom( denseValueArray0: number[], denseValueArray1: number[], constant: number): number[] { for (let i: number = 0; i < denseValueArray0.length; i++) { denseValueArray0[i] /= (constant * denseValueArray1[i]); } return denseValueArray0; } public vectorSparseAssignRandomTo( sparseIndexArray0: number[], sparseValueArray0: number[]): [number[], number[]] { for (let i: number = 0; i < sparseValueArray0.length; i++) { sparseValueArray0[i] = Utility.getRandomNumber(); } return [sparseIndexArray0, sparseValueArray0]; } public vectorSparseAssignConstantTo( sparseIndexArray0: number[], sparseValueArray0: number[], constant: number): [number[], number[]] { for (let i: number = 0; i < sparseValueArray0.length; i++) { sparseValueArray0[i] = constant; } return [sparseIndexArray0, sparseValueArray0]; } public vectorSparseAddConstantTo( sparseIndexArray0: number[], sparseValueArray0: number[], constant: number): [number[], number[]] { for (let i: number = 0; i < sparseValueArray0.length; i++) { sparseValueArray0[i] += constant; } return [sparseIndexArray0, sparseValueArray0]; } public vectorSparseMultiplyConstantTo( sparseIndexArray0: number[], sparseValueArray0: number[], constant: number): [number[], number[]] { for (let i: number = 0; i < sparseValueArray0.length; i++) { sparseValueArray0[i] *= constant; } return [sparseIndexArray0, sparseValueArray0]; } public vectorSparseSubtractConstantFrom( sparseIndexArray0: number[], sparseValueArray0: number[], constant: number): [number[], number[]] { for (let i: number = 0; i < sparseValueArray0.length; i++) { 
sparseValueArray0[i] -= constant; } return [sparseIndexArray0, sparseValueArray0]; } public vectorSparseDivideConstantFrom( sparseIndexArray0: number[], sparseValueArray0: number[], constant: number): [number[], number[]] { for (let i: number = 0; i < sparseValueArray0.length; i++) { sparseValueArray0[i] /= constant; } return [sparseIndexArray0, sparseValueArray0]; } public vectorSparseAssignTo( sparseIndexArray0: number[], sparseValueArray0: number[], sparseIndexArray1: number[], sparseValueArray1: number[]): [number[], number[]] { sparseIndexArray0.length = 0; sparseValueArray0.length = 0; for (const x of sparseIndexArray1) { sparseIndexArray0.push(x); } for (const x of sparseValueArray1) { sparseValueArray0.push(x); } return [sparseIndexArray0, sparseValueArray0]; } public vectorSparseAddTo( sparseIndexArray0: number[], sparseValueArray0: number[], sparseIndexArray1: number[], sparseValueArray1: number[]): [number[], number[]] { const sparseArrayMap: Map<number, number> = Utility.sparseArrayPairToMap(sparseIndexArray0, sparseValueArray0); for (let i: number = 0; i < sparseIndexArray1.length; i++) { const index: number = sparseIndexArray1[i]; const value: number = sparseValueArray1[i]; if (value !== 0) { // ---- NOTE ---- should not be zero anyway as it's sparse structure! 
if (sparseArrayMap.has(index)) { sparseArrayMap.set(index, (sparseArrayMap.get(index) as number) + value); } else { sparseArrayMap.set(index, value); } } } sparseIndexArray0.length = 0; sparseValueArray0.length = 0; for (const x of sparseArrayMap.entries()) { const key: number = x[0]; const value: number = x[1]; if (value !== 0) { sparseIndexArray0.push(key); sparseValueArray0.push(value); } } return [sparseIndexArray0, sparseValueArray0]; } public vectorSparseMultiplyTo( sparseIndexArray0: number[], sparseValueArray0: number[], sparseIndexArray1: number[], sparseValueArray1: number[]): [number[], number[]] { const sparseArrayMap: Map<number, number> = Utility.sparseArrayPairToMap(sparseIndexArray0, sparseValueArray0); for (let i: number = 0; i < sparseIndexArray1.length; i++) { const index: number = sparseIndexArray1[i]; const value: number = sparseValueArray1[i]; if (sparseArrayMap.has(index)) { sparseArrayMap.set(index, (sparseArrayMap.get(index) as number) * value); // ---- NOTE ---- multiply to 0 is still 0 ---- } else { // ---- NOTE ---- multiply to 0 is still 0 ---- sparseArrayMap.set(index, value); } } sparseIndexArray0.length = 0; sparseValueArray0.length = 0; for (const x of sparseArrayMap.entries()) { const key: number = x[0]; const value: number = x[1]; if (value !== 0) { sparseIndexArray0.push(key); sparseValueArray0.push(value); } } return [sparseIndexArray0, sparseValueArray0]; } public vectorSparseSubtractFrom( sparseIndexArray0: number[], sparseValueArray0: number[], sparseIndexArray1: number[], sparseValueArray1: number[]): [number[], number[]] { const sparseArrayMap: Map<number, number> = Utility.sparseArrayPairToMap(sparseIndexArray0, sparseValueArray0); for (let i: number = 0; i < sparseIndexArray1.length; i++) { const index: number = sparseIndexArray1[i]; const value: number = sparseValueArray1[i]; if (value !== 0) { // ---- NOTE ---- should not be zero anyway as it's sparse structure! 
if (sparseArrayMap.has(index)) { sparseArrayMap.set(index, (sparseArrayMap.get(index) as number) - value); } else { sparseArrayMap.set(index, -value); } } } sparseIndexArray0.length = 0; sparseValueArray0.length = 0; for (const x of sparseArrayMap.entries()) { const key: number = x[0]; const value: number = x[1]; if (value !== 0) { sparseIndexArray0.push(key); sparseValueArray0.push(value); } } return [sparseIndexArray0, sparseValueArray0]; } public vectorSparseDivideFrom( sparseIndexArray0: number[], sparseValueArray0: number[], sparseIndexArray1: number[], sparseValueArray1: number[]): [number[], number[]] { const sparseArrayMap: Map<number, number> = Utility.sparseArrayPairToMap(sparseIndexArray0, sparseValueArray0); for (let i: number = 0; i < sparseIndexArray1.length; i++) { const index: number = sparseIndexArray1[i]; const value: number = sparseValueArray1[i]; if (value !== 0) { // ---- NOTE ---- should not be zero anyway as it's sparse structure! if (sparseArrayMap.has(index)) { sparseArrayMap.set(index, (sparseArrayMap.get(index) as number) / value); // ---- NOTE ---- 0 divided is still 0 ---- } else { // ---- NOTE ---- 0 divided is still 0 ---- sparseArrayMap.set(index, value); } } } sparseIndexArray0.length = 0; sparseValueArray0.length = 0; for (const x of sparseArrayMap.entries()) { const key: number = x[0]; const value: number = x[1]; if (value !== 0) { sparseIndexArray0.push(key); sparseValueArray0.push(value); } } return [sparseIndexArray0, sparseValueArray0]; } public vectorSparseAssignScaledTo( sparseIndexArray0: number[], sparseValueArray0: number[], sparseIndexArray1: number[], sparseValueArray1: number[], constant: number): [number[], number[]] { sparseIndexArray0.length = 0; sparseValueArray0.length = 0; for (const x of sparseIndexArray1) { sparseIndexArray0.push(x); } for (const x of sparseValueArray1) { sparseValueArray0.push(constant * x); } return [sparseIndexArray0, sparseValueArray0]; } public vectorSparseAddScaledTo( sparseIndexArray0: 
number[],
    sparseValueArray0: number[],
    sparseIndexArray1: number[],
    sparseValueArray1: number[],
    constant: number): [number[], number[]] {
    if (constant === 0) {
        // Adding 0 * vector1 changes nothing.
        return [sparseIndexArray0, sparseValueArray0];
    }
    const merged: Map<number, number> =
        Utility.sparseArrayPairToMap(sparseIndexArray0, sparseValueArray0);
    for (let k: number = 0; k < sparseIndexArray1.length; k++) {
        const index: number = sparseIndexArray1[k];
        const value: number = sparseValueArray1[k];
        if (value === 0) {
            continue; // a well-formed sparse structure never stores zero
        }
        const scaled: number = constant * value;
        const existing: number | undefined = merged.get(index);
        merged.set(index, existing === undefined ? scaled : existing + scaled);
    }
    sparseIndexArray0.length = 0;
    sparseValueArray0.length = 0;
    for (const [index, value] of merged) {
        if (value !== 0) {
            sparseIndexArray0.push(index);
            sparseValueArray0.push(value);
        }
    }
    return [sparseIndexArray0, sparseValueArray0];
}
// Sparse in-place multiply by (constant * vector1).
// NOTE(review): an index present only in vector1 is inserted with its scaled
// value (missing vector0 entry acts like 1, not 0), vector0-only entries stay
// unchanged, and constant === 0 is not special-cased here; confirm intended.
public vectorSparseMultiplyScaledTo(
    sparseIndexArray0: number[],
    sparseValueArray0: number[],
    sparseIndexArray1: number[],
    sparseValueArray1: number[],
    constant: number): [number[], number[]] {
    const merged: Map<number, number> =
        Utility.sparseArrayPairToMap(sparseIndexArray0, sparseValueArray0);
    for (let k: number = 0; k < sparseIndexArray1.length; k++) {
        const index: number = sparseIndexArray1[k];
        const scaled: number = constant * sparseValueArray1[k];
        const existing: number | undefined = merged.get(index);
        merged.set(index, existing === undefined ? scaled : existing * scaled);
    }
    sparseIndexArray0.length = 0;
    sparseValueArray0.length = 0;
    for (const [index, value] of merged) {
        if (value !== 0) {
            sparseIndexArray0.push(index);
            sparseValueArray0.push(value);
        }
    }
    return [sparseIndexArray0, sparseValueArray0];
}
// Sparse in-place subtract of constant * vector1 from vector0.
public vectorSparseSubtractScaledFrom(
    sparseIndexArray0: number[],
    sparseValueArray0: number[],
    sparseIndexArray1: number[],
    sparseValueArray1: number[],
    constant: number): [number[], number[]] {
    if (constant === 0) {
        // Subtracting 0 * vector1 changes nothing.
        return [sparseIndexArray0, sparseValueArray0];
    }
    const merged: Map<number, number> =
        Utility.sparseArrayPairToMap(sparseIndexArray0, sparseValueArray0);
    for (let k: number = 0; k < sparseIndexArray1.length; k++) {
        const index: number = sparseIndexArray1[k];
        const value: number = sparseValueArray1[k];
        if (value === 0) {
            continue; // a well-formed sparse structure never stores zero
        }
        const scaled: number = constant * value;
        const existing: number | undefined = merged.get(index);
        merged.set(index, existing === undefined ? -scaled : existing - scaled);
    }
    sparseIndexArray0.length = 0;
    sparseValueArray0.length = 0;
    for (const [index, value] of merged) {
        if (value !== 0) {
            sparseIndexArray0.push(index);
            sparseValueArray0.push(value);
        }
    }
    return [sparseIndexArray0, sparseValueArray0];
}
// Sparse in-place divide of vector0 by (constant * vector1).
// NOTE(review): an index present only in vector1 is inserted with its scaled
// value even though 0 / scaled would be 0 — original behavior kept; it
// conflicts with the stated "0 divided is still 0" intent; confirm intended.
public vectorSparseDivideScaledFrom(
    sparseIndexArray0: number[],
    sparseValueArray0: number[],
    sparseIndexArray1: number[],
    sparseValueArray1: number[],
    constant: number): [number[], number[]] {
    if (constant === 0) {
        // Dividing by 0 * vector1 would divide by zero; bail out unchanged.
        return [sparseIndexArray0, sparseValueArray0];
    }
    const merged: Map<number, number> =
        Utility.sparseArrayPairToMap(sparseIndexArray0, sparseValueArray0);
    for (let k: number = 0; k < sparseIndexArray1.length; k++) {
        const index: number = sparseIndexArray1[k];
        const value: number = sparseValueArray1[k];
        if (value === 0) {
            continue; // a well-formed sparse structure never stores zero
        }
        const scaled: number = constant * value;
        const existing: number | undefined = merged.get(index);
        merged.set(index, existing === undefined ? scaled : existing / scaled);
    }
    sparseIndexArray0.length = 0;
    sparseValueArray0.length = 0;
    for (const [index, value] of merged) {
        if (value !== 0) {
            sparseIndexArray0.push(index);
            sparseValueArray0.push(value);
        }
    }
    return [sparseIndexArray0, sparseValueArray0];
}
// ---- "Sparse index over dense array" operations: sparseIndexArray0 selects
// ---- which positions of denseValueArray0 are updated in place.
// Randomize the selected dense positions.
public vectorSparseIndexDenseArrayAssignRandomTo(
    sparseIndexArray0: number[],
    denseValueArray0: number[]): [number[], number[]] {
    for (const position of sparseIndexArray0) {
        denseValueArray0[position] = Utility.getRandomNumber();
    }
    return [sparseIndexArray0, denseValueArray0];
}
// Set the selected dense positions to constant.
public vectorSparseIndexDenseArrayAssignConstantTo(
    sparseIndexArray0: number[],
    denseValueArray0: number[],
    constant: number): [number[], number[]] {
    for (const position of sparseIndexArray0) {
        denseValueArray0[position] = constant;
    }
    return [sparseIndexArray0, denseValueArray0];
}
// Add constant to the selected dense positions.
public vectorSparseIndexDenseArrayAddConstantTo(
    sparseIndexArray0: number[],
    denseValueArray0: number[],
    constant: number): [number[], number[]] {
    for (const position of sparseIndexArray0) {
        denseValueArray0[position] += constant;
    }
    return [sparseIndexArray0, denseValueArray0];
}
// Multiply the selected dense positions by constant.
public vectorSparseIndexDenseArrayMultiplyConstantTo(
    sparseIndexArray0: number[],
    denseValueArray0: number[],
    constant: number): [number[], number[]] {
    for (const position of sparseIndexArray0) {
        denseValueArray0[position] *= constant;
    }
    return [sparseIndexArray0, denseValueArray0];
}
// Subtract constant from the selected dense positions.
public vectorSparseIndexDenseArraySubtractConstantFrom(
    sparseIndexArray0: number[],
    denseValueArray0: number[],
    constant: number): [number[], number[]] {
    for (const position of sparseIndexArray0) {
        denseValueArray0[position] -= constant;
    }
    return [sparseIndexArray0, denseValueArray0];
}
// Divide the dense positions selected by sparseIndexArray0 by constant.
public vectorSparseIndexDenseArrayDivideConstantFrom(
    sparseIndexArray0: number[],
    denseValueArray0: number[],
    constant: number): [number[], number[]] {
    for (const position of sparseIndexArray0) {
        denseValueArray0[position] /= constant;
    }
    return [sparseIndexArray0, denseValueArray0];
}
// Copy array1's values into array0 at the selected positions.
public vectorSparseIndexDenseArrayAssignTo(
    sparseIndexArray0: number[],
    denseValueArray0: number[],
    denseValueArray1: number[]): [number[], number[]] {
    for (const position of sparseIndexArray0) {
        denseValueArray0[position] = denseValueArray1[position];
    }
    return [sparseIndexArray0, denseValueArray0];
}
// Add array1's values to array0 at the selected positions.
public vectorSparseIndexDenseArrayAddTo(
    sparseIndexArray0: number[],
    denseValueArray0: number[],
    denseValueArray1: number[]): [number[], number[]] {
    for (const position of sparseIndexArray0) {
        denseValueArray0[position] += denseValueArray1[position];
    }
    return [sparseIndexArray0, denseValueArray0];
}
// Multiply array0 by array1 at the selected positions.
public vectorSparseIndexDenseArrayMultiplyTo(
    sparseIndexArray0: number[],
    denseValueArray0: number[],
    denseValueArray1: number[]): [number[], number[]] {
    for (const position of sparseIndexArray0) {
        denseValueArray0[position] *= denseValueArray1[position];
    }
    return [sparseIndexArray0, denseValueArray0];
}
// Subtract array1 from array0 at the selected positions.
public vectorSparseIndexDenseArraySubtractFrom(
    sparseIndexArray0: number[],
    denseValueArray0: number[],
    denseValueArray1: number[]): [number[], number[]] {
    for (const position of sparseIndexArray0) {
        denseValueArray0[position] -= denseValueArray1[position];
    }
    return [sparseIndexArray0, denseValueArray0];
}
// Divide array0 by array1 at the selected positions.
public vectorSparseIndexDenseArrayDivideFrom(
    sparseIndexArray0: number[],
    denseValueArray0: number[],
    denseValueArray1: number[]): [number[], number[]] {
    for (const position of sparseIndexArray0) {
        denseValueArray0[position] /= denseValueArray1[position];
    }
    return [sparseIndexArray0, denseValueArray0];
}
// Assign constant * array1 into array0 at the selected positions.
public vectorSparseIndexDenseArrayAssignScaledTo(
    sparseIndexArray0: number[],
    denseValueArray0: number[],
    denseValueArray1: number[],
    constant: number): [number[], number[]] {
    for (const position of sparseIndexArray0) {
        denseValueArray0[position] = constant * denseValueArray1[position];
    }
    return [sparseIndexArray0, denseValueArray0];
}
// Add constant * array1 to array0 at the selected positions.
public vectorSparseIndexDenseArrayAddScaledTo(
    sparseIndexArray0: number[],
    denseValueArray0: number[],
    denseValueArray1: number[],
    constant: number): [number[], number[]] {
    for (const position of sparseIndexArray0) {
        denseValueArray0[position] += constant * denseValueArray1[position];
    }
    return [sparseIndexArray0, denseValueArray0];
}
// Multiply array0 by constant * array1 at the selected positions.
public vectorSparseIndexDenseArrayMultiplyScaledTo(
    sparseIndexArray0: number[],
    denseValueArray0: number[],
    denseValueArray1: number[],
    constant: number): [number[], number[]] {
    for (const position of sparseIndexArray0) {
        denseValueArray0[position] *= constant * denseValueArray1[position];
    }
    return [sparseIndexArray0, denseValueArray0];
}
// Subtract constant * array1 from array0 at the selected positions.
public vectorSparseIndexDenseArraySubtractScaledFrom(
    sparseIndexArray0: number[],
    denseValueArray0: number[],
    denseValueArray1: number[],
    constant: number): [number[], number[]] {
    for (const position of sparseIndexArray0) {
        denseValueArray0[position] -= constant * denseValueArray1[position];
    }
    return [sparseIndexArray0, denseValueArray0];
}
// Divide array0 by constant * array1 at the selected positions.
public vectorSparseIndexDenseArrayDivideScaledFrom(
    sparseIndexArray0: number[],
    denseValueArray0: number[],
    denseValueArray1: number[],
    constant: number): [number[], number[]] {
    for (const position of sparseIndexArray0) {
        denseValueArray0[position] /= constant * denseValueArray1[position];
    }
    return [sparseIndexArray0, denseValueArray0];
}
// ---- "Independent sparse index" operations: element k of sparseIndexArray0
// ---- addresses the target in array0 while element k of sparseIndexArray1
// ---- addresses the source in array1 (the two index lists are paired by k).
// Copy source positions of array1 into target positions of array0.
public vectorIndependentSparseIndexDenseArrayAssignTo(
    sparseIndexArray0: number[],
    denseValueArray0: number[],
    sparseIndexArray1: number[],
    denseValueArray1: number[]): [number[], number[]] {
    for (let k: number = 0; k < sparseIndexArray0.length; k++) {
        denseValueArray0[sparseIndexArray0[k]] = denseValueArray1[sparseIndexArray1[k]];
    }
    return [sparseIndexArray0, denseValueArray0];
}
// Add source positions of array1 to target positions of array0.
public vectorIndependentSparseIndexDenseArrayAddTo(
    sparseIndexArray0: number[],
    denseValueArray0: number[],
    sparseIndexArray1: number[],
    denseValueArray1: number[]): [number[], number[]] {
    for (let k: number = 0; k < sparseIndexArray0.length; k++) {
        denseValueArray0[sparseIndexArray0[k]] += denseValueArray1[sparseIndexArray1[k]];
    }
    return [sparseIndexArray0, denseValueArray0];
}
// Multiply target positions of array0 by source positions of array1.
public vectorIndependentSparseIndexDenseArrayMultiplyTo(
    sparseIndexArray0: number[],
    denseValueArray0: number[],
    sparseIndexArray1: number[],
    denseValueArray1: number[]): [number[], number[]] {
    for (let k: number = 0; k < sparseIndexArray0.length; k++) {
        denseValueArray0[sparseIndexArray0[k]] *= denseValueArray1[sparseIndexArray1[k]];
    }
    return [sparseIndexArray0, denseValueArray0];
}
// Subtract source positions of array1 from target positions of array0.
public vectorIndependentSparseIndexDenseArraySubtractFrom(
    sparseIndexArray0: number[],
    denseValueArray0: number[],
    sparseIndexArray1: number[],
    denseValueArray1: number[]): [number[], number[]] {
    for (let k: number = 0; k < sparseIndexArray0.length; k++) {
        denseValueArray0[sparseIndexArray0[k]] -= denseValueArray1[sparseIndexArray1[k]];
    }
    return [sparseIndexArray0, denseValueArray0];
}
// Divide target positions of array0 by source positions of array1.
public vectorIndependentSparseIndexDenseArrayDivideFrom(
    sparseIndexArray0: number[],
    denseValueArray0: number[],
    sparseIndexArray1: number[],
    denseValueArray1: number[]): [number[], number[]] {
    for (let k: number = 0; k < sparseIndexArray0.length; k++) {
        denseValueArray0[sparseIndexArray0[k]] /= denseValueArray1[sparseIndexArray1[k]];
    }
    return [sparseIndexArray0, denseValueArray0];
}
// Assign constant * source into target positions of array0.
public vectorIndependentSparseIndexDenseArrayAssignScaledTo(
    sparseIndexArray0: number[],
    denseValueArray0: number[],
    sparseIndexArray1: number[],
    denseValueArray1: number[],
    constant: number): [number[], number[]] {
    for (let k: number = 0; k < sparseIndexArray0.length; k++) {
        denseValueArray0[sparseIndexArray0[k]] = constant * denseValueArray1[sparseIndexArray1[k]];
    }
    return [sparseIndexArray0, denseValueArray0];
}
// Add constant * source to target positions of array0.
public vectorIndependentSparseIndexDenseArrayAddScaledTo(
    sparseIndexArray0: number[],
    denseValueArray0: number[],
    sparseIndexArray1: number[],
    denseValueArray1: number[],
    constant: number): [number[], number[]] {
    for (let k: number = 0; k < sparseIndexArray0.length; k++) {
        denseValueArray0[sparseIndexArray0[k]] += constant * denseValueArray1[sparseIndexArray1[k]];
    }
    return [sparseIndexArray0, denseValueArray0];
}
// Multiply target positions of array0 by constant * source.
public vectorIndependentSparseIndexDenseArrayMultiplyScaledTo(
    sparseIndexArray0: number[],
    denseValueArray0: number[],
    sparseIndexArray1: number[],
    denseValueArray1: number[],
    constant: number): [number[], number[]] {
    for (let k: number = 0; k < sparseIndexArray0.length; k++) {
        denseValueArray0[sparseIndexArray0[k]] *= constant * denseValueArray1[sparseIndexArray1[k]];
    }
    return [sparseIndexArray0, denseValueArray0];
}
// Subtract constant * source from target positions of array0.
public vectorIndependentSparseIndexDenseArraySubtractScaledFrom(
    sparseIndexArray0: number[],
    denseValueArray0: number[],
    sparseIndexArray1: number[],
    denseValueArray1: number[],
    constant: number): [number[], number[]] {
    for (let k: number = 0; k < sparseIndexArray0.length; k++) {
        denseValueArray0[sparseIndexArray0[k]] -= constant * denseValueArray1[sparseIndexArray1[k]];
    }
    return [sparseIndexArray0, denseValueArray0];
}
// Divide target positions of array0 by constant * source.
public vectorIndependentSparseIndexDenseArrayDivideScaledFrom(
    sparseIndexArray0: number[],
    denseValueArray0: number[],
    sparseIndexArray1: number[],
    denseValueArray1: number[],
    constant: number): [number[], number[]] {
    for (let k: number = 0; k < sparseIndexArray0.length; k++) {
        denseValueArray0[sparseIndexArray0[k]] /= constant * denseValueArray1[sparseIndexArray1[k]];
    }
    return [sparseIndexArray0, denseValueArray0];
}
public vectorSparseMapDenseArrayAssignRandomTo( sparseIndexArray0: number[], sparseValueArray0: number[], denseValueArray0: number[]): [number[], number[]] { for (const index of sparseIndexArray0) { denseValueArray0[index] = Utility.getRandomNumber(); } return [sparseIndexArray0, denseValueArray0]; } public vectorSparseMapDenseArrayAssignConstantTo( sparseIndexArray0: number[], sparseValueArray0: number[], denseValueArray0: number[], constant: number): [number[], number[]] { for (const index of sparseIndexArray0) { denseValueArray0[index] = constant; } return [sparseIndexArray0, denseValueArray0]; } public vectorSparseMapDenseArrayAddConstantTo( sparseIndexArray0: number[], sparseValueArray0: number[], denseValueArray0: number[], constant: number): [number[], number[]] { for (const index of sparseIndexArray0) { denseValueArray0[index] += constant; } return [sparseIndexArray0, denseValueArray0]; } public vectorSparseMapDenseArrayMultiplyConstantTo( sparseIndexArray0: number[], sparseValueArray0: number[], denseValueArray0: number[], constant: number): [number[], number[]] { for (const index of sparseIndexArray0) { denseValueArray0[index] *= constant; } return [sparseIndexArray0, denseValueArray0]; } public vectorSparseMapDenseArraySubtractConstantFrom( sparseIndexArray0: number[], sparseValueArray0: number[], denseValueArray0: number[], constant: number): [number[], number[]] { for (const index of sparseIndexArray0) { denseValueArray0[index] -= constant; } return [sparseIndexArray0, denseValueArray0]; } public vectorSparseMapDenseArrayDivideConstantFrom( sparseIndexArray0: number[], sparseValueArray0: number[], denseValueArray0: number[], constant: number): [number[], number[]] { for (const index of sparseIndexArray0) { denseValueArray0[index] /= constant; } return [sparseIndexArray0, denseValueArray0]; } public vectorSparseMapDenseArrayAssignTo( sparseIndexArray0: number[], sparseValueArray0: number[], denseValueArray0: number[], denseValueArray1: number[]): 
[number[], number[]] { for (const index of sparseIndexArray0) { denseValueArray0[index] = denseValueArray1[index]; } return [sparseIndexArray0, denseValueArray0]; } public vectorSparseMapDenseArrayAddTo( sparseIndexArray0: number[], sparseValueArray0: number[], denseValueArray0: number[], denseValueArray1: number[]): [number[], number[]] { for (const index of sparseIndexArray0) { denseValueArray0[index] += denseValueArray1[index]; } return [sparseIndexArray0, denseValueArray0]; } public vectorSparseMapDenseArrayMultiplyTo( sparseIndexArray0: number[], sparseValueArray0: number[], denseValueArray0: number[], denseValueArray1: number[]): [number[], number[]] { for (const index of sparseIndexArray0) { denseValueArray0[index] *= denseValueArray1[index]; } return [sparseIndexArray0, denseValueArray0]; } public vectorSparseMapDenseArraySubtractFrom( sparseIndexArray0: number[], sparseValueArray0: number[], denseValueArray0: number[], denseValueArray1: number[]): [number[], number[]] { for (const index of sparseIndexArray0) { denseValueArray0[index] -= denseValueArray1[index]; } return [sparseIndexArray0, denseValueArray0]; } public vectorSparseMapDenseArrayDivideFrom( sparseIndexArray0: number[], sparseValueArray0: number[], denseValueArray0: number[], denseValueArray1: number[]): [number[], number[]] { for (const index of sparseIndexArray0) { denseValueArray0[index] /= denseValueArray1[index]; } return [sparseIndexArray0, denseValueArray0]; } public vectorSparseMapDenseArrayAssignScaledTo( sparseIndexArray0: number[], sparseValueArray0: number[], denseValueArray0: number[], denseValueArray1: number[], constant: number): [number[], number[]] { for (const index of sparseIndexArray0) { denseValueArray0[index] = (constant * denseValueArray1[index]); } return [sparseIndexArray0, denseValueArray0]; } public vectorSparseMapDenseArrayAddScaledTo( sparseIndexArray0: number[], sparseValueArray0: number[], denseValueArray0: number[], denseValueArray1: number[], constant: number): 
[number[], number[]] { for (const index of sparseIndexArray0) { denseValueArray0[index] += (constant * denseValueArray1[index]); } return [sparseIndexArray0, denseValueArray0]; } public vectorSparseMapDenseArrayMultiplyScaledTo( sparseIndexArray0: number[], sparseValueArray0: number[], denseValueArray0: number[], denseValueArray1: number[], constant: number): [number[], number[]] { for (const index of sparseIndexArray0) { denseValueArray0[index] *= (constant * denseValueArray1[index]); } return [sparseIndexArray0, denseValueArray0]; } public vectorSparseMapDenseArraySubtractScaledFrom( sparseIndexArray0: number[], sparseValueArray0: number[], denseValueArray0: number[], denseValueArray1: number[], constant: number): [number[], number[]] { for (const index of sparseIndexArray0) { denseValueArray0[index] -= (constant * denseValueArray1[index]); } return [sparseIndexArray0, denseValueArray0]; } public vectorSparseMapDenseArrayDivideScaledFrom( sparseIndexArray0: number[], sparseValueArray0: number[], denseValueArray0: number[], denseValueArray1: number[], constant: number): [number[], number[]] { for (const index of sparseIndexArray0) { denseValueArray0[index] /= (constant * denseValueArray1[index]); } return [sparseIndexArray0, denseValueArray0]; } public vectorIndependentSparseMapDenseArrayAssignTo( sparseIndexArray0: number[], sparseValueArray0: number[], denseValueArray0: number[], sparseIndexArray1: number[], sparseValueArray1: number[], denseValueArray1: number[]): [number[], number[]] { for (let i: number = 0; i < sparseIndexArray0.length; i++) { const index0 = sparseIndexArray0[i]; const index1 = sparseIndexArray1[i]; denseValueArray0[index0] = denseValueArray1[index1]; } return [sparseIndexArray0, denseValueArray0]; } public vectorIndependentSparseMapDenseArrayAddTo( sparseIndexArray0: number[], sparseValueArray0: number[], denseValueArray0: number[], sparseIndexArray1: number[], sparseValueArray1: number[], denseValueArray1: number[]): [number[], number[]] { 
for (let i: number = 0; i < sparseIndexArray0.length; i++) { const index0 = sparseIndexArray0[i]; const index1 = sparseIndexArray1[i]; denseValueArray0[index0] += denseValueArray1[index1]; } return [sparseIndexArray0, denseValueArray0]; } public vectorIndependentSparseMapDenseArrayMultiplyTo( sparseIndexArray0: number[], sparseValueArray0: number[], denseValueArray0: number[], sparseIndexArray1: number[], sparseValueArray1: number[], denseValueArray1: number[]): [number[], number[]] { for (let i: number = 0; i < sparseIndexArray0.length; i++) { const index0 = sparseIndexArray0[i]; const index1 = sparseIndexArray1[i]; denseValueArray0[index0] *= denseValueArray1[index1]; } return [sparseIndexArray0, denseValueArray0]; } public vectorIndependentSparseMapDenseArraySubtractFrom( sparseIndexArray0: number[], sparseValueArray0: number[], denseValueArray0: number[], sparseIndexArray1: number[], sparseValueArray1: number[], denseValueArray1: number[]): [number[], number[]] { for (let i: number = 0; i < sparseIndexArray0.length; i++) { const index0 = sparseIndexArray0[i]; const index1 = sparseIndexArray1[i]; denseValueArray0[index0] -= denseValueArray1[index1]; } return [sparseIndexArray0, denseValueArray0]; } public vectorIndependentSparseMapDenseArrayDivideFrom( sparseIndexArray0: number[], sparseValueArray0: number[], denseValueArray0: number[], sparseIndexArray1: number[], sparseValueArray1: number[], denseValueArray1: number[]): [number[], number[]] { for (let i: number = 0; i < sparseIndexArray0.length; i++) { const index0 = sparseIndexArray0[i]; const index1 = sparseIndexArray1[i]; denseValueArray0[index0] /= denseValueArray1[index1]; } return [sparseIndexArray0, denseValueArray0]; } public vectorIndependentSparseMapDenseArrayAssignScaledTo( sparseIndexArray0: number[], sparseValueArray0: number[], denseValueArray0: number[], sparseIndexArray1: number[], sparseValueArray1: number[], denseValueArray1: number[], constant: number): [number[], number[]] { for (let i: 
number = 0; i < sparseIndexArray0.length; i++) { const index0 = sparseIndexArray0[i]; const index1 = sparseIndexArray1[i]; denseValueArray0[index0] = (constant * denseValueArray1[index1]); } return [sparseIndexArray0, denseValueArray0]; } public vectorIndependentSparseMapDenseArrayAddScaledTo( sparseIndexArray0: number[], sparseValueArray0: number[], denseValueArray0: number[], sparseIndexArray1: number[], sparseValueArray1: number[], denseValueArray1: number[], constant: number): [number[], number[]] { for (let i: number = 0; i < sparseIndexArray0.length; i++) { const index0 = sparseIndexArray0[i]; const index1 = sparseIndexArray1[i]; denseValueArray0[index0] += (constant * denseValueArray1[index1]); } return [sparseIndexArray0, denseValueArray0]; } public vectorIndependentSparseMapDenseArrayMultiplyScaledTo( sparseIndexArray0: number[], sparseValueArray0: number[], denseValueArray0: number[], sparseIndexArray1: number[], sparseValueArray1: number[], denseValueArray1: number[], constant: number): [number[], number[]] { for (let i: number = 0; i < sparseIndexArray0.length; i++) { const index0 = sparseIndexArray0[i]; const index1 = sparseIndexArray1[i]; denseValueArray0[index0] *= (constant * denseValueArray1[index1]); } return [sparseIndexArray0, denseValueArray0]; } public vectorIndependentSparseMapDenseArraySubtractScaledFrom( sparseIndexArray0: number[], sparseValueArray0: number[], denseValueArray0: number[], sparseIndexArray1: number[], sparseValueArray1: number[], denseValueArray1: number[], constant: number): [number[], number[]] { for (let i: number = 0; i < sparseIndexArray0.length; i++) { const index0 = sparseIndexArray0[i]; const index1 = sparseIndexArray1[i]; denseValueArray0[index0] -= (constant * denseValueArray1[index1]); } return [sparseIndexArray0, denseValueArray0]; } public vectorIndependentSparseMapDenseArrayDivideScaledFrom( sparseIndexArray0: number[], sparseValueArray0: number[], denseValueArray0: number[], sparseIndexArray1: number[], 
sparseValueArray1: number[], denseValueArray1: number[], constant: number): [number[], number[]] { for (let i: number = 0; i < sparseIndexArray0.length; i++) { const index0 = sparseIndexArray0[i]; const index1 = sparseIndexArray1[i]; denseValueArray0[index0] /= (constant * denseValueArray1[index1]); } return [sparseIndexArray0, denseValueArray0]; } public tensor4dNewLikeWithRandomCells( tensor4d: number[][][][]): number[][][][] { return this.tensor4dNewWithRandomCells( tensor4d.length, tensor4d[0].length, tensor4d[0][0].length, tensor4d[0][0][0].length); } public tensor4dNewLikeWithRandomCellsScaled( tensor4d: number[][][][], scale: number = 1): number[][][][] { return this.tensor4dNewWithRandomCellsScaled( tensor4d.length, tensor4d[0].length, tensor4d[0][0].length, tensor4d[0][0][0].length, scale); } public tensor4dNewLikeWithZeroCells( tensor4d: number[][][][]): number[][][][] { return this.tensor4dNewWithZeroCells( tensor4d.length, tensor4d[0].length, tensor4d[0][0].length, tensor4d[0][0][0].length); } public tensor4dNewLikeWithConstantCells( tensor4d: number[][][][], constant: number = 1): number[][][][] { return this.tensor4dNewWithConstantCells( tensor4d.length, tensor4d[0].length, tensor4d[0][0].length, tensor4d[0][0][0].length, constant); } public tensor4dNewLikeWithScaledCells( tensor4d: number[][][][], scale: number = 1): number[][][][] { return this.tensor4dNewWithScaledCells(tensor4d, scale); } public tensor4dNewLikeWithL1l2RegularizedSparseCells( tensor4d: number[][][][], l1Regularization: number = 0.01, l2Regularization: number = 0.01): number[][][][] { return this.tensor4dNewWithL1l2RegularizedSparseCells( tensor4d, l1Regularization, l2Regularization); } public tensor4dNewLikeWithL1l2RegularizedDenseCells( tensor4d: number[][][][], l1Regularization: number = 0.01, l2Regularization: number = 0.01): number[][][][] { return this.tensor4dNewWithL1l2RegularizedDenseCells( tensor4d, l1Regularization, l2Regularization); } public 
tensor3dNewLikeWithRandomCells( tensor3d: number[][][]): number[][][] { return this.tensor3dNewWithRandomCells( tensor3d.length, tensor3d[0].length, tensor3d[0][0].length); } public tensor3dNewLikeWithRandomCellsScaled( tensor3d: number[][][], scale: number = 1): number[][][] { return this.tensor3dNewWithRandomCellsScaled( tensor3d.length, tensor3d[0].length, tensor3d[0][0].length, scale); } public tensor3dNewLikeWithZeroCells( tensor3d: number[][][]): number[][][] { return this.tensor3dNewWithZeroCells( tensor3d.length, tensor3d[0].length, tensor3d[0][0].length); } public tensor3dNewLikeWithConstantCells( tensor3d: number[][][], constant: number = 1): number[][][] { return this.tensor3dNewWithConstantCells( tensor3d.length, tensor3d[0].length, tensor3d[0][0].length, constant); } public tensor3dNewLikeWithScaledCells( tensor3d: number[][][], scale: number = 1): number[][][] { return this.tensor3dNewWithScaledCells(tensor3d, scale); } public tensor3dNewLikeWithL1l2RegularizedSparseCells( tensor3d: number[][][], l1Regularization: number = 0.01, l2Regularization: number = 0.01): number[][][] { return this.tensor3dNewWithL1l2RegularizedSparseCells( tensor3d, l1Regularization, l2Regularization); } public tensor3dNewLikeWithL1l2RegularizedDenseCells( tensor3d: number[][][], l1Regularization: number = 0.01, l2Regularization: number = 0.01): number[][][] { return this.tensor3dNewWithL1l2RegularizedDenseCells( tensor3d, l1Regularization, l2Regularization); } public matrixNewLikeWithRandomCells( matrix: number[][]): number[][] { return this.matrixNewWithRandomCells(matrix.length, matrix[0].length); } public matrixNewLikeWithRandomCellsScaled( matrix: number[][], scale: number = 1): number[][] { return this.matrixNewWithRandomCellsScaled(matrix.length, matrix[0].length, scale); } public matrixNewLikeWithZeroCells( matrix: number[][]): number[][] { return this.matrixNewWithZeroCells(matrix.length, matrix[0].length); } public matrixNewLikeWithConstantCells( matrix: number[][], 
constant: number = 1): number[][] { return this.matrixNewWithConstantCells(matrix.length, matrix[0].length, constant); } public matrixNewLikeWithScaledCells( matrix: number[][], scale: number = 1): number[][] { return this.matrixNewWithScaledCells(matrix, scale); } public matrixNewLikeWithL1l2RegularizedSparseCells( matrix: number[][], l1Regularization: number = 0.01, l2Regularization: number = 0.01): number[][] { return this.matrixNewWithL1l2RegularizedSparseCells( matrix, l1Regularization, l2Regularization); } public matrixNewLikeWithL1l2RegularizedDenseCells( matrix: number[][], l1Regularization: number = 0.01, l2Regularization: number = 0.01): number[][] { return this.matrixNewWithL1l2RegularizedDenseCells( matrix, l1Regularization, l2Regularization); } public vectorNewLikeWithRandomElements( vector: number[]): number[] { return this.vectorNewWithRandomElements(vector.length); } public vectorNewLikeWithRandomElementsScaled( vector: number[], scale: number = 1): number[] { return this.vectorNewWithRandomElementsScaled(vector.length, scale); } public vectorNewLikeWithZeroElements( vector: number[]): number[] { return this.vectorNewWithZeroElements(vector.length); } public vectorNewLikeWithConstantElements( vector: number[], constant: number = 1): number[] { return this.vectorNewWithConstantElements(vector.length, constant); } public vectorNewLikeWithScaledElements( vector: number[], scale: number = 1): number[] { return this.vectorNewWithScaledElements(vector, scale); } public vectorNewLikeWithL1l2RegularizedSparseElements( vector: number[], l1Regularization: number = 0.01, l2Regularization: number = 0.01): number[] { return this.vectorNewWithL1l2RegularizedSparseElements( vector, l1Regularization, l2Regularization); } public vectorNewLikeWithL1l2RegularizedDenseElements( vector: number[], l1Regularization: number = 0.01, l2Regularization: number = 0.01): number[] { return this.vectorNewWithL1l2RegularizedDenseElements( vector, l1Regularization, 
l2Regularization); } public tensor4dNewWithRandomCells( rows: number, columns: number, dimension3ds: number, dimension4ds: number): number[][][][] { const tensor4d: number[][][][] = new Array<number[][][]>(rows); for (let row: number = 0; row < rows; row++) { tensor4d[row] = this.tensor3dNewWithRandomCells( columns, dimension3ds, dimension4ds); } return tensor4d; } public tensor4dNewWithRandomCellsScaled( rows: number, columns: number, dimension3ds: number, dimension4ds: number, scale: number = 1): number[][][][] { const tensor4d: number[][][][] = new Array<number[][][]>(rows); for (let row: number = 0; row < rows; row++) { tensor4d[row] = this.tensor3dNewWithRandomCellsScaled( columns, dimension3ds, dimension4ds, scale); } return tensor4d; } public tensor4dNewWithZeroCells( rows: number, columns: number, dimension3ds: number, dimension4ds: number): number[][][][] { const tensor4d: number[][][][] = new Array<number[][][]>(rows); for (let row: number = 0; row < rows; row++) { tensor4d[row] = this.tensor3dNewWithZeroCells( columns, dimension3ds, dimension4ds); } return tensor4d; } public tensor4dNewWithConstantCells( rows: number, columns: number, dimension3ds: number, dimension4ds: number, constant: number = 1): number[][][][] { const tensor4d: number[][][][] = new Array<number[][][]>(rows); for (let row: number = 0; row < rows; row++) { tensor4d[row] = this.tensor3dNewWithConstantCells( columns, dimension3ds, dimension4ds, constant); } return tensor4d; } public tensor4dNewWithScaledCells( existingTensor4d: number[][][][], scale: number = 1): number[][][][] { const rows: number = existingTensor4d.length; const tensor4d: number[][][][] = new Array<number[][][]>(rows); for (let row: number = 0; row < rows; row++) { tensor4d[row] = this.tensor3dNewWithScaledCells(existingTensor4d[row], scale); } return tensor4d; } public tensor4dNewWithL1l2RegularizedSparseCells( existingTensor4d: number[][][][], l1Regularization: number = 0.01, l2Regularization: number = 0.01): 
number[][][][] { const rows: number = existingTensor4d.length; const tensor4d: number[][][][] = new Array<number[][][]>(rows); for (let row: number = 0; row < rows; row++) { tensor4d[row] = this.tensor3dNewWithL1l2RegularizedSparseCells( existingTensor4d[row], l1Regularization, l2Regularization); } return tensor4d; } public tensor4dNewWithL1l2RegularizedDenseCells( existingTensor4d: number[][][][], l1Regularization: number = 0.01, l2Regularization: number = 0.01): number[][][][] { const rows: number = existingTensor4d.length; const tensor4d: number[][][][] = new Array<number[][][]>(rows); for (let row: number = 0; row < rows; row++) { tensor4d[row] = this.tensor3dNewWithL1l2RegularizedDenseCells( existingTensor4d[row], l1Regularization, l2Regularization); } return tensor4d; } public tensor3dNewWithRandomCells( rows: number, columns: number, dimension3ds: number): number[][][] { const tensor3d: number[][][] = new Array<number[][]>(rows); for (let row: number = 0; row < rows; row++) { tensor3d[row] = this.matrixNewWithRandomCells(columns, dimension3ds); } return tensor3d; } public tensor3dNewWithRandomCellsScaled( rows: number, columns: number, dimension3ds: number, scale: number = 1): number[][][] { const tensor3d: number[][][] = new Array<number[][]>(rows); for (let row: number = 0; row < rows; row++) { tensor3d[row] = this.matrixNewWithRandomCellsScaled(columns, dimension3ds, scale); } return tensor3d; } public tensor3dNewWithZeroCells( rows: number, columns: number, dimension3ds: number): number[][][] { const tensor3d: number[][][] = new Array<number[][]>(rows); for (let row: number = 0; row < rows; row++) { tensor3d[row] = this.matrixNewWithZeroCells(columns, dimension3ds); } return tensor3d; } public tensor3dNewWithConstantCells( rows: number, columns: number, dimension3ds: number, constant: number = 1): number[][][] { const tensor3d: number[][][] = new Array<number[][]>(rows); for (let row: number = 0; row < rows; row++) { tensor3d[row] = 
this.matrixNewWithConstantCells(columns, dimension3ds, constant); } return tensor3d; } public tensor3dNewWithScaledCells( existingTensor3d: number[][][], scale: number = 1): number[][][] { const rows: number = existingTensor3d.length; const tensor3d: number[][][] = new Array<number[][]>(rows); for (let row: number = 0; row < rows; row++) { tensor3d[row] = this.matrixNewWithScaledCells(existingTensor3d[row], scale); } return tensor3d; } public tensor3dNewWithL1l2RegularizedSparseCells( existingTensor3d: number[][][], l1Regularization: number = 0.01, l2Regularization: number = 0.01): number[][][] { const rows: number = existingTensor3d.length; const tensor3d: number[][][] = new Array<number[][]>(rows); for (let row: number = 0; row < rows; row++) { tensor3d[row] = this.matrixNewWithL1l2RegularizedSparseCells( existingTensor3d[row], l1Regularization, l2Regularization); } return tensor3d; } public tensor3dNewWithL1l2RegularizedDenseCells( existingTensor3d: number[][][], l1Regularization: number = 0.01, l2Regularization: number = 0.01): number[][][] { const rows: number = existingTensor3d.length; const tensor3d: number[][][] = new Array<number[][]>(rows); for (let row: number = 0; row < rows; row++) { tensor3d[row] = this.matrixNewWithL1l2RegularizedDenseCells( existingTensor3d[row], l1Regularization, l2Regularization); } return tensor3d; } public matrixNewWithRandomCells( rows: number, columns: number): number[][] { const matrix: number[][] = new Array<number[]>(rows); for (let row: number = 0; row < rows; row++) { matrix[row] = this.vectorNewWithRandomElements(columns); } return matrix; } public matrixNewWithRandomCellsScaled( rows: number, columns: number, scale: number = 1): number[][] { const matrix: number[][] = new Array<number[]>(rows); for (let row: number = 0; row < rows; row++) { matrix[row] = this.vectorNewWithRandomElementsScaled(columns, scale); } return matrix; } public matrixNewWithZeroCells( rows: number, columns: number): number[][] { const matrix: 
number[][] = new Array<number[]>(rows); for (let row: number = 0; row < rows; row++) { matrix[row] = this.vectorNewWithZeroElements(columns); } return matrix; } public matrixNewWithConstantCells( rows: number, columns: number, constant: number = 1): number[][] { const matrix: number[][] = new Array<number[]>(rows); for (let row: number = 0; row < rows; row++) { matrix[row] = this.vectorNewWithConstantElements(columns, constant); } return matrix; } public matrixNewWithScaledCells( existingMatrix: number[][], scale: number = 1): number[][] { const rows: number = existingMatrix.length; const matrix: number[][] = new Array<number[]>(rows); for (let row: number = 0; row < rows; row++) { matrix[row] = this.vectorNewWithScaledElements(existingMatrix[row], scale); } return matrix; } public matrixNewWithL1l2RegularizedSparseCells( existingMatrix: number[][], l1Regularization: number = 0.01, l2Regularization: number = 0.01): number[][] { const rows: number = existingMatrix.length; const matrix: number[][] = new Array<number[]>(rows); for (let row: number = 0; row < rows; row++) { matrix[row] = this.vectorNewWithL1l2RegularizedSparseElements( existingMatrix[row], l1Regularization, l2Regularization); } return matrix; } public matrixNewWithL1l2RegularizedDenseCells( existingMatrix: number[][], l1Regularization: number = 0.01, l2Regularization: number = 0.01): number[][] { const rows: number = existingMatrix.length; const matrix: number[][] = new Array<number[]>(rows); for (let row: number = 0; row < rows; row++) { matrix[row] = this.vectorNewWithL1l2RegularizedDenseElements( existingMatrix[row], l1Regularization, l2Regularization); } return matrix; } public vectorNewWithRandomElements( length: number): number[] { const vector: number[] = new Array<number>(length); for (let i: number = 0; i < length; i++) { vector[i] = Utility.getRandomNumber(); } return vector; } public vectorNewWithRandomElementsScaled( length: number, scale: number = 1): number[] { const vector: number[] = 
new Array<number>(length); for (let i: number = 0; i < length; i++) { vector[i] = Utility.getRandomNumber() * scale; } return vector; } public vectorNewWithZeroElements( length: number): number[] { const vector: number[] = new Array<number>(length); for (let i: number = 0; i < length; i++) { vector[i] = 0; } return vector; } public vectorNewWithConstantElements( length: number, constant: number = 1): number[] { const vector: number[] = new Array<number>(length); for (let i: number = 0; i < length; i++) { vector[i] = constant; } return vector; } public vectorNewWithScaledElements( existingVector: number[], scale: number = 1): number[] { const length: number = existingVector.length; const vector: number[] = new Array<number>(length); for (let i: number = 0; i < length; i++) { vector[i] = existingVector[i] * scale; } return vector; } public vectorNewWithL1l2RegularizedSparseElements( existingVector: number[], l1Regularization: number = 0.01, l2Regularization: number = 0.01): number[] { const length: number = existingVector.length; const vector: number[] = new Array<number>(length); for (let i: number = 0; i < length; i++) { const regularized: number = this.getL1l2RegularizedWeightOptimizedSparse( existingVector[i], l1Regularization, l2Regularization); vector[i] = regularized; } return vector; } public vectorNewWithL1l2RegularizedDenseElements( existingVector: number[], l1Regularization: number = 0.01, l2Regularization: number = 0.01): number[] { const length: number = existingVector.length; const vector: number[] = new Array<number>(length); for (let i: number = 0; i < length; i++) { const regularized: number = this.getL1l2RegularizedWeightOptimizedDense( existingVector[i], l1Regularization, l2Regularization); vector[i] = regularized; } return vector; } public getIndexesOnMaxOrEntriesOverThresholdOnArray( inputArray: Float32Array | Int32Array | Uint8Array, threshold: number): { "indexesMax": number[]; "max": number } { if 
(Utility.isEmptyNumberF32I32U8Array(inputArray)) {
            Utility.debuggingThrow("inputArray is empty");
        }
        const indexesOverTheThreshold: number[] = [];
        let indexesMax: number[] = [0];
        let max: number = inputArray[0];
        // BUG FIX: the original scan started at i = 1, so element 0 was never
        // tested against the threshold (it could only surface as a max index).
        if (inputArray[0] > threshold) {
            indexesOverTheThreshold.push(0);
        }
        for (let i: number = 1; i < inputArray.length; i++) {
            const inputCurrent: number = inputArray[i];
            if (inputCurrent > threshold) {
                indexesOverTheThreshold.push(i);
            }
            if (inputCurrent > max) {
                max = inputCurrent;
                indexesMax = [i];
                continue;
            }
            if (inputCurrent === max) {
                indexesMax.push(i);
            }
        }
        // Prefer the above-threshold indexes; fall back to the max indexes.
        if (indexesOverTheThreshold.length > 0) {
            indexesMax = indexesOverTheThreshold;
        }
        return {indexesMax, max};
    }
    /**
     * number[] variant: returns indexes of entries strictly above `threshold`,
     * or the indexes of the maximum entries when nothing exceeds it, plus the max.
     */
    public getIndexesOnMaxOrEntriesOverThreshold(
        inputArray: number[],
        threshold: number): { "indexesMax": number[]; "max": number } {
        if (Utility.isEmptyNumberArray(inputArray)) {
            Utility.debuggingThrow("inputArray is empty");
        }
        const indexesOverTheThreshold: number[] = [];
        let indexesMax: number[] = [0];
        let max: number = inputArray[0];
        // BUG FIX: include element 0 in the threshold test (original skipped it).
        if (inputArray[0] > threshold) {
            indexesOverTheThreshold.push(0);
        }
        for (let i: number = 1; i < inputArray.length; i++) {
            const inputCurrent: number = inputArray[i];
            if (inputCurrent > threshold) {
                indexesOverTheThreshold.push(i);
            }
            if (inputCurrent > max) {
                max = inputCurrent;
                indexesMax = [i];
                continue;
            }
            if (inputCurrent === max) {
                indexesMax.push(i);
            }
        }
        if (indexesOverTheThreshold.length > 0) {
            indexesMax = indexesOverTheThreshold;
        }
        return {indexesMax, max};
    }
    /**
     * Returns all indexes holding the maximum value of a typed array, and the maximum.
     */
    public getIndexesOnMaxEntriesOnArray(
        inputArray: Float32Array | Int32Array | Uint8Array): { "indexesMax": number[], "max": number } {
        if (Utility.isEmptyNumberF32I32U8Array(inputArray)) {
            Utility.debuggingThrow("inputArray is empty");
        }
        let indexesMax: number[] = [0];
        let max: number = inputArray[0];
        for (let i: number = 1; i < inputArray.length; i++) {
            const inputCurrent: number = inputArray[i];
            if (inputCurrent > max) {
                max = inputCurrent;
                indexesMax = [i];
                continue;
            }
            if (inputCurrent === max) {
                indexesMax.push(i);
            }
        }
        return { indexesMax, max };
    }
    /**
     * Returns all indexes holding the maximum value of a number[], and the maximum.
     */
    public getIndexesOnMaxEntries(
        inputArray: number[]): { "indexesMax": number[], "max": number } {
        if (Utility.isEmptyNumberArray(inputArray)) {
            Utility.debuggingThrow("inputArray is empty");
        }
        let indexesMax: number[] = [0];
        let max: number = inputArray[0];
        for (let i: number = 1; i < inputArray.length; i++) {
            const inputCurrent: number = inputArray[i];
            if (inputCurrent > max) {
                max = inputCurrent;
                indexesMax = [i];
                continue;
            }
            if (inputCurrent === max) {
                indexesMax.push(i);
            }
        }
        return { indexesMax, max };
    }
    /**
     * Returns the FIRST index holding the maximum value of a typed array (strict `>`).
     */
    public getIndexOnFirstMaxEntryOnArray(
        inputArray: Float32Array | Int32Array | Uint8Array): { "indexMax": number, "max": number } {
        if (Utility.isEmptyNumberF32I32U8Array(inputArray)) {
            Utility.debuggingThrow("inputArray is empty");
        }
        let indexMax: number = 0;
        let max: number = inputArray[0];
        for (let i: number = 1; i < inputArray.length; i++) {
            const inputCurrent: number = inputArray[i];
            if (inputCurrent > max) {
                max = inputCurrent;
                indexMax = i;
            }
        }
        return { indexMax, max };
    }
    /**
     * Returns the LAST index holding the maximum value of a typed array (`>=` keeps ties).
     */
    public getIndexOnLastMaxEntryOnArray(
        inputArray: Float32Array | Int32Array | Uint8Array): { "indexMax": number, "max": number } {
        if (Utility.isEmptyNumberF32I32U8Array(inputArray)) {
            Utility.debuggingThrow("inputArray is empty");
        }
        let indexMax: number = 0;
        let max: number = inputArray[0];
        for (let i: number = 1; i < inputArray.length; i++) {
            const inputCurrent: number = inputArray[i];
            if (inputCurrent >= max) {
                max = inputCurrent;
                indexMax = i;
            }
        }
        return { indexMax, max };
    }
    /**
     * Returns the FIRST index holding the maximum value of a number[].
     */
    public getIndexOnFirstMaxEntry(
        inputArray: number[]): { "indexMax": number, "max": number } {
        if (Utility.isEmptyNumberArray(inputArray)) {
            Utility.debuggingThrow("inputArray is empty");
        }
        let indexMax: number = 0;
        let max: number = inputArray[0];
        for (let i: number = 1; i < inputArray.length; i++) {
            const inputCurrent: number = inputArray[i];
            if (inputCurrent > max) {
                max = inputCurrent;
                indexMax = i;
            }
        }
        return { indexMax, max };
    }
    /**
     * Returns the LAST index holding the maximum value of a number[].
     */
    public getIndexOnLastMaxEntry(
        inputArray: number[]): { "indexMax": number, "max": number } {
        if (Utility.isEmptyNumberArray(inputArray)) {
            Utility.debuggingThrow("inputArray is empty");
        }
        let indexMax: number = 0;
        let max: number = inputArray[0];
        for (let i: number = 1; i < inputArray.length; i++) {
            const inputCurrent: number = inputArray[i];
            if (inputCurrent >= max) {
                max = inputCurrent;
                indexMax = i;
            }
        }
        return { indexMax, max };
    }
    /**
     * Typed-array variant: returns indexes of entries strictly below `threshold`,
     * or the indexes of the minimum entries when nothing is below it, plus the min.
     */
    public getIndexesOnMinOrEntriesLessThanThresholdOnArray(
        inputArray: Float32Array | Int32Array | Uint8Array,
        threshold: number): { "indexesMin": number[]; "min": number } {
        if (Utility.isEmptyNumberF32I32U8Array(inputArray)) {
            Utility.debuggingThrow("inputArray is empty");
        }
        const indexesUnderTheThreshold: number[] = [];
        let indexesMin: number[] = [0];
        let min: number = inputArray[0];
        // BUG FIX: include element 0 in the threshold test (original skipped it).
        if (inputArray[0] < threshold) {
            indexesUnderTheThreshold.push(0);
        }
        for (let i: number = 1; i < inputArray.length; i++) {
            const inputCurrent: number = inputArray[i];
            if (inputCurrent < threshold) {
                indexesUnderTheThreshold.push(i);
            }
            if (inputCurrent < min) {
                min = inputCurrent;
                indexesMin = [i];
                continue;
            }
            if (inputCurrent === min) {
                indexesMin.push(i);
            }
        }
        if (indexesUnderTheThreshold.length > 0) {
            indexesMin = indexesUnderTheThreshold;
        }
        return {indexesMin, min};
    }
    /**
     * number[] variant: returns indexes of entries strictly below `threshold`,
     * or the indexes of the minimum entries when nothing is below it, plus the min.
     */
    public getIndexesOnMinOrEntriesLessThanThreshold(
        inputArray: number[],
        threshold: number): { "indexesMin": number[]; "min": number } {
        if (Utility.isEmptyNumberArray(inputArray)) {
            Utility.debuggingThrow("inputArray is empty");
        }
        const indexesUnderTheThreshold: number[] = [];
        let indexesMin: number[] = [0];
        let min: number = inputArray[0];
        // BUG FIX: include element 0 in the threshold test (original skipped it).
        if (inputArray[0] < threshold) {
            indexesUnderTheThreshold.push(0);
        }
        for (let i: number = 1; i < inputArray.length; i++) {
            const inputCurrent: number = inputArray[i];
            if (inputCurrent < threshold) {
                indexesUnderTheThreshold.push(i);
            }
            if (inputCurrent < min) {
                min = inputCurrent;
                indexesMin = [i];
                continue;
            }
            if (inputCurrent === min) {
                indexesMin.push(i);
            }
        }
        if (indexesUnderTheThreshold.length > 0) {
            indexesMin = indexesUnderTheThreshold;
        }
        return {indexesMin, min};
    }
    /**
     * Returns all indexes holding the minimum value of a typed array, and the minimum.
     */
    public getIndexesOnMinEntriesOnArray(
        inputArray: Float32Array | Int32Array | Uint8Array): { "indexesMin": number[], "min": number } {
        if (Utility.isEmptyNumberF32I32U8Array(inputArray)) {
            Utility.debuggingThrow("inputArray is empty");
        }
        let indexesMin: number[] = [0];
        let min: number = inputArray[0];
        for (let i: number = 1; i < inputArray.length; i++) {
            const inputCurrent: number = inputArray[i];
            if (inputCurrent < min) {
                min = inputCurrent;
                indexesMin = [i];
                continue;
            }
            if (inputCurrent === min) {
                indexesMin.push(i);
            }
        }
        return { indexesMin, min };
    }
    /**
     * Returns all indexes holding the minimum value of a number[], and the minimum.
     */
    public getIndexesOnMinEntries(
        inputArray: number[]): { "indexesMin": number[], "min": number } {
        if (Utility.isEmptyNumberArray(inputArray)) {
            Utility.debuggingThrow("inputArray is empty");
        }
        let indexesMin: number[] = [0];
        let min: number = inputArray[0];
        for (let i: number = 1; i < inputArray.length; i++) {
            const inputCurrent: number = inputArray[i];
            if (inputCurrent < min) {
                min = inputCurrent;
                indexesMin = [i];
                continue;
            }
            if (inputCurrent === min) {
                indexesMin.push(i);
            }
        }
        return { indexesMin, min };
    }
    /**
     * Returns the FIRST index holding the minimum value of a typed array (strict `<`).
     */
    public getIndexOnFirstMinEntryOnArray(
        inputArray: Float32Array | Int32Array | Uint8Array): { "indexMin": number, "min": number } {
        if (Utility.isEmptyNumberF32I32U8Array(inputArray)) {
            Utility.debuggingThrow("inputArray is empty");
        }
        let indexMin: number = 0;
        let min: number = inputArray[0];
        for (let i: number = 1; i < inputArray.length; i++) {
            const inputCurrent: number = inputArray[i];
            if (inputCurrent < min) {
                min = inputCurrent;
                indexMin = i;
            }
        }
        return { indexMin, min };
    }
    /**
     * Returns the LAST index holding the minimum value of a typed array (`<=` keeps ties).
     */
    public getIndexOnLastMinEntryOnArray(
        inputArray: Float32Array | Int32Array | Uint8Array): { "indexMin": number, "min": number } {
        if (Utility.isEmptyNumberF32I32U8Array(inputArray)) {
            Utility.debuggingThrow("inputArray is empty");
        }
        let indexMin: number = 0;
        let min: number = inputArray[0];
        for (let i: number = 1; i < inputArray.length; i++) {
            const inputCurrent: number = inputArray[i];
            if (inputCurrent <= min) {
                min = inputCurrent;
                indexMin = i;
            }
        }
        return { indexMin, min };
    }
    /**
     * Returns the FIRST index holding the minimum value of a number[].
     */
    public getIndexOnFirstMinEntry(
        inputArray: number[]): { "indexMin": number, "min": number } {
        if (Utility.isEmptyNumberArray(inputArray)) {
            Utility.debuggingThrow("inputArray is empty");
        }
        let indexMin: number = 0;
        let min: number = inputArray[0];
        for (let i: number = 1; i < inputArray.length; i++) {
            const inputCurrent: number = inputArray[i];
            if (inputCurrent < min) {
                min = inputCurrent;
                indexMin = i;
            }
        }
        return { indexMin, min };
    }
    /**
     * Returns the LAST index holding the minimum value of a number[].
     */
    public getIndexOnLastMinEntry(
        inputArray: number[]): { "indexMin": number, "min": number } {
        if (Utility.isEmptyNumberArray(inputArray)) {
            Utility.debuggingThrow("inputArray is empty");
        }
        let indexMin: number = 0;
        let min: number = inputArray[0];
        for (let i: number = 1; i < inputArray.length; i++) {
            const inputCurrent: number = inputArray[i];
            if (inputCurrent <= min) {
                min = inputCurrent;
                indexMin = i;
            }
        }
        return { indexMin, min };
    }
    /**
     * Division that maps 0/x (including 0/0) to 0; a non-zero numerator over a
     * zero denominator still yields +/-Infinity per IEEE semantics.
     */
    public safeDivide(numerator: number, denominator: number): number {
        if (numerator === 0) {
            return 0;
        }
        return (numerator / denominator);
    }
    /**
     * Logarithm guarded against non-positive input: negative -> NaN,
     * zero -> Number.MIN_VALUE.
     * NOTE(review): returning Number.MIN_VALUE (~5e-324, a tiny POSITIVE value)
     * for log(0) looks suspicious — Math.log(Number.MIN_VALUE) (~-744) was
     * possibly intended. Preserved as-is for backward compatibility; confirm
     * against callers before changing.
     */
    public safeLog(value: number): number {
        if (value < 0) {
            return Number.NaN;
        }
        if (value === 0) {
            return Number.MIN_VALUE;
        }
        return Math.log(value);
    }
    /**
     * Clamps a value into the open interval (epsilon, epsilonUp), e.g. to keep
     * probabilities strictly inside (0, 1) before taking logs.
     */
    public clipValue(value: number): number {
        if (value <= 0) {
            return MathematicsHelper.epsilon;
        }
        if (value >= 1) {
            return MathematicsHelper.epsilonUp;
        }
        return value;
    }
    /**
     * Subtraction that snaps tiny negative results (rounding error above
     * epsilonMinuteNegative) to 0; larger negative differences pass through.
     */
    public safeZeroSmallNegativeErrorSubtract(value0: number, value1: number): number {
        const difference: number = value0 - value1;
        if (difference >= 0) {
            return difference;
        }
        if (difference > MathematicsHelper.epsilonMinuteNegative) {
            return 0;
        }
        return difference;
    }
}
the_stack
import {v4} from 'uuid';
import {memoize, throttle} from 'lodash';
import {promiseSleep} from './promiseSleep';
import {CommsNode, CommsNodeCallbacks, CommsNodeOptions, SendToOptions} from './commsNode';
import {closeMessage} from './peerMessageHandler';

// Message types exchanged through the relay server.
export enum McastMessageType {
    connect = 'connect',
    otherPeers = 'otherPeers',
    data = 'data',
    close = 'close'
}

// Wire format of a message posted to / fetched from the relay server.
interface McastMessage {
    peerId: string;
    userId: string;
    type: McastMessageType;
    // When present, only the listed peers should act on the message; otherwise it is a broadcast.
    recipientIds?: string[];
    payload?: any;
}

// Book-keeping for a known peer: when we last heard from them, and who owns them.
interface McastConnection {
    timestamp: number;
    userId: string;
}

/**
 * A node in a multicast network. Uses gtove-relay-server to do the multicasting.
 */
export class McastNode extends CommsNode {

    static MCAST_URL = 'https://radiant-thicket-18054.herokuapp.com/mcast/';

    private signalChannelId: string;
    private readonly onEvents: CommsNodeCallbacks;
    // Peers we have established a connection with, keyed by peerId.
    private connectedPeers: {[key: string]: McastConnection};
    // Peers we refuse to talk to (includes our own peerId so we ignore our own broadcasts).
    private ignoredPeers: {[key: string]: boolean};
    private readonly memoizedThrottle: (key: string, func: (...args: any[]) => any) => (...args: any[]) => any;
    // Last relay-server sequence id seen; sent back on GET so the server only returns newer messages.
    private sequenceId: number | null = null;
    private requestOffersInterval: number;

    /**
     * @param signalChannelId The unique string used to identify the multi-cast channel on gtove-relay-server. All
     * McastNodes with the same signalChannelId will signal each other and connect.
     * @param userId A string uniquely identifying the owner of this node. A single user can own multiple nodes across
     * the network.
     * @param commsNodeOptions The options this node is initialised with
     */
    constructor(signalChannelId: string, userId: string, commsNodeOptions: CommsNodeOptions) {
        super();
        this.signalChannelId = signalChannelId;
        this.options = commsNodeOptions;
        this.onEvents = commsNodeOptions.onEvents || {};
        this.connectedPeers = {};
        this.peerId = v4();
        this.ignoredPeers = {[this.peerId]: true};
        this.userId = userId;
        const throttleWait = commsNodeOptions.throttleWait || 250;
        console.log(`Created multi-cast node for user ${this.userId} with id ${this.peerId}`);
        // Create a memoized throttle function wrapper.  Calls with the same (truthy) throttleKey will be throttled so
        // the function is called at most once each throttleWait milliseconds.  This is used to wrap the send function,
        // so things like dragging minis doesn't flood the connection - since each position update supersedes the
        // previous one, we don't need to send every intermediate value.
        this.memoizedThrottle = memoize((throttleKey, func) => (throttle(func, throttleWait)));
        this.sendToRaw = this.sendToRaw.bind(this);
    }

    // Start listening, announce ourselves, then begin the heartbeat.  Returns the (long-lived) listen promise.
    async init() {
        const listenPromise = this.listen();
        await this.sendConnectMessage();
        this.startHeartbeat();
        return listenPromise;
    }

    // Idempotent: only sets up the interval timer on the first call.
    startHeartbeat() {
        if (!this.requestOffersInterval) {
            this.requestOffersInterval = window.setInterval(this.heartbeat.bind(this), CommsNode.HEARTBEAT_INTERVAL_MS);
        }
    }

    async heartbeat() {
        // Periodically send connect message, which also lets connected peers know I'm still active.
        await this.sendConnectMessage();
        // Check if any peers have failed to signal for a while.
        const timeout = Date.now() - 2 * CommsNode.HEARTBEAT_INTERVAL_MS;
        for (let peerId of Object.keys(this.connectedPeers)) {
            if (this.connectedPeers[peerId].timestamp < timeout) {
                // peer is idle - time it out.
                console.warn(`Peer ${peerId} sent last message ${(Date.now() - this.connectedPeers[peerId].timestamp)/1000} seconds ago - destroying it.`);
                this.destroyPeer(peerId);
            }
        }
    }

    // Long-poll the relay server for the next message, tracking the sequence id from the response header.
    async getFromMcastServer(): Promise<McastMessage> {
        const response = await fetch(`${McastNode.MCAST_URL}${this.signalChannelId}${this.sequenceId !== null ? `?sequenceId=${this.sequenceId}` : ''}`, {
            cache: 'no-store'
        });
        if (response.ok) {
            this.sequenceId = Number(response.headers.get('x-relay-sequenceId'));
            return response.json();
        } else {
            throw new Error('invalid response on GET from mcast server: ' + response.statusText);
        }
    }

    // POST a message to our channel on the relay server; throws on a non-ok response.
    async postToMcastServer(body: McastMessage): Promise<void> {
        const response = await fetch(`${McastNode.MCAST_URL}${this.signalChannelId}`, {
            method: 'POST',
            body: JSON.stringify(body)
        });
        if (!response.ok) {
            throw new Error('invalid response on POST to mcast server: ' + response.statusText);
        }
    }

    // Broadcast a "connect" announcement; doubles as our keep-alive (see heartbeat).
    async sendConnectMessage() {
        await this.postToMcastServer({type: McastMessageType.connect, peerId: this.peerId, userId: this.userId});
    }

    /**
     * Listens for messages from the mcast server, gtove-relay-server.  The messages are JSON McastMessage objects.
     *
     * @return {Promise} A promise which continues to listen for future signalling messages.
     */
    async listen(): Promise<void> {
        while (!this.shutdown) {
            try {
                const message = await this.getFromMcastServer();
                if (this.shutdown || !message.type || this.ignoredPeers[message.peerId]) {
                    // Ignore message if shut down, or without a type (caused by a timeout), or from an ignored peer
                    continue;
                }
                if (message.recipientIds === undefined || message.recipientIds!.indexOf(this.peerId) >= 0) {
                    // A message I'm potentially interested in.
                    await this.onEvent(message.type, message.peerId, message.userId, message.payload);
                }
                if (!this.ignoredPeers[message.peerId] && !this.connectedPeers[message.peerId]) {
                    // If the message is from a peerId we don't know, send another "connect" message
                    await this.sendConnectMessage();
                }
                if (this.connectedPeers[message.peerId]) {
                    // Update timestamp, so they aren't timed out.
                    this.connectedPeers[message.peerId].timestamp = Date.now();
                }
            } catch (err) {
                // On any network/parse error, log and back off before polling again.
                console.error(err);
                await promiseSleep(5000);
            }
        }
    }

    // Register a newly seen peer, unless the shouldConnect callback vetoes it (then ignore the peer permanently).
    // Returns true only when the peer was newly added to connectedPeers.
    handleNewConnection(peerId: string, userId: string) {
        if (!this.ignoredPeers[peerId] && !this.connectedPeers[peerId]) {
            if (this.onEvents.shouldConnect && !this.onEvents.shouldConnect(this, peerId, userId)) {
                this.ignoredPeers[peerId] = true;
            } else {
                console.log('Established connection with', peerId);
                this.connectedPeers[peerId] = {
                    timestamp: Date.now(),
                    userId
                };
                return true;
            }
        }
        return false;
    }

    // Dispatch an incoming message: built-in handling first, then any custom callback for the type.
    async onEvent(type: McastMessageType, senderId: string, userId: string, payload: any): Promise<void> {
        // Do in-built actions first.
        switch (type) {
            case McastMessageType.connect:
                if (this.handleNewConnection(senderId, userId)) {
                    // New connection - tell them the already connected peers.
                    await this.postToMcastServer({
                        type: McastMessageType.otherPeers,
                        recipientIds: [senderId],
                        peerId: this.peerId,
                        userId: this.userId,
                        payload: Object.keys(this.connectedPeers)
                            .map((peerId) => ({peerId, userId: this.connectedPeers[peerId].userId}))
                            .concat({peerId: this.peerId, userId: this.userId})
                    });
                } else {
                    // Known (or vetoed) peer - nothing more to do, and skip custom events too.
                    return;
                }
                break;
            case McastMessageType.otherPeers:
                // Payload is the sender's list of known peers; connect to any we don't know yet.
                const connectedPeers = payload as {peerId: string, userId: string}[];
                for (let peer of connectedPeers) {
                    if (this.handleNewConnection(peer.peerId, peer.userId)) {
                        await this.doCustomEvents(McastMessageType.connect, peer.peerId, null);
                    }
                }
                break;
            case McastMessageType.close:
                this.onClose(senderId);
                break;
            default:
                break;
        }
        await this.doCustomEvents(type, senderId, payload);
    }

    async doCustomEvents(type: McastMessageType, senderId: string, payload: any): Promise<void> {
        // Perform any custom user actions for the given message type
        if (this.onEvents[type]) {
            await this.onEvents[type](this, senderId, payload);
        }
    }

    // Forget a peer entirely: drop its connection entry and un-ignore it (so it may reconnect later).
    onClose(peerId: string) {
        if (this.connectedPeers[peerId]) {
            console.log('Lost connection with', peerId);
            delete(this.connectedPeers[peerId]);
        }
        delete(this.ignoredPeers[peerId]);
    }

    // Tear down a peer locally and fire the custom "close" handler for it.
    async destroyPeer(peerId: string) {
        this.onClose(peerId);
        await this.doCustomEvents(McastMessageType.close, peerId, null);
    }

    // Unthrottled send: posts a data message addressed to recipientIds, then invokes onSentMessage if given.
    private async sendToRaw(message: string | object, recipientIds: string[], onSentMessage?: (recipients: string[], message: string | object) => void): Promise<void> {
        // JSON has no "undefined" value, so if JSON-stringifying, convert undefined values to null.
        const payload: string = (typeof(message) === 'object') ?
            JSON.stringify(message, (k, v) => (v === undefined ? null : v)) : message;
        await this.postToMcastServer({
            type: McastMessageType.data,
            recipientIds,
            peerId: this.peerId,
            userId: this.userId,
            payload
        });
        onSentMessage && onSentMessage(recipientIds, message);
    }

    /**
     * Send a message to peers on the network.
     *
     * @param message The message to send.  If it is an object, it will be JSON.stringified.
     * @param only (optional) Array of peerIds to receive the message.  If omitted, sends the message to all connected
     * peers (except any listed in except)
     * @param except (optional) Array of peerIds who should not receive the message.
     * @param throttleKey (optional) If specified, messages with the same throttleKey are throttled so only one message
     * is actually sent every throttleWait milliseconds - calling sendTo more frequently than that will discard
     * messages.  The last message is always delivered.  Only use this for sending messages which supersede previous
     * messages with the same throttleKey value, such as updating an object's position using absolute coordinates.
     * @param onSentMessage (optional) Function that will be called after messages have been sent, with the list of
     * peerId recipients provided as the parameter.
     */
    async sendTo(message: string | object, {only, except, throttleKey, onSentMessage}: SendToOptions = {}): Promise<void> {
        const recipients = (only || Object.keys(this.connectedPeers))
            .filter((peerId) => (!except || except.indexOf(peerId) < 0));
        if (recipients && recipients.length === 0) {
            // No recipients - send nothing, but still trigger onSentMessage if provided.
            onSentMessage && onSentMessage(recipients, message);
            return;
        }
        if (throttleKey) {
            // Route through the per-key throttled wrapper; intermediate calls with the same key may be dropped.
            await this.memoizedThrottle(throttleKey, this.sendToRaw)(message, recipients, onSentMessage);
        } else {
            await this.sendToRaw(message, recipients, onSentMessage);
        }
    }

    // Announce our departure to the network and run "close" handlers for every connected peer.
    async disconnectAll(): Promise<void> {
        await this.postToMcastServer({
            type: McastMessageType.close,
            peerId: this.peerId,
            userId: this.userId
        });
        for (let peerId of Object.keys(this.connectedPeers)) {
            await this.doCustomEvents(McastMessageType.close, peerId, null);
        }
        this.connectedPeers = {};
    }

    // Shut the node down: stops the listen loop (via the shutdown flag) and disconnects from all peers.
    async destroy() {
        console.log('Shutting down multicast node', this.peerId);
        this.shutdown = true;
        await this.disconnectAll();
    }

    // Close the connection to a single peer, optionally telling them why first.
    async close(peerId: string, reason?: string) {
        if (this.connectedPeers[peerId]) {
            if (reason) {
                await this.sendToRaw(closeMessage(reason), [peerId]);
            }
            await this.destroyPeer(peerId);
        }
    }
}
the_stack
import * as _ from 'lodash';
import * as pixelWidth from 'string-pixel-width';
import { Component, ElementRef, EventEmitter, Injector, Input, OnDestroy, OnInit, Output, ViewChild } from '@angular/core';
import {Alert} from '@common/util/alert.util';
import {AbstractPopupComponent} from '@common/component/abstract-popup.component';
import {GridOption} from '@common/component/grid/grid.option';
import {GridComponent} from '@common/component/grid/grid.component';
import {Header, SlickGridHeader} from '@common/component/grid/grid.header';
import {DatasourceInfo, Field} from '@domain/datasource/datasource';
import {DatasourceService} from '../../../../../datasource/service/datasource.service';
import {DataSourceCreateService, FileDetail, FileResult} from '../../../../service/data-source-create.service';

/**
 * Preview step of the streaming datasource creation wizard: fetches a sample of the
 * uploaded file (or uses already-parsed kafka data), shows it in a SlickGrid, and
 * stores the parsed fields back on the shared DatasourceInfo before moving on.
 */
@Component({
  selector: 'stream-preview',
  templateUrl: './stream-preview.component.html',
})
export class StreamPreviewComponent extends AbstractPopupComponent implements OnInit, OnDestroy {

  /*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
  | Private Variables
  |-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=*/

  // Info for the datasource that will be created (shared across wizard steps)
  @Input('sourceData')
  private readonly sourceData: DatasourceInfo;

  @ViewChild(GridComponent)
  private readonly gridComponent: GridComponent;

  // file results
  public fileResult: FileResult;

  // selected file detail data
  public selectedFileDetailData: FileDetail;

  @Input()
  public step: string;

  @Output()
  public readonly stepChange: EventEmitter<string> = new EventEmitter();

  @Output()
  public readonly onComplete: EventEmitter<any> = new EventEmitter();

  // number of grid rows to preview
  public rowNum: number;

  // flag
  public typeShowFl: boolean = false;

  // grid hide
  public clearGrid = true;

  public typeList = [];

  // flag
  public isValidFile: boolean;

  public globalErrorMessage: string;

  public patches = [];
  public dataList = [];

  public intervalValidMessage: string;
  // interval valid
  public intervalValid: boolean;
  // granularity unit
  public granularityUnit: number;

  // Constructor
  constructor(private datasourceService: DatasourceService,
              private _dataSourceCreateService: DataSourceCreateService,
              protected elementRef: ElementRef,
              protected injector: Injector) {
    super(elementRef, injector);
  }

  // Init
  public ngOnInit() {
    // Init
    super.ngOnInit();
    // If kafka data was already parsed, render it directly; otherwise preview the uploaded file.
    if (!_.isNil(this.sourceData.kafkaData.fieldList) && !_.isNil(this.sourceData.kafkaData.fieldData)) {
      this._updateGrid(this.sourceData.kafkaData.fieldData, this.sourceData.kafkaData.fieldList);
    } else {
      this.fileResult = this.sourceData.uploadData.fileResult;
      // If this page already has datasource file info (user came back to this step)
      if (this.sourceData.hasOwnProperty('fileData') && !this.isNullOrUndefined(this.sourceData.fileData.selectedFileDetailData)) {
        // init data
        this._initData(_.cloneDeep(this.sourceData.fileData));
      } else {
        this._setFileDetail(true);
      }
    }
  }

  // Destory
  public ngOnDestroy() {
    // Destory
    super.ngOnDestroy();
  }

  // Go back to the previous wizard step, persisting this step's state first.
  public prev() {
    // Delete existing file data, then re-save the current state
    this._deleteAndSaveFileData();
    this.step = 'stream-select';
    this.stepChange.emit(this.step);
  }

  /**
   * Move to the next screen
   */
  public next() {
    if (!_.isNil(this.sourceData.kafkaData.fieldList) && !_.isNil(this.sourceData.kafkaData.fieldData)) {
      this.sourceData.fieldList = this.sourceData.kafkaData.fieldList;
      this.sourceData.fieldData = this.sourceData.kafkaData.fieldData;
      this._nextStep();
    } else {
      if (this.selectedFileDetailData === undefined) {
        return;
      }
      if (this.fileResult) {
        // validation
        if (this.isEnableNext()) {
          // If the data changed, remove the schema data and ingestion data
          this._deleteSchemaData();
          // Delete existing file data, then re-save the current state
          this._deleteAndSaveFileData();
          this._nextStep();
        }
      }
    }
  }

  /**
   * Build typeList: one {label, value} entry per logical type with its field count.
   * @returns {{}}
   */
  public setTypeList(): void {
    const result = this.selectedFileDetailData.fields.reduce((acc, field) => {
      // If result already has this type, increment its count
      if (acc.hasOwnProperty(field.logicalType)) {
        acc[field.logicalType] += 1;
      } else {
        // Otherwise create a new entry
        acc[field.logicalType] = 1;
      }
      return acc;
    }, {});
    this.typeList = Object.keys(result).reduce((acc, key) => {
      acc.push({label: key, value: result[key]});
      return acc;
    }, []);
  }

  /**
   * Is enable next
   * @return {boolean}
   */
  public isEnableNext(): boolean {
    return this.isValidFile;
  }

  // True while there is no preview data at all (neither file nor kafka) - used to disable the Next button.
  public isEnableNextButton(): boolean {
    return this.selectedFileDetailData === undefined && this.sourceData.kafkaData.fieldData === undefined;
  }

  /**
   * Row-count change event
   */
  public onChangeRowNum() {
    // If rowNum exceeds the total number of rows, clamp it
    if (this.rowNum > this.selectedFileDetailData.totalRows) {
      this.rowNum = this.selectedFileDetailData.totalRows;
    }
    // re-fetch file data with the new limit
    this._setFileDetail();
  }

  // Global error (e.g. request failure) takes precedence over a per-file parse error message.
  public get getErrorMessage() {
    if (this.isNullOrUndefined(this.globalErrorMessage)) {
      return this.selectedFileDetailData.errorMessage;
    } else {
      return this.globalErrorMessage;
    }
  }

  /**
   * Move to the schema configuration screen
   * @private
   */
  private _nextStep(): void {
    // go to the next page
    this.step = 'stream-configure-schema';
    this.stepChange.emit(this.step);
  }

  /**
   * If the data changed and schema data exists, delete the schema data
   * @private
   */
  private _deleteSchemaData(): void {
    // Delete schema if the data changed
    if (this._isChangeData()) {
      this.sourceData.hasOwnProperty('schemaData') && (delete this.sourceData.schemaData);
      this.sourceData.hasOwnProperty('ingestionData') && (delete this.sourceData.ingestionData);
    }
  }

  /**
   * Delete the old file data, then save it anew
   * @private
   */
  private _deleteAndSaveFileData(): void {
    // If file info exists, delete it
    if (this.sourceData.hasOwnProperty('fileData')) {
      delete this.sourceData.fileData;
    }
    // Save this page's datasource creation info
    this._saveFileData(this.sourceData);
    // set field list, field data
    if (!this.isNullOrUndefined(this.selectedFileDetailData)) {
      this.sourceData.fieldList = this.selectedFileDetailData.fields;
      this.sourceData.fieldData = this.selectedFileDetailData.data;
    }
  }

  /**
   * Save this page's datasource file info
   * @param {DatasourceInfo} sourceData
   * @private
   */
  private _saveFileData(sourceData: DatasourceInfo) {
    sourceData['fileData'] = {
      // file result
      fileResult: this.fileResult,
      // file data
      selectedFileDetailData: this.selectedFileDetailData,
      // grid row count
      rowNum: this.rowNum,
      // type list
      typeList: this.typeList,
      // flag
      isValidFile: this.isValidFile,
    };
  }

  /**
   * Render the grid
   * @param {any[]} headers
   * @param {any[]} rows
   * @private
   */
  private _drawGrid(headers: any[], rows: any[]) {
    // grid show
    this.clearGrid = false;
    this.changeDetect.detectChanges();
    // grid options are optional
    this.gridComponent.create(headers, rows, new GridOption()
      .SyncColumnCellResize(true)
      .MultiColumnSort(true)
      .RowHeight(32)
      .build()
    );
  }

  /**
   * Update grid info
   * @param data
   * @param {Field[]} fields
   * @private
   */
  private _updateGrid(data: any, fields: Field[]) {
    // headers
    const headers: Header[] = this._getHeaders(fields);
    // rows
    const rows: any[] = this._getRows(data);
    // draw the grid (only when there is at least one header)
    headers && headers.length > 0 && this._drawGrid(headers, rows);
  }

  /**
   * Get header info
   * @param {Field[]} fields
   * @returns {header[]}
   * @private
   */
  private _getHeaders(fields: Field[]): Header[] {
    return fields.map(
      (field: Field) => {
        /* 70 is the sum of the CSS padding values */
        const headerWidth: number = Math.floor(pixelWidth(field.name, {size: 12})) + 70;
        return new SlickGridHeader()
          .Id(field.name)
          .Name('<span style="padding-left:20px;"><em class="' + this.getFieldTypeIconClass(field.logicalType.toString()) + '"></em>' + Field.getSlicedColumnName(field) + '</span>')
          .Field(field.name)
          .Behavior('select')
          .Selectable(false)
          .CssClass('cell-selection')
          .Width(headerWidth)
          .CannotTriggerInsert(true)
          .Resizable(true)
          .Unselectable(true)
          .Sortable(true)
          .Formatter((_row, _cell, value) => {
            let content = value;
            // trans to string
            if (typeof value === 'number') {
              content = value + '';
            }
            // truncate long cell values to 50 characters for display
            if (content && content.length > 50) {
              return content.slice(0, 50);
            } else {
              return content;
            }
          })
          .build();
      }
    );
  }

  /**
   * Get rows (assigning a sequential id to each row if missing - SlickGrid needs one)
   * @param data
   * @returns {any[]}
   * @private
   */
  private _getRows(data: any): any[] {
    let rows: any[] = data;
    if (data.length > 0 && !data[0].hasOwnProperty('id')) {
      rows = rows.map((row: any, idx: number) => {
        row.id = idx;
        return row;
      });
    }
    return rows;
  }

  /**
   * Check whether the data changed (i.e. a different file was uploaded since the state was saved)
   * @return {boolean}
   * @private
   */
  private _isChangeData(): boolean {
    if (this.sourceData.fileData) {
      if (this.sourceData.fileData.fileResult === undefined
        && this.fileResult.fileKey) {
        return true;
      } else if (this.sourceData.fileData.fileResult.fileKey !== this.fileResult.fileKey) {
        return true;
      }
    }
    return false;
  }

  /**
   * Set file detail data
   * @param {boolean} initRowNum
   * @private
   */
  private _setFileDetail(initRowNum?: boolean): void {
    this.globalErrorMessage = undefined;
    // init selected file detail data
    this.selectedFileDetailData = undefined;
    // grid hide
    this.clearGrid = true;
    // show loading indicator
    this.loadingShow();
    // if init row num
    initRowNum && (this.rowNum = 100);
    // fetch the file preview from the server
    this.datasourceService.getDatasourceFile(this.fileResult.fileKey, this._getFileParams())
      .then((result: FileDetail) => {
        // hide loading indicator
        this.loadingHide();
        // if SUCCESS FAIL
        if (result.success === false) {
          Alert.warning(this.translateService.instant('msg.storage.alert.file.import.error'));
          return;
        }
        // set file detail data
        this.selectedFileDetailData = result;
        // if total row is smaller than rowNum
        if (result.totalRows < this.rowNum) {
          // set row num
          this.rowNum = result.totalRows;
        }
        // if result is parsable
        if (result.isParsable && result.isParsable.valid) {
          // valid true
          this.isValidFile = true;
          // set type list
          this.setTypeList();
          // render the grid
          this._updateGrid(this.selectedFileDetailData.data, this.selectedFileDetailData.fields);
          // if CSV file
        } else if (result.isParsable) { // if result is not parsable
          // set error message
          this.selectedFileDetailData.errorMessage = this._dataSourceCreateService.getFileErrorMessage(result.isParsable.warning);
        }
      })
      .catch(error => {
        if (!this.isNullOrUndefined(error.message)) {
          this.loadingHide();
          this.globalErrorMessage = error.message;
        } else {
          this.commonExceptionHandler(error)
        }
      });
  }

  /**
   * Get file params
   * @return {any}
   * @private
   */
  private _getFileParams(): any {
    return {
      limit: this.rowNum,
      firstHeaderRow: false
    };
  }

  /**
   * init source file data (restore a previously saved snapshot of this step's state)
   * @param fileData
   * @private
   */
  private _initData(fileData) {
    // file result
    this.fileResult = fileData.fileResult;
    // file data
    this.selectedFileDetailData = fileData.selectedFileDetailData;
    // grid row count
    this.rowNum = fileData.rowNum;
    // type list
    this.typeList = fileData.typeList;
    // flag
    this.isValidFile = fileData.isValidFile;
    // render the grid
    this._updateGrid(this.selectedFileDetailData.data, this.selectedFileDetailData.fields);
  }
}
the_stack
import 'styling/_Sort';
import { exportGlobally } from '../../GlobalExports';
import { IBuildingQueryEventArgs, IQueryErrorEventArgs, IQuerySuccessEventArgs, QueryEvents } from '../../events/QueryEvents';
import { Assert } from '../../misc/Assert';
import { MODEL_EVENTS } from '../../models/Model';
import { QUERY_STATE_ATTRIBUTES, QueryStateModel } from '../../models/QueryStateModel';
import { $$ } from '../../utils/Dom';
import { SVGDom } from '../../utils/SVGDom';
import { SVGIcons } from '../../utils/SVGIcons';
import { Utils } from '../../utils/Utils';
import { logSortEvent } from '../Analytics/SharedAnalyticsCalls';
import { Component } from '../Base/Component';
import { IComponentBindings } from '../Base/ComponentBindings';
import { ComponentOptions } from '../Base/ComponentOptions';
import { Initialization } from '../Base/Initialization';
import { SortCriteria, VALID_DIRECTION } from './SortCriteria';
import { AccessibleButton, ArrowDirection } from '../../utils/AccessibleButton';
import { l } from '../../strings/Strings';
import { findIndex, find, any } from 'underscore';

export interface ISortOptions {
  sortCriteria?: SortCriteria[];
  caption?: string;
}

/**
 * The `Sort` component renders a widget that the end user can interact with to select the criterion to use when sorting query results.
 *
 * To improve accessibility, it's recommended to group `Sort` components in a container with `role="radiogroup"`.
 */
export class Sort extends Component {
  static ID = 'Sort';

  static doExport = () => {
    exportGlobally({
      Sort: Sort,
      SortCriteria: SortCriteria
    });
  };

  /**
   * Options for the component
   * @componentOptions
   */
  static options: ISortOptions = {
    /**
     * The sort criterion/criteria the end user can select/toggle between when interacting with this component instance.
     *
     * The available sort criteria are:
     * - `relevancy`
     * - `date ascending`/`date descending`
     * - `qre`
     * - `@field ascending`/`@field descending`, where you must replace `field` with the name of a sortable field in your index (e.g., `data-sort-criteria="@size ascending"`).
     *
     * You can specify a comma separated list of sort criteria to toggle between when interacting with this component instance (e.g., `data-sort-criteria="date descending,date ascending"`).
     *
     * You can specify multiple sort criteria to be used in the same request by separating them with a semicolon (e.g., `data-sort-criteria="@size ascending;date ascending"` ).
     *
     * Interacting with this component instance will cycle through those criteria in the order they are listed in.
     * Typically, you should only specify a list of sort criteria when you want the end user to be able to toggle the direction of a `date` or `@field` sort criteria.
     * Otherwise, you should configure a distinct `Sort` component instance for each sort criterion you want to make available in your search page.
     *
     * You must specify a valid value for this option in order for this component instance to work correctly.
     *
     * Examples:
     *
     * - `data-sort-criteria="date ascending"` creates a Sort component that allows to sort on `date ascending`, without being able to toggle the order.
     * - `data-sort-criteria="date ascending, date descending"` creates a Sort component that allows end users to toggle between `date ascending` and `date descending` on click.
     * - `data-sort-criteria="@size ascending; date descending"` creates a Sort component that only allows end users to sort on `@size ascending`. The index then applies a second sort on `date descending` when two items are of equal value.
     * - `data-sort-criteria="@size ascending; date descending, @size descending; date descending"` creates a Sort component that allows end users to toggle between `@size ascending` and `@size descending`. For each value, the index applies a second sort on `date descending` when two items are of equal value.
     */
    sortCriteria: ComponentOptions.buildCustomListOption(
      values => {
        return values.map(criteria => {
          // 'any' because Underscore won't accept the union type as an argument.
          if (typeof criteria === 'string') {
            return new SortCriteria(criteria);
          } else {
            return criteria as SortCriteria;
          }
        });
      },
      { required: true }
    ),

    /**
     * The caption to display on this component instance.
     *
     * By default, the component uses the text content of the element it is instantiated on.
     */
    caption: ComponentOptions.buildLocalizedStringOption({ required: true })
  };

  // The currently active criteria (undefined/null when this component is not selected).
  private currentCriteria: SortCriteria;
  // The clickable caption element (role="radio").
  private sortButton: HTMLElement;
  // The ascending/descending toggle element (only present when isToggle()).
  private directionButton: HTMLElement;
  // The nearest ancestor with role="radiogroup" (falls back to this.element).
  private radioGroup: HTMLElement;

  /**
   * Creates a new `Sort` component instance.
   * @param element The HTMLElement on which to instantiate the component.
   * @param options The options for this component instance.
   * @param bindings The bindings that the component requires to function normally. If not set, these will be
   * automatically resolved (with a slower execution time).
   */
  constructor(public element: HTMLElement, public options?: ISortOptions, bindings?: IComponentBindings) {
    super(element, Sort.ID, bindings);
    this.options = ComponentOptions.initComponentOptions(element, Sort, options);

    Assert.isLargerOrEqualsThan(1, this.options.sortCriteria.length);

    this.bind.onQueryState(MODEL_EVENTS.CHANGE_ONE, QUERY_STATE_ATTRIBUTES.SORT, () => this.handleQueryStateChanged());
    this.bind.onRootElement(QueryEvents.querySuccess, (args: IQuerySuccessEventArgs) => this.handleQuerySuccess(args));
    this.bind.onRootElement(QueryEvents.buildingQuery, (args: IBuildingQueryEventArgs) => this.handleBuildingQuery(args));
    this.bind.onRootElement(QueryEvents.queryError, (args: IQueryErrorEventArgs) => this.handleQueryError(args));
    this.ensureDom();
  }

  // Build the component DOM: caption button, optional direction toggle, and radio-group wiring.
  public createDom() {
    const el = $$(this.element);
    el.on('click', () => this.selectAndExecuteQuery());
    const innerText = el.text();
    el.empty();
    this.findOrCreateRadioGroup();
    this.createSortButton(innerText);
    if (this.isToggle()) {
      this.createDirectionButton();
    }
    this.update();
  }

  /**
   * Selects this `Sort` component.
   *
   * Updates the state model if selecting this component toggles its current [`sortCriteria`]{@link Sort.options.sortCriteria}.
   *
   * @param direction The sort direction. Can be one of: `ascending`, `descending`.
   */
  public select(direction?: string) {
    if (direction) {
      this.currentCriteria = find(this.options.sortCriteria, (criteria: SortCriteria) => {
        return criteria.direction == direction;
      });
      this.updateQueryStateModel();
    } else if (Utils.exists(this.currentCriteria)) {
      this.selectNextCriteria();
    } else {
      this.selectFirstCriteria();
    }
  }

  /**
   * Selects this `Sort` component, then triggers a query if selecting this component toggles its current [`sortCriteria`]{@link Sort.options.sortCriteria}.
   *
   * Also logs an event in the usage analytics with the new current sort criteria.
   */
  public selectAndExecuteQuery() {
    var oldCriteria = this.currentCriteria;
    this.select();
    if (oldCriteria != this.currentCriteria) {
      this.executeSearchQuery();
    }
  }

  public enable() {
    $$(this.element).removeClass('coveo-tab-disabled');
    this.update();
    super.enable();
  }

  public disable() {
    $$(this.element).addClass('coveo-tab-disabled');
    super.disable();
  }

  /**
   * Gets the current [`sortCriteria`]{@link Sort.options.sortCriteria} of this `Sort` component.
   * @returns {SortCriteria}
   */
  public getCurrentCriteria(): SortCriteria {
    return this.currentCriteria;
  }

  /**
   * Indicates whether the name of any of the available [`sortCriteria`]{@link Sort.options.sortCriteria} of this `Sort` component matches the argument.
   * @param sortId The sort criteria name to look for (e.g., `date descending`).
   */
  public match(sortId: string) {
    return any(this.options.sortCriteria, (sortCriteria: SortCriteria) => sortId == sortCriteria.toString());
  }

  // Use an ancestor radiogroup when one exists; otherwise make this element the group.
  private findOrCreateRadioGroup() {
    this.radioGroup = this.findRadioGroup();
    if (!this.radioGroup) {
      this.element.setAttribute('role', 'radiogroup');
      this.radioGroup = this.element;
    }
  }

  private createSortButton(innerText?: string) {
    this.sortButton = $$('span').el;
    this.sortButton.innerText = this.options.caption || innerText;
    new AccessibleButton()
      .withElement(this.sortButton)
      .withEnterKeyboardAction(() => this.selectAndExecuteQuery())
      .withArrowsAction((direction, e) => this.onArrowPressed(direction, e))
      .withLabel(this.isToggle() ? this.getDirectionalLabel(this.initialDirection as VALID_DIRECTION) : this.getOmnidirectionalLabel())
      .withRole('radio')
      .build();
    this.element.appendChild(this.sortButton);
  }

  // The toggle is labelled with the direction OPPOSITE to the initial one, since that is what activating it selects.
  private createDirectionButton() {
    this.directionButton = $$('span', { className: 'coveo-icon' }, ...this.createIcons()).el;
    new AccessibleButton()
      .withElement(this.directionButton)
      .withSelectAction(e => {
        e.stopPropagation();
        this.selectNextCriteriaAndExecuteQuery();
      })
      .withArrowsAction((direction, e) => this.onArrowPressed(direction, e))
      .withLabel(
        this.getDirectionalLabel(
          this.initialDirection === VALID_DIRECTION.DESCENDING ? VALID_DIRECTION.ASCENDING : VALID_DIRECTION.DESCENDING
        )
      )
      .withRole('radio')
      .build();
    this.element.appendChild(this.directionButton);
  }

  // Arrow-key navigation between the radio buttons of the group (right/down = forward).
  private onArrowPressed(direction: ArrowDirection, e: Event) {
    this.selectNextRadioButton(direction === ArrowDirection.RIGHT || direction === ArrowDirection.DOWN ? 1 : -1);
    e.stopPropagation();
  }

  private createIcons() {
    const iconAscending = $$('span', { className: 'coveo-sort-icon-ascending' }, SVGIcons.icons.arrowUp);
    SVGDom.addClassToSVGInContainer(iconAscending.el, 'coveo-sort-icon-ascending-svg');
    const iconDescending = $$('span', { className: 'coveo-sort-icon-descending' }, SVGIcons.icons.arrowDown);
    SVGDom.addClassToSVGInContainer(iconDescending.el, 'coveo-sort-icon-descending-svg');
    return [iconAscending, iconDescending];
  }

  // Walk up the DOM looking for an element with role="radiogroup"; null if none before <body>.
  private findRadioGroup(element = this.element) {
    if (!element || element === document.body) {
      return null;
    }
    if (element.getAttribute('role') === 'radiogroup') {
      return element;
    }
    return this.findRadioGroup(element.parentElement);
  }

  // Move focus/selection to the next radio in the group, wrapping around in either direction.
  private selectNextRadioButton(direction = 1) {
    const radioButtons = $$(this.radioGroup).findAll('[role="radio"]');
    const currentIndex = findIndex(radioButtons, radio => radio.getAttribute('aria-checked') === 'true');
    let indexToSelect: number;
    const isAnythingSelected = currentIndex !== -1;
    if (isAnythingSelected) {
      // +length keeps the modulo result non-negative when stepping backwards.
      indexToSelect = (currentIndex + direction + radioButtons.length) % radioButtons.length;
    } else {
      if (direction >= 0) {
        indexToSelect = 0;
      } else {
        indexToSelect = radioButtons.length - 1;
      }
    }
    const radioToSelect = radioButtons[indexToSelect];
    radioToSelect.focus();
    radioToSelect.click();
  }

  private executeSearchQuery() {
    this.queryController.deferExecuteQuery({
      beforeExecuteQuery: () => logSortEvent(this.usageAnalytics, this.currentCriteria.sort + this.currentCriteria.direction)
    });
  }

  private selectFirstCriteria() {
    this.currentCriteria = this.options.sortCriteria[0];
    this.updateQueryStateModel();
  }

  // Cycle to the next configured criteria (wraps around).
  private selectNextCriteria() {
    const indexOfCurrentCriteria = this.currentCriteria ? this.options.sortCriteria.indexOf(this.currentCriteria) : 0;
    this.currentCriteria = this.options.sortCriteria[(indexOfCurrentCriteria + 1) % this.options.sortCriteria.length];
    this.updateQueryStateModel();
  }

  private selectNextCriteriaAndExecuteQuery() {
    const oldCriteria = this.currentCriteria;
    this.selectNextCriteria();
    if (oldCriteria != this.currentCriteria) {
      this.executeSearchQuery();
    }
  }

  private handleQueryStateChanged() {
    this.update();
  }

  private update() {
    // Basically, if the criteria in the model fits with one of ours, it'll become our active criteria
    var sortCriteria = <string>this.queryStateModel.get(QueryStateModel.attributesEnum.sort);
    if (Utils.isNonEmptyString(sortCriteria)) {
      var criteriaFromModel = SortCriteria.parse(sortCriteria);
      this.currentCriteria = find(this.options.sortCriteria, (criteria: SortCriteria) => criteriaFromModel.equals(criteria));
    } else {
      this.currentCriteria = null;
    }
    this.updateAppearance();
    this.updateAccessibilityProperties();
  }

  private get captionIsDefined() {
    return Utils.isNonEmptyString(this.options.caption);
  }

  // Falls back to the first criteria's direction when nothing is selected yet.
  private get currentDirection() {
    return this.currentCriteria ? this.currentCriteria.direction : this.initialDirection;
  }

  private get initialDirection() {
    return this.options.sortCriteria[0].direction;
  }

  private get displayedSortText() {
    return this.captionIsDefined ? this.options.caption : this.element.textContent;
  }

  private handleBuildingQuery(data: IBuildingQueryEventArgs) {
    Assert.exists(data);

    // Only contribute our criteria when the state has no sort yet, or this component IS the selected one.
    var sort = this.queryStateModel.get(QueryStateModel.attributesEnum.sort);
    if (sort == QueryStateModel.defaultAttributes.sort || this.isSelected()) {
      if (this.currentCriteria) {
        this.currentCriteria.putInQueryBuilder(data.queryBuilder);
      }
    }
  }

  // Hide the component when a query returns no results.
  private handleQuerySuccess(data: IQuerySuccessEventArgs) {
    if (data.results.results.length == 0) {
      $$(this.element).addClass('coveo-sort-hidden');
    } else {
      $$(this.element).removeClass('coveo-sort-hidden');
    }
  }

  private handleQueryError(data: IQueryErrorEventArgs) {
    $$(this.element).addClass('coveo-sort-hidden');
  }

  // A Sort with more than one criteria acts as an ascending/descending toggle.
  private isToggle(): boolean {
    return this.options.sortCriteria.length > 1;
  }

  private isSelected(): boolean {
    return Utils.exists(this.currentCriteria);
  }

  private updateAppearance() {
    $$(this.element).toggleClass('coveo-selected', this.isSelected());

    if (this.isToggle()) {
      $$(this.element).removeClass('coveo-ascending');
      $$(this.element).removeClass('coveo-descending');
      if (this.isSelected()) {
        $$(this.element).addClass(this.currentDirection === 'ascending' ? 'coveo-ascending' : 'coveo-descending');
      }
    }
  }

  // Keep aria-checked/aria-controls in sync: the caption radio is checked for the initial direction,
  // the direction radio for the toggled direction.
  private updateAccessibilityProperties() {
    this.sortButton.setAttribute('aria-controls', this.resultListsIds);
    const directionIsInitial = this.currentDirection === this.initialDirection;
    this.sortButton.setAttribute('aria-checked', `${this.isSelected() && directionIsInitial}`);
    if (this.isToggle()) {
      this.directionButton.setAttribute('aria-controls', this.resultListsIds);
      this.directionButton.setAttribute('aria-checked', `${this.isSelected() && !directionIsInitial}`);
    }
  }

  private get resultListsIds() {
    const resultLists = this.searchInterface.getComponents('ResultList') as Component[];
    return resultLists.map(resultList => resultList.element.id).join(' ');
  }

  private getDirectionalLabel(direction: VALID_DIRECTION) {
    const localizedCaption = l(this.displayedSortText);
    return direction === VALID_DIRECTION.DESCENDING
      ? l('SortResultsByDescending', localizedCaption)
      : l('SortResultsByAscending', localizedCaption);
  }

  private getOmnidirectionalLabel(): string {
    const localizedCaption = l(this.displayedSortText);
    return l('SortResultsBy', localizedCaption);
  }

  private updateQueryStateModel() {
    this.queryStateModel.set(QueryStateModel.attributesEnum.sort, this.currentCriteria.toString());
  }
}

Initialization.registerAutoCreateComponent(Sort);
the_stack
import { addCallback, getCollection } from '../vulcan-lib';
import { restrictViewableFields } from '../vulcan-users/permissions';
import SimpleSchema from 'simpl-schema'
import { getWithLoader } from "../loaders";
import { isServer } from '../executionEnvironment';
import { asyncFilter } from './asyncUtils';
import type { GraphQLScalarType } from 'graphql';
import DataLoader from 'dataloader';
import * as _ from 'underscore';
import { loggerConstructor } from './logging';

/**
 * Build a GraphQL resolver that follows a single-ID foreign-key field: loads
 * the referenced document through the per-request DataLoader, then applies
 * document- and field-level permission checks via accessFilterSingle.
 * When the reference is broken (id present but no document) it returns null,
 * and logs an error unless the field was declared nullable.
 */
export const generateIdResolverSingle = <CollectionName extends CollectionNameString>({ collectionName, fieldName, nullable }: {
  collectionName: CollectionName,
  fieldName: string,
  nullable: boolean,
}) => {
  type DataType = ObjectsByCollectionName[CollectionName];
  return async (doc: any, args: void, context: ResolverContext): Promise<DataType|null> => {
    if (!doc[fieldName]) return null

    const { currentUser } = context
    const collection = context[collectionName] as unknown as CollectionBase<DataType>
    const loader = context.loaders[collectionName] as DataLoader<string,DataType>;
    const resolvedDoc: DataType|null = await loader.load(doc[fieldName])
    if (!resolvedDoc) {
      if (!nullable) {
        // eslint-disable-next-line no-console
        console.error(`Broken foreign key reference: ${collectionName}.${fieldName}=${doc[fieldName]}`);
      }
      return null;
    }

    return await accessFilterSingle(currentUser, collection, resolvedDoc, context);
  }
}

/**
 * Build a GraphQL resolver that follows an array-of-IDs foreign-key field:
 * batch-loads every referenced document through the DataLoader, then applies
 * permission filtering via accessFilterMultiple (which also drops broken
 * references). `getKey` optionally maps each stored array entry to the loader key.
 */
const generateIdResolverMulti = <CollectionName extends CollectionNameString>({ collectionName, fieldName, getKey = ((a:any)=>a) }: {
  collectionName: CollectionName,
  fieldName: string,
  getKey?: (key: string) => string,
}) => {
  type DbType = ObjectsByCollectionName[CollectionName];
  return async (doc: any, args: void, context: ResolverContext): Promise<Array<DbType>> => {
    if (!doc[fieldName]) return []

    const keys = doc[fieldName].map(getKey)
    const { currentUser } = context
    const collection = context[collectionName] as unknown as CollectionBase<DbType>
    const loader = context.loaders[collectionName] as DataLoader<string,DbType>;
    const resolvedDocs: Array<DbType> = await loader.loadMany(keys)
    return await accessFilterMultiple(currentUser, collection, resolvedDocs, context);
  }
}

// Apply both document-level and field-level permission checks to a single document.
// If the user can't access the document, returns null. If the user can access the
// document, return a copy of the document in which any fields the user can't access
// have been removed. If document is null, returns null.
export const accessFilterSingle = async <T extends DbObject>(currentUser: DbUser|null, collection: CollectionBase<T>, document: T|null, context: ResolverContext|null): Promise<T|null> => {
  const { checkAccess } = collection
  if (!document) return null;
  if (checkAccess && !(await checkAccess(currentUser, document, context))) return null
  const restrictedDoc = restrictViewableFields(currentUser, collection, document)
  return restrictedDoc;
}

// Apply both document-level and field-level permission checks to a list of documents.
// Returns a list where documents which the user can't access are removed from the
// list, and fields which the user can't access are removed from the documents inside
// the list. If currentUser is null, applies permission checks for the logged-out
// view.
export const accessFilterMultiple = async <T extends DbObject>(currentUser: DbUser|null, collection: CollectionBase<T>, unfilteredDocs: Array<T|null>, context: ResolverContext|null): Promise<Array<T>> => {
  const { checkAccess } = collection

  // Filter out nulls (docs that were referenced but didn't exist)
  // Explicit cast because the type-system doesn't detect that this is removing
  // nulls.
  const existingDocs: Array<T> = _.filter(unfilteredDocs, d=>!!d) as Array<T>;
  // Apply the collection's checkAccess function, if it has one, to filter out documents
  const filteredDocs = checkAccess
    ? await asyncFilter(existingDocs, async (d: T) => await checkAccess(currentUser, d, context))
    : existingDocs
  // Apply field-level permissions
  const restrictedDocs = restrictViewableFields(currentUser, collection, filteredDocs)

  return restrictedDocs;
}

/**
 * This field is stored in the database as a string, but resolved as the
 * referenced document
 */
export const foreignKeyField = <CollectionName extends CollectionNameString>({idFieldName, resolverName, collectionName, type, nullable=true}: {
  idFieldName: string,
  resolverName: string,
  collectionName: CollectionName,
  type: string,
  nullable?: boolean,
}) => {
  if (!idFieldName || !resolverName || !collectionName || !type)
    throw new Error("Missing argument to foreignKeyField");

  return {
    type: String,
    foreignKey: collectionName,
    resolveAs: {
      fieldName: resolverName,
      // Non-nullable fields get a GraphQL non-null type ("Foo!")
      type: nullable ? type : `${type}!`,
      resolver: generateIdResolverSingle({
        collectionName,
        fieldName: idFieldName,
        nullable,
      }),
      addOriginalField: true,
    },
  }
}

/**
 * Like foreignKeyField, but for a field holding an array of IDs; resolves to a
 * non-null list of non-null referenced documents ("[Foo!]!").
 */
export function arrayOfForeignKeysField<CollectionName extends keyof CollectionsByName>({idFieldName, resolverName, collectionName, type, getKey}: {
  idFieldName: string,
  resolverName: string,
  collectionName: CollectionName,
  type: string,
  getKey?: (key: any)=>string,
}) {
  if (!idFieldName || !resolverName || !collectionName || !type)
    throw new Error("Missing argument to foreignKeyField");

  return {
    type: Array,
    resolveAs: {
      fieldName: resolverName,
      type: `[${type}!]!`,
      resolver: generateIdResolverMulti({
        collectionName,
        fieldName: idFieldName,
        getKey
      }),
      addOriginalField: true
    },
  }
}

// Map a SimpleSchema constructor type to its GraphQL scalar name, or null when
// there is no direct mapping (callers must then supply graphQLtype explicitly).
export const simplSchemaToGraphQLtype = (type: any): string|null => {
  if (type === String) return "String";
  else if (type === Number) return "Int";
  else if (type === Date) return "Date";
  else if (type === Boolean) return "Boolean";
  else return null;
}

interface ResolverOnlyFieldArgs<T extends DbObject> extends CollectionFieldSpecification<T> {
  resolver: (doc: T, args: any, context: ResolverContext) => any,
  graphQLtype?: string|GraphQLScalarType|null,
  graphqlArguments?: string|null,
}

/**
 * This field is not stored in the database, but is filled in at query-time by
 * our GraphQL API using the supplied resolver function.
 */
export const resolverOnlyField = <T extends DbObject>({type, graphQLtype=null, resolver, graphqlArguments=null, ...rest}: ResolverOnlyFieldArgs<T>): CollectionFieldSpecification<T> => {
  // Prefer an explicit GraphQL type; otherwise derive one from the SimpleSchema type.
  const resolverType = graphQLtype || simplSchemaToGraphQLtype(type);
  if (!type || !resolverType)
    throw new Error("Could not determine resolver graphQL type");
  return {
    type: type,
    optional: true,
    resolveAs: {
      type: resolverType,
      arguments: graphqlArguments,
      resolver: resolver,
    },
    ...rest
  }
}

// Given a collection and a fieldName=>fieldSchema dictionary, add fields to
// the collection schema. If any of the fields mentioned are already present,
// throws an error.
export const addFieldsDict = <T extends DbObject>(collection: CollectionBase<T>, fieldsDict: Record<string,CollectionFieldSpecification<T>>): void => {
  // Invalidate the cached SimpleSchema so it is rebuilt with the new fields.
  collection._simpleSchema = null;

  for (let key in fieldsDict) {
    if (key in collection._schemaFields) {
      throw new Error("Field already exists: "+key);
    } else {
      collection._schemaFields[key] = fieldsDict[key];
    }
  }
}

// Given a collection and a fieldName=>fieldSchema dictionary, add properties
// to existing fields on the collection schema, by shallow merging them. If any
// of the fields named don't already exist, throws an error. This is used for
// making parts of the schema (in particular, resolvers, onCreate callbacks,
// etc) specific to server-side code.
export const augmentFieldsDict = <T extends DbObject>(collection: CollectionBase<T>, fieldsDict: Record<string,CollectionFieldSpecification<T>>): void => {
  // Invalidate the cached SimpleSchema so it is rebuilt with the merged fields.
  collection._simpleSchema = null;

  for (let key in fieldsDict) {
    if (key in collection._schemaFields) {
      // Shallow merge: properties in fieldsDict override existing ones.
      collection._schemaFields[key] = {...collection._schemaFields[key], ...fieldsDict[key]};
    } else {
      throw new Error("Field does not exist: "+key);
    }
  }
}

// For auto-generated database type definitions, provides a (string) definition
// of this field's type. Useful for fields that would otherwise be black-box types.
SimpleSchema.extendOptions(['typescriptType'])

// For denormalized fields, needsUpdate is an optional attribute that
// determines whether the denormalization function should be rerun given
// the new document after an update or an insert
SimpleSchema.extendOptions(['needsUpdate'])

// For denormalized fields, getValue returns the new denormalized value of
// the field, given the new document after an update or an insert
SimpleSchema.extendOptions(['getValue'])

// For denormalized fields, marks a field so that we can automatically
// recompute the new denormalized value via
// `Vulcan.recomputeDenormalizedValues` in the Meteor shell
SimpleSchema.extendOptions(['canAutoDenormalize'])

// Whether to log changes to this field to the LWEvents collection. If undefined
// (neither true nor false), will be logged if the logChanges option is set on
// the collection and the denormalized option is false.
SimpleSchema.extendOptions(['logChanges'])

// Helper function to add all the correct callbacks and metadata for a field
// which is denormalized, where its denormalized value is a function only of
// the other fields on the document. (Doesn't work if it depends on the contents
// of other collections, because it doesn't set up callbacks for changes in
// those collections)
export function denormalizedField<T extends DbObject>({ needsUpdate, getValue }: {
  needsUpdate?: (doc: Partial<T>) => boolean,
  getValue: (doc: T, context: ResolverContext) => any,
}): CollectionFieldSpecification<T> {
  return {
    // Recompute on update only when needsUpdate says the change is relevant
    // (or unconditionally when no needsUpdate predicate was supplied).
    onUpdate: async ({data, document, context}) => {
      if (!needsUpdate || needsUpdate(data)) {
        return await getValue(document, context)
      }
    },
    onCreate: async ({newDocument, context}) => {
      if (!needsUpdate || needsUpdate(newDocument)) {
        return await getValue(newDocument, context)
      }
    },
    denormalized: true,
    canAutoDenormalize: true,
    optional: true,
    // Expose the callbacks so recomputeDenormalizedValues-style tooling can reuse them.
    needsUpdate,
    getValue
  }
}

// Create a denormalized field which counts the number of objects in some other
// collection whose value for a field is this object's ID. For example, count
// the number of comments on a post, or the number of posts by a user, updating
// when objects are created/deleted/updated.
export function denormalizedCountOfReferences<SourceType extends DbObject, TargetCollectionName extends keyof ObjectsByCollectionName>({ collectionName, fieldName, foreignCollectionName, foreignTypeName, foreignFieldName, filterFn }: { collectionName: CollectionNameString, fieldName: string, foreignCollectionName: TargetCollectionName, foreignTypeName: string, foreignFieldName: string, filterFn?: (doc: ObjectsByCollectionName[TargetCollectionName])=>boolean, }): CollectionFieldSpecification<SourceType> { const denormalizedLogger = loggerConstructor(`callbacks-${collectionName.toLowerCase()}-denormalized-${fieldName}`) type TargetType = ObjectsByCollectionName[TargetCollectionName]; const foreignCollectionCallbackPrefix = foreignTypeName.toLowerCase(); const filter = filterFn || ((doc: ObjectsByCollectionName[TargetCollectionName]) => true); if (isServer) { // When inserting a new document which potentially needs to be counted, follow // its reference and update with $inc. const createCallback = async (newDoc, {currentUser, collection, context}) => { denormalizedLogger(`about to test new ${foreignTypeName}`, newDoc) if (newDoc[foreignFieldName] && filter(newDoc)) { denormalizedLogger(`new ${foreignTypeName} should increment ${newDoc[foreignFieldName]}`) const collection = getCollection(collectionName); await collection.rawUpdateOne(newDoc[foreignFieldName], { $inc: { [fieldName]: 1 } }); } return newDoc; } addCallback(`${foreignCollectionCallbackPrefix}.create.after`, createCallback); // When updating a document, we may need to decrement a count, we may // need to increment a count, we may need to do both with them cancelling // out, or we may need to both but on different documents. 
addCallback(`${foreignCollectionCallbackPrefix}.update.after`, async (newDoc, {oldDocument, currentUser, collection}) => { denormalizedLogger(`about to test updating ${foreignTypeName}`, newDoc, oldDocument) const countingCollection = getCollection(collectionName); if (filter(newDoc) && !filter(oldDocument)) { // The old doc didn't count, but the new doc does. Increment on the new doc. if (newDoc[foreignFieldName]) { denormalizedLogger(`updated ${foreignTypeName} should increment ${newDoc[foreignFieldName]}`) await countingCollection.rawUpdateOne(newDoc[foreignFieldName], { $inc: { [fieldName]: 1 } }); } } else if (!filter(newDoc) && filter(oldDocument)) { // The old doc counted, but the new doc doesn't. Decrement on the old doc. if (oldDocument[foreignFieldName]) { denormalizedLogger(`updated ${foreignTypeName} should decrement ${newDoc[foreignFieldName]}`) await countingCollection.rawUpdateOne(oldDocument[foreignFieldName], { $inc: { [fieldName]: -1 } }); } } else if (filter(newDoc) && oldDocument[foreignFieldName] !== newDoc[foreignFieldName]) { denormalizedLogger(`${foreignFieldName} of ${foreignTypeName} has changed from ${oldDocument[foreignFieldName]} to ${newDoc[foreignFieldName]}`) // The old and new doc both count, but the reference target has changed. // Decrement on one doc and increment on the other. 
if (oldDocument[foreignFieldName]) { denormalizedLogger(`changing ${foreignFieldName} leads to decrement of ${oldDocument[foreignFieldName]}`) await countingCollection.rawUpdateOne(oldDocument[foreignFieldName], { $inc: { [fieldName]: -1 } }); } if (newDoc[foreignFieldName]) { denormalizedLogger(`changing ${foreignFieldName} leads to increment of ${newDoc[foreignFieldName]}`) await countingCollection.rawUpdateOne(newDoc[foreignFieldName], { $inc: { [fieldName]: 1 } }); } } return newDoc; } ); addCallback(`${foreignCollectionCallbackPrefix}.delete.async`, async ({document, currentUser, collection}) => { denormalizedLogger(`about to test deleting ${foreignTypeName}`, document) if (document[foreignFieldName] && filter(document)) { denormalizedLogger(`deleting ${foreignTypeName} should decrement ${document[foreignFieldName]}`) const countingCollection = getCollection(collectionName); await countingCollection.rawUpdateOne(document[foreignFieldName], { $inc: { [fieldName]: -1 } }); } } ); } return { type: Number, optional: true, defaultValue: 0, denormalized: true, canAutoDenormalize: true, getValue: async (document: SourceType, context: ResolverContext): Promise<number> => { const foreignCollection = getCollection(foreignCollectionName) as CollectionBase<TargetType>; const docsThatMayCount = await getWithLoader( context, foreignCollection, `denormalizedCount_${collectionName}.${fieldName}`, { }, foreignFieldName, document._id ); const docsThatCount = _.filter(docsThatMayCount, d=>filter(d)); return docsThatCount.length; } } } export function googleLocationToMongoLocation(gmaps) { return { type: "Point", coordinates: [gmaps.geometry.location.lng, gmaps.geometry.location.lat] } }
the_stack
import { Component, OnInit, Input, ElementRef, Inject } from '@angular/core';
import * as d3 from 'd3-selection';
import * as d3Scale from "d3-scale";
import * as d3Shape from "d3-shape";
import * as d3Array from "d3-array";
import * as d3Format from "d3-time-format";
import * as d3Time from "d3-time";
import * as d3Axis from "d3-axis";
import { DataCacheService } from '../../core/services/index';

/**
 * Renders a time-series line graph with d3 inside the component's <svg>.
 * Cross-handler state (max value, hovered point coordinates, time range,
 * selected stat) is passed between d3 event handlers through localStorage —
 * see ngOnDestroy for the keys that get cleaned up.
 */
@Component({
  selector: 'line-graph',
  templateUrl: './line-graph.component.html',
  styleUrls: ['./line-graph.component.scss']
})
export class LineGraphComponent implements OnInit {
  @Input() graphData: any;        // current dataset: { data: [{date, value}...], xAxis, yAxis }
  @Input() graphDataOld: any;     // previous dataset, backfilled from graphData when absent
  @Input() render: Function;
  @Input() timeRange:any;         // 'Day' | 'Week' | 'Month' | 'Year'
  @Input() stat:any;              // e.g. 'Average' — controls hover-value precision

  root:any;                       // d3 selection of the host element
  private margin = {top: 0, right: 9, bottom: 36, left: 36};
  private axes = {top: 0, right: 0, bottom: 0, left: 0};  // measured axis extents
  private width: number;
  private height: number;
  private x: any;
  private y: any;
  private scale: any;
  private xAxis: any;
  private yAxis: any;
  private svg: any;
  cy:any;
  ypart:any;
  tooltip:any;
  valuesarray:any=[];
  sortedvalues:any=[];
  newData:any=[];
  private line: d3Shape.Line<[number, number]>;
  public viewBox: string ='0 0 640 290';

  constructor(
    private cache: DataCacheService,
    @Inject(ElementRef) elementRef: ElementRef
  ) {
    var el:HTMLElement = elementRef.nativeElement;
    this.root = d3.select(el);
    // Drawing area: fixed 630x240 canvas minus margins.
    this.width = 630 - this.margin.left - this.margin.right ;
    this.height = 240 - this.margin.top - this.margin.bottom;
  }

  ngOnInit() {
    // Persisted so the (non-Angular) d3 mousemove handler can read it.
    localStorage.setItem("stat",this.stat)
  }

  ngOnDestroy() {
    // Clean up all localStorage keys used to shuttle state between handlers.
    localStorage.removeItem('max');
    localStorage.removeItem('cy');
    localStorage.removeItem('range');
    localStorage.removeItem('cx');
    localStorage.removeItem('stat');
  }

  ngOnChanges(x:any){
    if(this.graphData != undefined){
      this.onGraphRender()
      if (this.graphDataOld === undefined) {
        this.graphDataOld = this.graphData;
      }
    }
  }

  public renderGraph(data){
    this.clearGraph(this.onGraphRender);
  }

  private clearGraph(onComplete) {
    this.root.select("svg").remove();
    if(typeof onComplete === "function"){
      onComplete()
    }
  }

  // Full redraw: wipe the svg contents, then rebuild group, scales and axes
  // (drawAxis ends by calling drawLine).
  private onGraphRender() {
    d3.selectAll("svg > *").remove();
    this.initSvg();
    this.initAxis();
    this.drawAxis();
  }

  private initSvg() {
    this.svg = this.root.select("svg")
      .append("g")
      .attr("class", "base-group")
  }

  public initAxis() {
    var flagcodequality = this.cache.get("codequality");
    this.y = d3Scale.scaleLinear().range([this.height, 0]);
    for(var i=0 ; i<this.graphData.data.length; i++){
      this.valuesarray[i] = this.graphData.data[i].value;
    }
    this.sortedvalues = this.valuesarray.sort(function(a, b){return a - b});
    var max = this.sortedvalues[this.graphData.data.length - 1];
    // Y domain goes to 125% of the max value (max/4 * 5) to leave headroom.
    var max4 = max/4;
    var max5 = max4*5;
    // Stored for the mousemove handler's value reconstruction.
    localStorage.setItem('max', JSON.stringify({ max: max }));
    if(max == 0){
      this.y.domain([0,0.1]);
    }else{
      this.y.domain([0,max5]);
    }
    // NOTE: an earlier "FOR CODEQUALITY" variant set fixed x domains per time
    // range (180/30/7 days back); removed here — see VCS history.
    if( this.graphData.data.length == 1){
      // Single data point: pad the domain by a day on each side so the point
      // doesn't sit on the axis edge.
      for(var i=0 ; i<this.graphData.data.length ; i++){
        this.graphData.data[i].date = new Date(this.graphData.data[i].date.valueOf() + this.graphData.data[i].date.getTimezoneOffset() * 60000);
        this.x = d3Scale.scaleTime().range([0, this.width]).domain(d3Array.extent(this.graphData.data, (d) => d.date ));
        var dayless = this.x.domain()[0].setDate(this.x.domain()[0].getDate()-1)
        var daymore = this.x.domain()[1].setDate(this.x.domain()[1].getDate()+1)
        this.x.domain([dayless, daymore]).nice()
      }
    }else{
      // Shift each date by the timezone offset so it renders as local time.
      for(var i=0 ; i<this.graphData.data.length ; i++){
        this.graphData.data[i].date = new Date(this.graphData.data[i].date.valueOf() + this.graphData.data[i].date.getTimezoneOffset() * 60000);
      }
      this.x = d3Scale.scaleTime().range([0, this.width]);
      this.x.domain(d3Array.extent(this.graphData.data, (d) => d.date )).nice();
    }
    // NOTE: a parallel commented-out "FOR METRICS" branch duplicated the logic
    // above behind the codequality flag; removed here — see VCS history.
  }

  public drawLine() {
    this.line = d3Shape.line()
      .x( (d: any) => this.x(d.date) )
      .y( (d: any) => this.y(parseFloat(d.value)) );
    if (this.graphDataOld == undefined) {
      this.graphDataOld = this.graphData;
    }
    var d0 = this.line(this.graphData.data);
    this.svg.append("path")
      .attr("class", "line line-plot")
      .attr("d", d0)
    // One circle per data point; handlers use `self` (hoisted var below) and
    // stash the hovered point's pixel coords in localStorage for mousemove.
    this.svg.selectAll(".dot")
      .data(this.graphData.data)
      .enter().append("circle")
      .attr("class", "dot")
      .attr("d", d0)
      .attr("cx", (d) => this.x(d.date))
      .attr("cy", (d) => this.y(d.value))
      .on("mouseover", function() {
        self.svg.select(".tool-tip").style("display", null);
        var cy = 204 - d3.select(this).attr("cy");
        var cx = d3.select(this).attr("cx");
        localStorage.setItem('cy', JSON.stringify({ cy: cy }));
        localStorage.setItem('cx', JSON.stringify({ cx: cx }));
      })
      .on("mouseout", function() {
        self.svg.select(".tool-tip").style("display", "none");
      })
      .on("mousemove", mousemove)
    var bisectDate = d3Array.bisector(function(d) { return d.hour; }).left;
    var range = this.graphData.xAxis.range;
    // Captured for the d3 handlers above, where `this` is the DOM node.
    // (var hoisting makes it visible to the handlers, which run after drawLine.)
    var self = this;
    // Tick/hover date format per range keyword (keys are lower-cased below).
    var timeFormat = {
      '1 day' : '%b %d, %I:%M %p',
      'day' : '%b %d, %I:%M %p',
      '7 days' : '%b %d, %I:%M %p',
      'week':'%b %d, %I %p',
      '4 weeks' : '%b %d, %I %p',
      'month' : '%b %d %Y',
      '6 months' : '%b %d %y',
      '6months' : '%b %d %y',
      'year' : '%b %d %Y',
      '1 year' : '%b %y',
      '6 years' : '%Y'
    }
    function formatDate(date, range) {
      ;
      if (range == undefined) {
        range = 'day';
      }
      range = range.toLowerCase();
      var dateFormat = d3Format.timeFormat(timeFormat[range]);
      return dateFormat(date);
    }
    // NOTE: a commented-out getGraphValue(key, data) helper (nearest data point
    // lookup by date) used to live here — see VCS history.
    function mousemove() {
      var x0 = d3.mouse(this)[0];
      var y0 = d3.mouse(this)[1];
      // Recover hovered point data stashed by the mouseover handler.
      var max = JSON.parse(localStorage.getItem('max'));
      var cy = JSON.parse(localStorage.getItem('cy'));
      var cx = JSON.parse(localStorage.getItem('cx'));
      var x1 = parseFloat(cx.cx);
      // Convert the point's pixel y-offset back into a data value
      // (163.2 px ~ the usable plot height for the 125% domain).
      this.ypart = (max.max/163.2);
      var valueY = (cy.cy*this.ypart);
      var date = self.x.invert(x1);
      var hoverX = formatDate(self.x.invert(x1), range)
      var stat = localStorage.getItem("stat");
      if( stat == 'Average' ){
        var hoverY = valueY.toFixed(2);
      }else {
        var hoverY = valueY.toFixed(0)
      }
      var tooltip = self.svg.select(".tool-tip");
      // Center the 75px-wide tooltip above the cursor.
      tooltip.attr("transform", "translate(" + (x0 - (75/2)) + "," + (y0 - 65) + ")");
      tooltip.select('.hover-x').text(hoverX);
      tooltip.select('.hover-y').text(hoverY);
    }
    this.toolTip()
  }

  private drawAxis() {
    this.xAxis = d3Axis.axisBottom(this.x);
    this.yAxis = d3Axis.axisLeft(this.y);
    this.svg.append("g")
      .attr("class", "axis axis--x")
      .attr("transform", "translate(0," + this.height + ")")
      .call(this.xAxis)
    this.svg.append("g")
      .attr("class", "axis axis--y")
      .call(this.yAxis);
    // Calculate width of the axis for accurate alignment
    this.axes.left = this.root.select('g.axis.axis--y').node().getBBox().width;
    var flagcodequality = this.cache.get("codequality");
    // Pick tick density/format per time range; also persist the range keyword
    // for formatDate in the mousemove handler.
    switch(this.timeRange){
      case 'Day':
        if( flagcodequality ){
          this.xAxis = d3Axis.axisBottom(this.x).tickSize(5).tickPadding(9).ticks(2).tickFormat(d3Format.timeFormat( '%b %d'));
          break;
        }else{
          localStorage.setItem("range","day");
          this.xAxis = d3Axis.axisBottom(this.x).tickSize(5).tickPadding(9).ticks(5).tickFormat(d3Format.timeFormat( '%b %d, %I %p'));
          break;
        }
      case 'Week':
        localStorage.setItem("range","week");
        this.xAxis = d3Axis.axisBottom(this.x).tickSize(5).tickPadding(9).ticks(5).tickFormat(d3Format.timeFormat('%b %d'));
        break;
      case 'Month':
        localStorage.setItem("range","month");
        this.xAxis = d3Axis.axisBottom(this.x).tickSize(5).tickPadding(9).ticks(5).tickFormat(d3Format.timeFormat('%b'));
        break;
      case 'Year':
        localStorage.setItem("range","year");
        this.xAxis = d3Axis.axisBottom(this.x).tickSize(5).tickPadding(9).ticks(5).tickFormat(d3Format.timeFormat('%b %Y'));
        break;
    }
    this.axes.bottom = this.root.select('g.axis.axis--x').node().getBBox().height;
    this.y = this.y.range([(this.height), 0]);
    // Full-width negative tick size draws horizontal grid lines.
    this.yAxis = d3Axis.axisLeft(this.y).tickSize(-this.width).ticks(5);
    // redraw axes
    this.root.select('g.axis.axis--y').call(this.yAxis);
    this.svg.append("text")
      .attr("class", "axis-title axis-title--y")
      .attr("transform", "rotate(-90)")
      .attr("y", -(this.margin.left + this.axes.left/2))
      .attr("x", -(this.height/2)+ this.axes.bottom)
      .attr("dy", "0.21em")
      .style("text-anchor", "end")
      .style('font', '12px')
      .style('text-transform','uppercase')
      .text(this.graphData.yAxis.label);
    this.svg.append("text")
      .attr("class", "axis-title axis-title--x")
      .attr("y", this.height + this.margin.top + this.axes.bottom*2)
      .attr("x", this.width/2)
      .attr("dy", "1.71em")
      .style("text-anchor", "end")
      .style('font', '12px')
      .text(this.graphData.xAxis.label)
      .attr("transform", "translate(" + (this.axes.left / 2) + ",0)");
    this.root.select('g.axis.axis--x').call(this.xAxis).attr("transform", "translate(0," + this.height + ")");
    this.root.select('g.base-group').attr('transform', 'translate(' + (this.axes.left/ 2 + this.margin.left) + ',' + (this.axes.bottom / 2 + this.margin.top) + ')');
    this.drawLine();
  }

  // Build the hidden hover tooltip: rounded rect, two text rows (date/value),
  // and a small triangle pointer (grey outline layered under a white fill).
  private toolTip() {
    var rectW = 75;
    var rectH = 45;
    var trnglW = 15;
    var trnglH = 10;
    this.tooltip = this.svg.append("g")
      .attr("class", "tool-tip")
      .style("display", "none")
      .style("z-index", "100")
      .attr("transform", "translate(" + (rectW/2) + "," + (-rectH + 10) + ")");
    this.tooltip.append("rect")
      .attr("class", "rect-outer")
      .attr("rx", 6)
      .attr("ry", 6)
      .attr("z-index", "10000000")
      .attr("stroke","#999")
      .attr("fill","#fff")
      .attr("width", rectW)
      .attr("height", rectH);
    this.tooltip.append("text")
      .attr("y", 16)
      .attr("x", 6)
      .attr("fill","#999")
      .attr("width", rectW)
      .attr("class", "hover-x").text("4.00");
    this.tooltip.append("text")
      .attr("y", 35)
      .attr("x", 6)
      .attr("fill","#999")
      .attr("class", "hover-y").text("1");
    this.tooltip.append("path")
      .attr("fill","#999")
      .attr("d", "M 0 0 L " + (trnglW) + " 0 L " + (trnglW/2) + " " + trnglH + " Z")
      .attr("transform","translate(" + (rectW - trnglW)/2 + "," + (rectH) + ")");
    this.tooltip.append("path")
      .attr("fill","#fff")
      .attr("d", "M 0 0 L " + (trnglW) + " 0 L " + (trnglW/2) + " " + trnglH + " Z")
      .attr("transform","translate(" + (rectW - trnglW)/2 + "," + (rectH - 2) + ")");
  }
}
the_stack
import { checkBehavior, failedExecuteInvalidEnumValue, scrollingElement } from "./common.js";
import type { IScrollConfig } from "./scroll-step";
import { elementScroll } from "./scroll.js";

const enum ScrollAlignment {
  ToEdgeIfNeeded,
  CenterAlways,
  LeftOrTop,
  RightOrBottom,
}

const enum WritingMode {
  HorizontalTb,
  VerticalRl,
  VerticalLr,
  SidewaysRl,
  SidewaysLr,
}

// https://drafts.csswg.org/css-writing-modes-4/#block-flow
// Maps the CSS writing-mode value (including legacy SVG/IE keywords like
// "lr-tb" and "tb-rl") onto the five canonical modes; unknown values fall back
// to horizontal-tb.
const normalizeWritingMode = (writingMode: string): WritingMode => {
  switch (writingMode) {
    case "horizontal-tb":
    case "lr":
    case "lr-tb":
    case "rl":
    case "rl-tb":
      return WritingMode.HorizontalTb;
    case "vertical-rl":
    case "tb":
    case "tb-rl":
      return WritingMode.VerticalRl;
    case "vertical-lr":
    case "tb-lr":
      return WritingMode.VerticalLr;
    case "sideways-rl":
      return WritingMode.SidewaysRl;
    case "sideways-lr":
      return WritingMode.SidewaysLr;
  }

  return WritingMode.HorizontalTb;
};

type Tuple2<T> = [T, T];

/**
 * Resolve the writing mode + text direction into physical axes.
 * Returns [layout, horizontalPos, verticalPos] where `layout` is a 2-bit mask
 * (bit 0 = horizontal axis reversed, bit 1 = vertical axis reversed) and the
 * caller's logical block/inline positions are swapped onto physical
 * horizontal/vertical slots as needed.
 */
const calcPhysicalAxis = <T>(writingMode: WritingMode, isLTR: boolean, hPos: T, vPos: T): [number, T, T] => {
  /** 0b{vertical}{horizontal} 0: normal, 1: reverse */
  let layout = 0b00;
  const enum OP {
    ReverseHorizontal = 0b01,
    ReverseVertical = 0b10,
  }

  // In vertical writing modes, RTL reverses the block-progression (vertical)
  // axis; in horizontal-tb the swap below moves this bit to the correct axis.
  if (!isLTR) {
    layout ^= OP.ReverseVertical;
  }

  switch (writingMode) {
    // horizontal-tb: lines flow top-to-bottom, inline axis is horizontal
    // (reversed when RTL — handled by the bit swap).
    case WritingMode.HorizontalTb:
      // swap horizontal and vertical
      layout = (layout >> 1) | ((layout & 1) << 1);
      [hPos, vPos] = [vPos, hPos];
      break;

    // vertical-rl / sideways-rl: columns flow right-to-left.
    case WritingMode.VerticalRl:
    case WritingMode.SidewaysRl:
      // reverse horizontal
      layout ^= OP.ReverseHorizontal;
      break;

    // sideways-lr: text runs bottom-to-top.
    case WritingMode.SidewaysLr:
      // reverse vertical
      layout ^= OP.ReverseVertical;
      break;
  }

  return [layout, hPos, vPos];
};

// True when the horizontal axis is reversed (content grows right-to-left) for
// this element's writing mode + direction.
const isXReversed = (computedStyle: Readonly<CSSStyleDeclaration>): boolean => {
  const layout = calcPhysicalAxis(
    normalizeWritingMode(computedStyle.writingMode),
    computedStyle.direction !== "rtl",
    undefined,
    undefined,
  )[0];
  return (layout & 1) === 1;
};

// https://source.chromium.org/chromium/chromium/src/+/main:third_party/blink/renderer/core/dom/element.cc;l=1097-1189;drc=6a7533d4a1e9f2372223a9d912a9e53a6fa35ae0
// Convert logical ScrollIntoViewOptions (block/inline) into physical
// [horizontal, vertical] alignments, honoring writing mode and direction.
const toPhysicalAlignment = (
  options: Readonly<ScrollIntoViewOptions>,
  writingMode: WritingMode,
  isLTR: boolean,
): Tuple2<ScrollAlignment> => {
  const [layout, hPos, vPos] = calcPhysicalAxis(
    writingMode,
    isLTR,
    options.block || "start",   // spec defaults: block="start"
    options.inline || "nearest", //               inline="nearest"
  );

  return [hPos, vPos].map((value, index) => {
    switch (value) {
      case "center":
        return ScrollAlignment.CenterAlways;
      case "nearest":
        return ScrollAlignment.ToEdgeIfNeeded;
      default: {
        // "start"/"end" flip when this physical axis is reversed.
        const reverse = (layout >> index) & 1;
        return (value === "start") === !reverse ? ScrollAlignment.LeftOrTop : ScrollAlignment.RightOrBottom;
      }
    }
  }) as Tuple2<ScrollAlignment>;
};

// code from stipsan/compute-scroll-into-view
// https://github.com/stipsan/compute-scroll-into-view/blob/5396c6b78af5d0bbce11a7c4e93cc3146546fcd3/src/index.ts
/**
 * Find out which edge to align against when logical scroll position is "nearest"
 * Interesting fact: "nearest" works similarly to "if-needed": if the element is
 * fully visible it will not scroll.
 * All edge arguments are along one axis (start/end = top/bottom or left/right).
 * Returns null when no scrolling is needed on this axis.
 */
const mapNearest = (
  align: ScrollAlignment,
  scrollingEdgeStart: number,
  scrollingEdgeEnd: number,
  scrollingSize: number,
  elementEdgeStart: number,
  elementEdgeEnd: number,
  elementSize: number,
): Exclude<ScrollAlignment, ScrollAlignment.ToEdgeIfNeeded> | null => {
  if (align !== ScrollAlignment.ToEdgeIfNeeded) {
    return align;
  }

  // Element fully covers the scrollport, or is fully visible inside it:
  // nothing to do on this axis.
  if (
    (elementEdgeStart < scrollingEdgeStart && elementEdgeEnd > scrollingEdgeEnd) ||
    (elementEdgeStart > scrollingEdgeStart && elementEdgeEnd < scrollingEdgeEnd)
  ) {
    return null;
  }

  // Element sticks out past the start edge and fits, or sticks out past the
  // end edge and is larger than the scrollport: align to the start (left/top).
  if (
    (elementEdgeStart <= scrollingEdgeStart && elementSize <= scrollingSize) ||
    (elementEdgeEnd >= scrollingEdgeEnd && elementSize >= scrollingSize)
  ) {
    return ScrollAlignment.LeftOrTop;
  }

  // Mirror case: align to the end (right/bottom).
  if (
    (elementEdgeEnd > scrollingEdgeEnd && elementSize < scrollingSize) ||
    (elementEdgeStart < scrollingEdgeStart && elementSize > scrollingSize)
  ) {
    return ScrollAlignment.RightOrBottom;
  }

  return null;
};

// "visible" and "clip" overflow values cannot produce a scrollable box.
const canOverflow = (overflow: string | null): boolean => {
  return overflow !== "visible" && overflow !== "clip";
};

// The hosting <iframe>/<frame> element, or null (cross-origin access throws).
const getFrameElement = (element: Element): Element | null => {
  try {
    return element.ownerDocument.defaultView?.frameElement || null;
  } catch {
    return null;
  }
};

// A box is scrollable when its content overflows and overflow allows
// scrolling on either axis (the document's scrolling element always counts).
const isScrollable = (element: Element, computedStyle: Readonly<CSSStyleDeclaration>): boolean => {
  if (element.clientHeight < element.scrollHeight || element.clientWidth < element.scrollWidth) {
    return (
      canOverflow(computedStyle.overflowY) ||
      canOverflow(computedStyle.overflowX) ||
      element === scrollingElement(element)
    );
  }

  return false;
};

// Like Element.parentElement, but also crosses shadow-root and iframe
// boundaries (returns the shadow host or the frame element respectively).
const parentElement = (element: Element): Element | null => {
  const pNode = element.parentNode;
  const pElement = element.parentElement;
  if (pElement === null && pNode !== null) {
    if (pNode.nodeType === /** Node.DOCUMENT_FRAGMENT_NODE */ 11) {
      return (pNode as ShadowRoot).host;
    }
    if (pNode.nodeType === /** Node.DOCUMENT_NODE */ 9) {
      return getFrameElement(element);
    }
  }
  return pElement;
};

// Clamp value into [min, max].
const clamp = (value: number, min: number, max: number): number => {
  if (value < min) {
    return min;
  }
  if (value > max) {
    return max;
  }
  return value;
};

const getSupportedScrollMarginProperty = (
  ownerDocument: Document,
): "scroll-margin" | "scroll-snap-margin" | undefined => {
  // Webkit uses "scroll-snap-margin" https://bugs.webkit.org/show_bug.cgi?id=189265.
  return (["scroll-margin", "scroll-snap-margin"] as const).filter(
    (property) => property in ownerDocument.documentElement.style,
  )[0];
};

// The element's rect expanded by its scroll-margin on each side
// (returned as [top, right, bottom, left]).
const getElementScrollSnapArea = (
  element: Element,
  elementRect: Readonly<DOMRect>,
  computedStyle: Readonly<CSSStyleDeclaration>,
): [top: number, right: number, bottom: number, left: number] => {
  const { top, right, bottom, left } = elementRect;
  const scrollProperty = getSupportedScrollMarginProperty(element.ownerDocument);
  if (!scrollProperty) {
    return [top, right, bottom, left];
  }

  const scrollMarginValue = (edge: "top" | "right" | "bottom" | "left"): number => {
    const value = computedStyle.getPropertyValue(`${scrollProperty}-${edge}`);
    // parseInt handles "NNpx"; non-numeric/empty values become 0.
    return parseInt(value, 10) || 0;
  };

  return [
    top - scrollMarginValue("top"),
    right + scrollMarginValue("right"),
    bottom + scrollMarginValue("bottom"),
    left - scrollMarginValue("left"),
  ];
};

// Pick the target coordinate for an alignment along one axis.
// (ToEdgeIfNeeded has been resolved by mapNearest before this point, so it
// shares the "start" branch.)
const calcAlignEdge = (align: ScrollAlignment, start: number, end: number): number => {
  switch (align) {
    case ScrollAlignment.CenterAlways:
      return (start + end) / 2;
    case ScrollAlignment.RightOrBottom:
      return end;
    case ScrollAlignment.LeftOrTop:
    case ScrollAlignment.ToEdgeIfNeeded:
      return start;
  }
};

// Visible viewport of a scrolling frame as [top, right, bottom, left]; for the
// document's scrolling element, prefers the visualViewport dimensions.
const getFrameViewport = (frame: Element, frameRect: Readonly<DOMRect>) => {
  const visualViewport = frame.ownerDocument.defaultView?.visualViewport;
  const [x, y, width, height] =
    frame === scrollingElement(frame)
      ? [0, 0, visualViewport?.width ?? frame.clientWidth, visualViewport?.height ?? frame.clientHeight]
      : [frameRect.left, frameRect.top, frame.clientWidth, frame.clientHeight];

  const left = x + frame.clientLeft;
  const top = y + frame.clientTop;
  const right = left + width;
  const bottom = top + height;
  return [top, right, bottom, left] as const;
};

// NOTE: definition continues beyond this chunk.
const computeScrollIntoView = (element: Element, options: ScrollIntoViewOptions): [Element, ScrollToOptions][] => {
  // Collect all the scrolling boxes, as defined in the spec: https://drafts.csswg.org/cssom-view/#scrolling-box
  const actions: [Element, ScrollToOptions][] = [];
  let ownerDocument = element.ownerDocument;
  let ownerWindow = ownerDocument.defaultView;
  if (!ownerWindow) {
    return actions;
  }
  const computedStyle = window.getComputedStyle(element);
  const isLTR = computedStyle.direction !== "rtl";
  const writingMode = normalizeWritingMode(
    computedStyle.writingMode || computedStyle.getPropertyValue("-webkit-writing-mode") || computedStyle.getPropertyValue("-ms-writing-mode"),
  );
  const [alignH, alignV] = toPhysicalAlignment(options, writingMode, isLTR);

  let [top, right, bottom, left] = getElementScrollSnapArea(element, element.getBoundingClientRect(), computedStyle);

  for (let frame = parentElement(element); frame !== null; frame = parentElement(frame)) {
    if (ownerDocument !== frame.ownerDocument) {
      // Crossed into a parent frame: translate coordinates by the frame's offset.
      ownerDocument = frame.ownerDocument;
      ownerWindow = ownerDocument.defaultView;
      if (!ownerWindow) {
        break;
      }
      const { left: dX, top: dY } = frame.getBoundingClientRect();
      top += dY;
      right += dX;
      bottom += dY;
      left += dX;
    }
    const frameStyle = ownerWindow.getComputedStyle(frame);
    if 
(frameStyle.position === "fixed") { break; } if (!isScrollable(frame, frameStyle)) { continue; } const frameRect = frame.getBoundingClientRect(); const [frameTop, frameRight, frameBottom, frameLeft] = getFrameViewport(frame, frameRect); const eAlignH = mapNearest(alignH, frameLeft, frameRight, frame.clientWidth, left, right, right - left); const eAlignV = mapNearest(alignV, frameTop, frameBottom, frame.clientHeight, top, bottom, bottom - top); const diffX = eAlignH === null ? 0 : calcAlignEdge(eAlignH, left, right) - calcAlignEdge(eAlignH, frameLeft, frameRight); const diffY = eAlignV === null ? 0 : calcAlignEdge(eAlignV, top, bottom) - calcAlignEdge(eAlignV, frameTop, frameBottom); const moveX = isXReversed(frameStyle) ? clamp(diffX, -frame.scrollWidth + frame.clientWidth - frame.scrollLeft, -frame.scrollLeft) : clamp(diffX, -frame.scrollLeft, frame.scrollWidth - frame.clientWidth - frame.scrollLeft); const moveY = clamp(diffY, -frame.scrollTop, frame.scrollHeight - frame.clientHeight - frame.scrollTop); actions.push([ frame, { left: frame.scrollLeft + moveX, top: frame.scrollTop + moveY, behavior: options.behavior }, ]); top = Math.max(top - moveY, frameTop); right = Math.min(right - moveX, frameRight); bottom = Math.min(bottom - moveY, frameBottom); left = Math.max(left - moveX, frameLeft); } return actions; }; export const scrollIntoView = ( element: Element, scrollIntoViewOptions?: ScrollIntoViewOptions, config?: IScrollConfig, ): void => { const options = scrollIntoViewOptions || {}; if (!checkBehavior(options.behavior)) { throw new TypeError(failedExecuteInvalidEnumValue("scrollIntoView", "Element", options.behavior)); } const actions = computeScrollIntoView(element, options); actions.forEach(([frame, scrollToOptions]) => { elementScroll(frame, scrollToOptions, config); }); }; export const elementScrollIntoView = scrollIntoView;
the_stack
import type { b64string } from '@tanker/crypto';
import { tcrypto, utils } from '@tanker/crypto';
import { InvalidArgument } from '@tanker/errors';

import { _deserializePublicIdentity, _splitProvisionalAndPermanentPublicIdentities, _serializeIdentity, assertTrustchainId } from '../Identity';
import type { PublicPermanentIdentity, PublicProvisionalIdentity } from '../Identity';
import type UserManager from '../Users/Manager';
import type LocalUser from '../LocalUser/LocalUser';
import type ProvisionalIdentityManager from '../ProvisionalIdentity/Manager';
import { getGroupEntryFromBlock, makeUserGroupCreation, makeUserGroupAdditionV3, makeUserGroupRemoval } from './Serialize';
import type { Client } from '../Network/Client';
import type GroupStore from './GroupStore';
import type { InternalGroup, Group } from './types';
import { isInternalGroup } from './types';
import { assertExpectedGroups, assertPublicIdentities, groupsFromEntries } from './ManagerHelper';

// Result of a local cache lookup: the public keys found, plus the ids of
// groups that were NOT in the cache and must be fetched from the server.
type CachedPublicKeysResult = {
  cachedKeys: Array<Uint8Array>;
  missingGroupIds: Array<Uint8Array>;
};

/**
 * Rejects member updates where the same identity appears in both the
 * "to add" and "to remove" lists (matched by user id for permanent
 * identities, by app signature public key for provisional ones).
 *
 * @throws InvalidArgument listing every identity that is both added and removed.
 */
function checkAddedAndRemoved(permanentIdentitiesToAdd: Array<PublicPermanentIdentity>, permanentIdentitiesToRemove: Array<PublicPermanentIdentity>, provisionalIdentitiesToAdd: Array<PublicProvisionalIdentity>, provisionalIdentitiesToRemove: Array<PublicProvisionalIdentity>) {
  const addedAndRemovedIdentities: Array<b64string> = [];
  const userIdsToAdd: Set<b64string> = new Set();
  const appSignaturePublicKeysToAdd: Set<b64string> = new Set();
  for (const i of permanentIdentitiesToAdd)
    userIdsToAdd.add(i.value);
  for (const i of provisionalIdentitiesToAdd)
    appSignaturePublicKeysToAdd.add(i.public_signature_key);
  for (const i of permanentIdentitiesToRemove)
    if (userIdsToAdd.has(i.value))
      // @ts-expect-error this field is hidden
      addedAndRemovedIdentities.push(i.serializedIdentity || _serializeIdentity(i));
  for (const i of provisionalIdentitiesToRemove)
    if (appSignaturePublicKeysToAdd.has(i.public_signature_key))
      // @ts-expect-error this field is hidden
      addedAndRemovedIdentities.push(i.serializedIdentity || _serializeIdentity(i));
  if (addedAndRemovedIdentities.length)
    throw new InvalidArgument(`The identities ${addedAndRemovedIdentities.join(', ')} are both added to and removed from the group.`);
}

/**
 * Manages user groups: creation, membership updates, and retrieval/caching of
 * group encryption keys, backed by the server (`Client`) and a local
 * `GroupStore` cache.
 */
export default class GroupManager {
  _localUser: LocalUser;
  _UserManager: UserManager;
  _provisionalIdentityManager: ProvisionalIdentityManager;
  _client: Client;
  _groupStore: GroupStore;

  constructor(
    client: Client,
    groupStore: GroupStore,
    localUser: LocalUser,
    userManager: UserManager,
    provisionalIdentityManager: ProvisionalIdentityManager,
  ) {
    this._localUser = localUser;
    this._UserManager = userManager;
    this._client = client;
    this._groupStore = groupStore;
    this._provisionalIdentityManager = provisionalIdentityManager;
  }

  /**
   * Creates a group containing the given public identities and pushes the
   * group-creation block to the server.
   *
   * The group id is the group's public signature key (base64). The group's
   * encryption key pair is stored locally only when the local user is among
   * the group's permanent members.
   *
   * @returns the new group id as a base64 string.
   */
  async createGroup(publicIdentities: Array<b64string>): Promise<b64string> {
    assertPublicIdentities(publicIdentities);
    const deserializedIdentities = publicIdentities.map(i => _deserializePublicIdentity(i));
    assertTrustchainId(deserializedIdentities, this._localUser.trustchainId);
    const { permanentIdentities, provisionalIdentities } = _splitProvisionalAndPermanentPublicIdentities(deserializedIdentities);
    const users = await this._UserManager.getUsers(permanentIdentities, { isLight: true });
    const provisionalUsers = await this._provisionalIdentityManager.getProvisionalUsers(provisionalIdentities);
    const groupEncryptionKeyPair = tcrypto.makeEncryptionKeyPair();
    const groupSignatureKeyPair = tcrypto.makeSignKeyPair();
    const { payload, nature } = makeUserGroupCreation(
      groupSignatureKeyPair,
      groupEncryptionKeyPair,
      users,
      provisionalUsers,
    );
    const block = this._localUser.makeBlock(payload, nature);
    await this._client.createGroup({ user_group_creation: block });
    const groupId = groupSignatureKeyPair.publicKey;
    // Only save the key if we are in the group
    const myUserId = utils.toBase64(this._localUser.userId);
    if (permanentIdentities.find(i => i.value === myUserId))
      await this._groupStore.saveGroupEncryptionKeys([{
        groupId,
        publicEncryptionKey: groupEncryptionKeyPair.publicKey,
        privateEncryptionKey: groupEncryptionKeyPair.privateKey,
      }]);
    return utils.toBase64(groupId);
  }

  /**
   * Adds and/or removes members of an existing group in one operation.
   *
   * Builds an addition block (when there are identities to add) and a removal
   * block (when there are identities to remove), both signed with the group's
   * latest signature key, then pushes them: `softUpdateGroup` when a removal
   * is involved, `patchGroup` otherwise.
   *
   * @param groupId base64 group id.
   * @throws InvalidArgument when the group is unknown, when the local user is
   *   not a member, or when an identity is both added and removed.
   */
  async updateGroupMembers(groupId: string, publicIdentitiesToAdd: Array<b64string>, publicIdentitiesToRemove: Array<b64string>): Promise<void> {
    assertPublicIdentities(publicIdentitiesToAdd);
    assertPublicIdentities(publicIdentitiesToRemove);
    const internalGroupId = utils.fromBase64(groupId);
    const existingGroup = await this._getInternalGroupById(internalGroupId);
    // NOTE(review): _getInternalGroupById throws rather than returning a falsy
    // value, so this guard looks defensive — confirm it is reachable.
    if (!existingGroup) {
      throw new InvalidArgument('groupId', 'string', groupId);
    }
    const { encryptionKeyPairs, lastGroupBlock, signatureKeyPairs } = existingGroup;
    const deserializedIdentitiesToAdd = publicIdentitiesToAdd.map(i => _deserializePublicIdentity(i));
    assertTrustchainId(deserializedIdentitiesToAdd, this._localUser.trustchainId);
    const { permanentIdentities: permanentIdentitiesToAdd, provisionalIdentities: provisionalIdentitiesToAdd } = _splitProvisionalAndPermanentPublicIdentities(deserializedIdentitiesToAdd);
    const deserializedIdentitiesToRemove = publicIdentitiesToRemove.map(i => _deserializePublicIdentity(i));
    assertTrustchainId(deserializedIdentitiesToRemove, this._localUser.trustchainId);
    const { permanentIdentities: permanentIdentitiesToRemove, provisionalIdentities: provisionalIdentitiesToRemove } = _splitProvisionalAndPermanentPublicIdentities(deserializedIdentitiesToRemove);
    checkAddedAndRemoved(permanentIdentitiesToAdd, permanentIdentitiesToRemove, provisionalIdentitiesToAdd, provisionalIdentitiesToRemove);
    const usersToAdd = await this._UserManager.getUsers(permanentIdentitiesToAdd, { isLight: true });
    const provisionalUsersToAdd = await this._provisionalIdentityManager.getProvisionalUsers(provisionalIdentitiesToAdd);
    // Deduplicate removed user ids before decoding them back to bytes.
    const usersToRemove = [...new Set(permanentIdentitiesToRemove.map(u => u.value))].map(uid => utils.fromBase64(uid));
    const provisionalUsersToRemove = await this._provisionalIdentityManager.getProvisionalUsers(provisionalIdentitiesToRemove);
    let additionBlock;
    let removalBlock;
    if (publicIdentitiesToAdd.length) {
      // Sign with the group's most recent signature/encryption key pairs.
      const { payload, nature } = makeUserGroupAdditionV3(
        internalGroupId,
        signatureKeyPairs[signatureKeyPairs.length - 1]!.privateKey,
        lastGroupBlock,
        encryptionKeyPairs[encryptionKeyPairs.length - 1]!.privateKey,
        usersToAdd,
        provisionalUsersToAdd,
      );
      additionBlock = this._localUser.makeBlock(payload, nature);
    }
    if (publicIdentitiesToRemove.length) {
      const { payload, nature } = makeUserGroupRemoval(
        this._localUser.deviceId,
        internalGroupId,
        signatureKeyPairs[signatureKeyPairs.length - 1]!.privateKey,
        usersToRemove,
        provisionalUsersToRemove,
      );
      removalBlock = this._localUser.makeBlock(payload, nature);
    }
    if (removalBlock)
      await this._client.softUpdateGroup({ user_group_addition: additionBlock, user_group_removal: removalBlock });
    else
      await this._client.patchGroup({ user_group_addition: additionBlock });
  }

  /**
   * Returns the latest public encryption key of each given group, serving
   * from the local cache when possible and fetching+caching the rest from
   * group histories on the server.
   *
   * Note: the returned keys are NOT guaranteed to be in the same order as
   * `groupIds` (cached keys come first, then freshly fetched ones).
   */
  async getGroupsPublicEncryptionKeys(groupIds: Array<Uint8Array>): Promise<Array<Uint8Array>> {
    if (groupIds.length === 0) return [];
    const { cachedKeys, missingGroupIds, } = await this._getCachedGroupsPublicKeys(groupIds);
    const newKeys = [];
    if (missingGroupIds.length > 0) {
      const { histories: blocks } = await this._client.getGroupHistoriesByGroupIds(missingGroupIds);
      const groups = await this._groupsFromBlocks(blocks);
      assertExpectedGroups(groups, missingGroupIds);
      const externalGroupRecords = [];
      const internalGroupRecords = [];
      for (const group of groups) {
        if (isInternalGroup(group)) {
          // Member of this group: cache every (public, private) key pair.
          for (const encryptionKeyPair of group.encryptionKeyPairs) {
            internalGroupRecords.push({
              groupId: group.groupId,
              publicEncryptionKey: encryptionKeyPair.publicKey,
              privateEncryptionKey: encryptionKeyPair.privateKey,
            });
          }
        } else {
          // Not a member: only the latest public key is known/cached.
          externalGroupRecords.push({
            groupId: group.groupId,
            publicEncryptionKey: group.lastPublicEncryptionKey,
          });
        }
        newKeys.push(group.lastPublicEncryptionKey);
      }
      await this._groupStore.saveGroupPublicEncryptionKeys(externalGroupRecords);
      await this._groupStore.saveGroupEncryptionKeys(internalGroupRecords);
    }
    return cachedKeys.concat(newKeys);
  }

  /**
   * Resolves the full encryption key pair matching a group public encryption
   * key, from the cache or by replaying the group's history (which also
   * refreshes the cache with every key pair encountered).
   *
   * @throws InvalidArgument when no matching pair is found (i.e. the current
   *   user is not a member of the group owning that key).
   */
  async getGroupEncryptionKeyPair(groupPublicEncryptionKey: Uint8Array) {
    const cachedEncryptionKeyPair = await this._groupStore.findGroupEncryptionKeyPair(groupPublicEncryptionKey);
    if (cachedEncryptionKeyPair) {
      return cachedEncryptionKeyPair;
    }
    const { histories: blocks } = await this._client.getGroupHistoriesByGroupPublicEncryptionKey(groupPublicEncryptionKey);
    const groups = await this._groupsFromBlocks(blocks);
    let result;
    const internalGroupRecords = [];
    for (const group of groups) {
      if (isInternalGroup(group)) {
        for (const encryptionKeyPair of group.encryptionKeyPairs) {
          internalGroupRecords.push({
            groupId: group.groupId,
            publicEncryptionKey: encryptionKeyPair.publicKey,
            privateEncryptionKey: encryptionKeyPair.privateKey,
          });
          if (utils.equalArray(groupPublicEncryptionKey, encryptionKeyPair.publicKey)) {
            result = encryptionKeyPair;
          }
        }
      }
    }
    // Cache everything we learned, even when the lookup ultimately fails.
    await this._groupStore.saveGroupEncryptionKeys(internalGroupRecords);
    if (!result) {
      throw new InvalidArgument('Current user is not a group member');
    }
    return result;
  }

  /**
   * Fetches one group by id and asserts the current user is a member
   * (i.e. the reconstructed group is an InternalGroup).
   *
   * @throws InvalidArgument when the user is not a member.
   */
  async _getInternalGroupById(groupId: Uint8Array): Promise<InternalGroup> {
    const { histories: blocks } = await this._client.getGroupHistoriesByGroupIds([groupId]);
    const groups = await this._groupsFromBlocks(blocks);
    assertExpectedGroups(groups, [groupId]);
    const group = groups[0]!;
    if (!isInternalGroup(group)) {
      throw new InvalidArgument('Current user is not a group member');
    }
    return group;
  }

  /**
   * Deserializes raw history blocks into Group objects, resolving each block
   * author's device public signature key first so entries can be verified.
   */
  async _groupsFromBlocks(blocks: Array<b64string>): Promise<Array<Group>> {
    if (blocks.length === 0) {
      return [];
    }
    const entries = blocks.map(block => getGroupEntryFromBlock(block));
    const deviceIds = entries.map(entry => entry.author);
    const devicePublicSignatureKeyMap = await this._UserManager.getDeviceKeysByDevicesIds(deviceIds, { isLight: true });
    return groupsFromEntries(entries, devicePublicSignatureKeyMap, this._localUser, this._provisionalIdentityManager);
  }

  /**
   * Splits the requested group ids into public keys already present in the
   * local store and the ids still missing from it.
   */
  async _getCachedGroupsPublicKeys(groupsIds: Array<Uint8Array>): Promise<CachedPublicKeysResult> {
    const cachePublicKeys = await this._groupStore.findGroupsPublicKeys(groupsIds);
    const missingGroupIds = [];
    // Track presence keyed by base64 group id (Uint8Array is unusable as a key).
    const isGroupInCache: Record<b64string, boolean> = {};
    for (const groupId of groupsIds) {
      isGroupInCache[utils.toBase64(groupId)] = false;
    }
    for (const group of cachePublicKeys) {
      isGroupInCache[utils.toBase64(group.groupId)] = true;
    }
    for (const groupId of Object.keys(isGroupInCache)) {
      if (!isGroupInCache[groupId]) {
        missingGroupIds.push(utils.fromBase64(groupId));
      }
    }
    return {
      cachedKeys: cachePublicKeys.map(r => r.publicEncryptionKey),
      missingGroupIds,
    };
  }
}
the_stack
import { Kind } from 'graphql/language'; import { GraphQLNonNegativeFloat } from '../src/scalars/NonNegativeFloat'; import { GraphQLUnsignedFloat } from '../src/scalars/UnsignedFloat'; describe('NonNegativeFloat', () => { describe('valid', () => { describe('greater than zero', () => { describe('as float', () => { test('serialize', () => { expect(GraphQLNonNegativeFloat.serialize(123.45)).toBe(123.45); }); test('parseValue', () => { expect(GraphQLNonNegativeFloat.parseValue(123.45)).toBe(123.45); }); test('parseLiteral', () => { expect( GraphQLNonNegativeFloat.parseLiteral( { value: '123.45', kind: Kind.FLOAT }, {}, ), ).toBe(123.45); }); }); describe('as string', () => { test('serialize', () => { expect(GraphQLNonNegativeFloat.serialize('123.45')).toBe(123.45); }); test('parseValue', () => { expect(GraphQLNonNegativeFloat.parseValue('123.45')).toBe(123.45); }); test('parseLiteral', () => { expect( GraphQLNonNegativeFloat.parseLiteral( { value: '123.45', kind: Kind.FLOAT, }, {}, ), ).toBe(123.45); }); }); }); describe('zero', () => { describe('as float', () => { test('serialize', () => { expect(GraphQLNonNegativeFloat.serialize(0.0)).toBe(0.0); }); test('parseValue', () => { expect(GraphQLNonNegativeFloat.parseValue(0.0)).toBe(0.0); }); test('parseLiteral', () => { expect( GraphQLNonNegativeFloat.parseLiteral( { value: '0.0', kind: Kind.FLOAT }, {}, ), ).toBe(0.0); }); }); describe('as string', () => { test('serialize', () => { expect(GraphQLNonNegativeFloat.serialize('0.0')).toBe(0.0); }); test('parseValue', () => { expect(GraphQLNonNegativeFloat.parseValue('0.0')).toBe(0.0); }); test('parseLiteral', () => { expect( GraphQLNonNegativeFloat.parseLiteral( { value: '0.0', kind: Kind.FLOAT }, {}, ), ).toBe(0.0); }); }); }); }); describe('invalid', () => { describe('null', () => { test('serialize', () => { expect(() => GraphQLNonNegativeFloat.serialize(null)).toThrow( /Value is not a number/, ); }); test('parseValue', () => { expect(() => 
GraphQLNonNegativeFloat.parseValue(null)).toThrow( /Value is not a number/, ); }); test('parseLiteral', () => { expect(() => GraphQLNonNegativeFloat.parseLiteral( { value: null, kind: Kind.FLOAT }, {}, ), ).toThrow(/Value is not a number/); }); }); describe('undefined', () => { test('serialize', () => { expect(() => GraphQLNonNegativeFloat.serialize(undefined)).toThrow( /Value is not a number/, ); }); // FIXME: Does nothing. No throw. Call doesn't even seem to get to the parseValue() function. // test('parseValue', () => { // expect(() => GraphQLNonNegativeFloat.parseValue(undefined)).toThrow( // /Value is not a number/, // ); // }); test('parseLiteral', () => { expect(() => GraphQLNonNegativeFloat.parseLiteral( { value: undefined, kind: Kind.FLOAT }, {}, ), ).toThrow(/Value is not a number/); }); }); describe('less than zero', () => { describe('as float', () => { test('serialize', () => { expect(() => GraphQLNonNegativeFloat.serialize(-1.0)).toThrow( /Value is not a non-negative number/, ); }); test('parseValue', () => { expect(() => GraphQLNonNegativeFloat.parseValue(-1.0)).toThrow( /Value is not a non-negative number/, ); }); test('parseLiteral', () => { expect(() => GraphQLNonNegativeFloat.parseLiteral( { value: '-1.0', kind: Kind.FLOAT }, {}, ), ).toThrow(/Value is not a non-negative number/); }); }); describe('as string', () => { test('serialize', () => { expect(() => GraphQLNonNegativeFloat.serialize('-1.0')).toThrow( /Value is not a non-negative number/, ); }); test('parseValue', () => { expect(() => GraphQLNonNegativeFloat.parseValue('-1.0')).toThrow( /Value is not a non-negative number/, ); }); test('parseLiteral', () => { expect(() => GraphQLNonNegativeFloat.parseLiteral( { value: '-1.0', kind: Kind.FLOAT }, {}, ), ).toThrow(/Value is not a non-negative number/); }); }); }); describe('infinity', () => { test('serialize', () => { expect(() => GraphQLNonNegativeFloat.serialize(Number.POSITIVE_INFINITY), ).toThrow(/Value is not a finite number/); }); 
test('parseValue', () => { expect(() => GraphQLNonNegativeFloat.parseValue(Number.POSITIVE_INFINITY), ).toThrow(/Value is not a finite number/); }); test('parseLiteral', () => { expect(() => GraphQLNonNegativeFloat.parseLiteral( { value: Number.POSITIVE_INFINITY.toString(), kind: Kind.FLOAT, }, {}, ), ).toThrow(/Value is not a finite number/); }); }); describe('not a number', () => { test('serialize', () => { expect(() => GraphQLNonNegativeFloat.serialize('not a number')).toThrow( /Value is not a number/, ); }); test('parseValue', () => { expect(() => GraphQLNonNegativeFloat.parseValue('not a number'), ).toThrow(/Value is not a number/); }); test('parseLiteral', () => { expect(() => GraphQLNonNegativeFloat.parseLiteral( { value: 'not a number', kind: Kind.STRING, }, {}, ), ).toThrow( /Can only validate floating point numbers as non-negative floating point numbers but got a/, ); }); }); describe('NaN', () => { test('serialize', () => { expect(() => GraphQLNonNegativeFloat.serialize(Number.NaN)).toThrow( /Value is not a number/, ); }); // FIXME: Does nothing. No throw. Call doesn't even seem to get to the parseValue() function. 
// test('parseValue', () => { // expect(() => GraphQLNonNegativeFloat.parseValue(Number.NaN)).toThrow( // /Value is not a number/, // ); // }); test('parseLiteral', () => { expect(() => GraphQLNonNegativeFloat.parseLiteral( { value: Number.NaN.toString(), kind: Kind.STRING, }, {}, ), ).toThrow( /Can only validate floating point numbers as non-negative floating point numbers but got a/, ); }); }); }); }); describe('UnsignedFloat', () => { describe('valid', () => { describe('greater than zero', () => { describe('as float', () => { test('serialize', () => { expect(GraphQLUnsignedFloat.serialize(123.45)).toBe(123.45); }); test('parseValue', () => { expect(GraphQLUnsignedFloat.parseValue(123.45)).toBe(123.45); }); test('parseLiteral', () => { expect( GraphQLUnsignedFloat.parseLiteral( { value: '123.45', kind: Kind.FLOAT }, {}, ), ).toBe(123.45); }); }); describe('as string', () => { test('serialize', () => { expect(GraphQLUnsignedFloat.serialize('123.45')).toBe(123.45); }); test('parseValue', () => { expect(GraphQLUnsignedFloat.parseValue('123.45')).toBe(123.45); }); test('parseLiteral', () => { expect( GraphQLUnsignedFloat.parseLiteral( { value: '123.45', kind: Kind.FLOAT, }, {}, ), ).toBe(123.45); }); }); }); describe('zero', () => { describe('as float', () => { test('serialize', () => { expect(GraphQLUnsignedFloat.serialize(0.0)).toBe(0.0); }); test('parseValue', () => { expect(GraphQLUnsignedFloat.parseValue(0.0)).toBe(0.0); }); test('parseLiteral', () => { expect( GraphQLUnsignedFloat.parseLiteral( { value: '0.0', kind: Kind.FLOAT }, {}, ), ).toBe(0.0); }); }); describe('as string', () => { test('serialize', () => { expect(GraphQLUnsignedFloat.serialize('0.0')).toBe(0.0); }); test('parseValue', () => { expect(GraphQLUnsignedFloat.parseValue('0.0')).toBe(0.0); }); test('parseLiteral', () => { expect( GraphQLUnsignedFloat.parseLiteral( { value: '0.0', kind: Kind.FLOAT }, {}, ), ).toBe(0.0); }); }); }); }); describe('invalid', () => { describe('null', () => { 
test('serialize', () => { expect(() => GraphQLUnsignedFloat.serialize(null)).toThrow( /Value is not a number/, ); }); test('parseValue', () => { expect(() => GraphQLUnsignedFloat.parseValue(null)).toThrow( /Value is not a number/, ); }); test('parseLiteral', () => { expect(() => GraphQLUnsignedFloat.parseLiteral( { value: null, kind: Kind.FLOAT }, {}, ), ).toThrow(/Value is not a number/); }); }); describe('undefined', () => { test('serialize', () => { expect(() => GraphQLUnsignedFloat.serialize(undefined)).toThrow( /Value is not a number/, ); }); // FIXME: Does nothing. No throw. Call doesn't even seem to get to the parseValue() function. // test('parseValue', () => { // expect(() => GraphQLUnsignedFloat.parseValue(undefined)).toThrow( // /Value is not a number/, // ); // }); test('parseLiteral', () => { expect(() => GraphQLUnsignedFloat.parseLiteral( { value: undefined, kind: Kind.FLOAT }, {}, ), ).toThrow(/Value is not a number/); }); }); describe('less than zero', () => { describe('as float', () => { test('serialize', () => { expect(() => GraphQLUnsignedFloat.serialize(-1.0)).toThrow( /Value is not a non-negative number/, ); }); test('parseValue', () => { expect(() => GraphQLUnsignedFloat.parseValue(-1.0)).toThrow( /Value is not a non-negative number/, ); }); test('parseLiteral', () => { expect(() => GraphQLUnsignedFloat.parseLiteral( { value: '-1.0', kind: Kind.FLOAT }, {}, ), ).toThrow(/Value is not a non-negative number/); }); }); describe('as string', () => { test('serialize', () => { expect(() => GraphQLUnsignedFloat.serialize('-1.0')).toThrow( /Value is not a non-negative number/, ); }); test('parseValue', () => { expect(() => GraphQLUnsignedFloat.parseValue('-1.0')).toThrow( /Value is not a non-negative number/, ); }); test('parseLiteral', () => { expect(() => GraphQLUnsignedFloat.parseLiteral( { value: '-1.0', kind: Kind.FLOAT }, {}, ), ).toThrow(/Value is not a non-negative number/); }); }); }); describe('infinity', () => { test('serialize', () => { 
expect(() => GraphQLUnsignedFloat.serialize(Number.POSITIVE_INFINITY), ).toThrow(/Value is not a finite number/); }); test('parseValue', () => { expect(() => GraphQLUnsignedFloat.parseValue(Number.POSITIVE_INFINITY), ).toThrow(/Value is not a finite number/); }); test('parseLiteral', () => { expect(() => GraphQLUnsignedFloat.parseLiteral( { value: Number.POSITIVE_INFINITY.toString(), kind: Kind.FLOAT, }, {}, ), ).toThrow(/Value is not a finite number/); }); }); describe('not a number', () => { test('serialize', () => { expect(() => GraphQLUnsignedFloat.serialize('not a number')).toThrow( /Value is not a number/, ); }); test('parseValue', () => { expect(() => GraphQLUnsignedFloat.parseValue('not a number')).toThrow( /Value is not a number/, ); }); test('parseLiteral', () => { expect(() => GraphQLUnsignedFloat.parseLiteral( { value: 'not a number', kind: Kind.STRING, }, {}, ), ).toThrow( /Can only validate floating point numbers as non-negative floating point numbers but got a/, ); }); }); describe('NaN', () => { test('serialize', () => { expect(() => GraphQLUnsignedFloat.serialize(Number.NaN)).toThrow( /Value is not a number/, ); }); // FIXME: Does nothing. No throw. Call doesn't even seem to get to the parseValue() function. // test('parseValue', () => { // expect(() => GraphQLUnsignedFloat.parseValue(Number.NaN)).toThrow( // /Value is not a number/, // ); // }); test('parseLiteral', () => { expect(() => GraphQLUnsignedFloat.parseLiteral( { value: Number.NaN.toString(), kind: Kind.STRING, }, {}, ), ).toThrow( /Can only validate floating point numbers as non-negative floating point numbers but got a/, ); }); }); }); });
the_stack
'use strict'; import { OidcProfile, WickedApiScopes, WickedUserInfo, WickedPool, WickedSubscriptionInfo, WickedApi } from "wicked-sdk"; import { AuthRequest, ValidatedScopes, TokenRequest, AccessTokenCallback, AuthResponse, OAuth2Request, AccessToken } from "./types"; const async = require('async'); const { debug, info, warn, error } = require('portal-env').Logger('portal-auth:utils-oauth2'); import * as wicked from 'wicked-sdk'; const request = require('request'); import { failMessage, failError, failOAuth, makeError, makeOAuthError } from './utils-fail'; import { profileStore } from './profile-store'; import { utils } from './utils'; import { oauth2 } from '../kong-oauth2/oauth2'; import { WickedSubscriptionScopeModeType } from "wicked-sdk/dist/interfaces"; export class UtilsOAuth2 { constructor() { debug(`UtilsOAuth2()`); } private _apiScopes: { [apiId: string]: WickedApiScopes } = {}; public getApiScopes = async (apiId: string) => { debug(`getApiScopes(${apiId})`); const instance = this; // Check cache first if (this._apiScopes[apiId]) return this._apiScopes[apiId]; debug('getApiScopes: Not present in cache, fetching.'); const apiInfo = await wicked.getApi(apiId) as WickedApi; if (!apiInfo || !apiInfo.settings) throw new Error(`API ${apiId} does not have settings section`); debug('getApiScopes: Succeeded, storing.'); debug('api.settings.scopes: ' + JSON.stringify(apiInfo.settings.scopes)); instance._apiScopes[apiId] = apiInfo.settings.scopes || {}; return instance._apiScopes[apiId]; }; public validateAuthorizeRequest = async (authRequest: AuthRequest): Promise<WickedSubscriptionInfo> => { const instance = this; debug(`validateAuthorizeRequest(${authRequest})`); if (authRequest.response_type !== 'token' && authRequest.response_type !== 'code') throw makeError(`Invalid response_type ${authRequest.response_type}`, 400); if (!authRequest.client_id) throw makeError('Invalid or empty client_id.', 400); let subscriptionInfo: WickedSubscriptionInfo; try { 
subscriptionInfo = await instance.validateSubscription(authRequest); } catch (err) { // Otherwise this would return a JSON instead of a HTML error page. // See https://github.com/Haufe-Lexware/wicked.haufe.io/issues/137 delete err.oauthError; throw err; } const application = subscriptionInfo.application; if (!application.redirectUri) throw makeError('The application associated with the given client_id does not have a registered redirect_uri.', 400); if (authRequest.redirect_uri) { // Verify redirect_uri from application, has to match what is passed in const uri1 = utils.normalizeRedirectUri(authRequest.redirect_uri); let registeredUris = ''; let foundMatching = false; for (let i = 0; i < subscriptionInfo.application.redirectUris.length; ++i) { const uri2 = utils.normalizeRedirectUri(subscriptionInfo.application.redirectUris[i]); if (uri1 === uri2) { foundMatching = true; debug(`Found matching redirect_uri: ${uri2}`); } if (registeredUris) { registeredUris += ', '; } registeredUris += uri2; } if (!foundMatching) { error(`Received redirect_uri: ${uri1}`); error(`Received redirect_uri is not any of ${registeredUris}`); throw makeError('The provided redirect_uri does not match any registered redirect_uri', 400); } } else { // https://tools.ietf.org/html/rfc6749#section-4.1.1 // We will pick one (the first one in case we have multiple ones) authRequest.redirect_uri = subscriptionInfo.application.redirectUri; } // Now we have a redirect_uri; we can now make use of failOAuth // Check for PKCE for public apps using the authorization code grant if (authRequest.response_type === 'code' && application.confidential !== true) { if (!authRequest.code_challenge) throw makeError('the given client is a public client; it must present a code_challenge (PKCE, RFC7636) to use the authorization code grant.', 400); if (!authRequest.code_challenge_method) authRequest.code_challenge_method = 'plain'; // Default if (authRequest.code_challenge_method !== 'plain' && 
authRequest.code_challenge_method !== 'S256') throw makeError('unsupported code_challenge_method; expected "plain" or "S256".', 400); } // Success return subscriptionInfo; }; public validateSubscription = async (oauthRequest: OAuth2Request): Promise<WickedSubscriptionInfo> => { debug('validateSubscription()'); try { const subsInfo = await wicked.getSubscriptionByClientId(oauthRequest.client_id, oauthRequest.api_id) as WickedSubscriptionInfo; // Do we have a trusted subscription? let trusted = false; if (subsInfo.subscription && subsInfo.subscription.trusted) { debug('validateAuthorizeRequest: Trusted subscription detected.'); // Yes, note that in the authRequest trusted = true; } if (!subsInfo.application || !subsInfo.application.id) throw makeOAuthError(500, 'server_error', 'Subscription information does not contain a valid application id'); subsInfo.subscription.trusted = trusted; oauthRequest.app_id = subsInfo.application.id; oauthRequest.app_name = subsInfo.application.name; return subsInfo; } catch (err) { throw makeOAuthError(400, 'invalid_request', 'could not validate client_id and API subscription', err); } }; public validateApiScopes = async (apiId: string, scope: string, subscriptionInfo: WickedSubscriptionInfo): Promise<ValidatedScopes> => { debug(`validateApiScopes(${apiId}, ${scope})`); const instance = this; const subIsTrusted = subscriptionInfo.subscription.trusted; const apiScopes = await instance.getApiScopes(apiId); // const apiInfo = await utils.getApiInfoAsync(apiId); let requestScope = scope; if (!requestScope) { debug('validateApiScopes: No scopes requested.'); requestScope = ''; } let scopes = [] as string[]; if (requestScope) { if (requestScope.indexOf(' ') > 0) scopes = requestScope.split(' '); else if (requestScope.indexOf(',') > 0) scopes = requestScope.split(','); else if (requestScope.indexOf(';') > 0) scopes = requestScope.split(';') else scopes = [requestScope]; debug(scopes); } else { scopes = []; } const validatedScopes = [] as 
string[]; // Pass upstream if we changed the scopes (e.g. for a trusted application) let scopeDiffers = false; debug('validateApiScopes: Trusted subscription.'); // No scopes requested? Default to all scopes. if (subIsTrusted && scopes.length === 0) { // apiScopes is a map of scopes for (let aScope in apiScopes) { validatedScopes.push(aScope); } scopeDiffers = true; } else { debug('validateApiScopes: Non-trusted subscription, or scope passed in.'); const validScopes = []; for (let i = 0; i < scopes.length; ++i) { const thisScope = scopes[i]; if (!apiScopes[thisScope]) throw makeError(`Invalid or unknown scope "${thisScope}".`, 400); validScopes.push(thisScope); } // Now check for allowed scopes const allowedScopesMode = subscriptionInfo.subscription.allowedScopesMode; debug(`Allowed scopes mode: ${allowedScopesMode}, allowed scopes: ${subscriptionInfo.subscription.allowedScopes.toString()}`); let allowedScopes: any = {}; if (allowedScopesMode === WickedSubscriptionScopeModeType.All) allowedScopes = apiScopes; else if (subscriptionInfo.subscription.allowedScopesMode === WickedSubscriptionScopeModeType.None) allowedScopes = {}; else if (subscriptionInfo.subscription.allowedScopesMode === WickedSubscriptionScopeModeType.Select) subscriptionInfo.subscription.allowedScopes.forEach(s => allowedScopes[s] = s); // Above we checked whether the scopes which were requested are part of the API definition. // Here we check whether there is information on the subscription whether a specific scope is allowed // for a specific application/subscription. The auth server will not *fail* if there are non-allowed // scopes, but rather just strip them off, and return a reduced scope. 
for (let i = 0; i < validScopes.length; ++i) { const thisScope = scopes[i]; if (!allowedScopes[thisScope]) { debug(`Filtering out non-allowed scope ${thisScope}`); scopeDiffers = true; } else validatedScopes.push(thisScope); } } debug(`validated Scopes: ${validatedScopes}`); return { scopeDiffers: scopeDiffers, validatedScopes: validatedScopes }; }; public makeTokenRequest(req, apiId: string, authMethodId: string): TokenRequest { // Gather parameters from body. Note that not all parameters // are used in all flows. const tokenRequest = { api_id: apiId, auth_method: req.app.get('server_name') + ':' + authMethodId, grant_type: req.body.grant_type, code: req.body.code, //redirect_uri: req.body.redirect_uri, client_id: req.body.client_id, client_secret: req.body.client_secret, scope: req.body.scope, username: req.body.username, password: req.body.password, refresh_token: req.body.refresh_token, // PKCE code_verifier: req.body.code_verifier }; if (!tokenRequest.client_id) { // Check for Basic Auth const authHeader = req.get('Authorization'); if (authHeader) { let basicAuth = authHeader; if (authHeader.toLowerCase().startsWith('basic')) { const spacePos = authHeader.indexOf(' '); basicAuth = authHeader.substring(spacePos + 1); } // Try to decode base 64 to get client_id and client_secret try { const idAndSecret = utils.decodeBase64(basicAuth); // client_id:client_secret const colonIndex = idAndSecret.indexOf(':'); if (colonIndex > 0) { tokenRequest.client_id = idAndSecret.substring(0, colonIndex); tokenRequest.client_secret = idAndSecret.substring(colonIndex + 1); } else { warn('makeTokenRequest: Received invalid client_id and client_secret in as Basic Auth') } } catch (err) { error('Received Basic Auth credentials, but they are invalid') error(err); } } } return tokenRequest; }; public validateTokenRequest = async (tokenRequest: TokenRequest) => { debug(`validateTokenRequest(${tokenRequest})`); if (!tokenRequest.grant_type) throw makeOAuthError(400, 'invalid_request', 
'grant_type is missing.'); // Different for different grant_types if (tokenRequest.grant_type === 'client_credentials') { if (!tokenRequest.client_id) throw makeOAuthError(400, 'invalid_client', 'client_id is missing.'); if (!tokenRequest.client_secret) throw makeOAuthError(400, 'invalid_client', 'client_secret is missing.'); return; } else if (tokenRequest.grant_type === 'authorization_code') { if (!tokenRequest.code) throw makeOAuthError(400, 'invalid_request', 'code is missing.'); if (!tokenRequest.client_id) throw makeOAuthError(400, 'invalid_client', 'client_id is missing.'); if (!tokenRequest.client_secret && !tokenRequest.code_verifier) throw makeOAuthError(400, 'invalid_client', 'client_secret or code_verifier is missing.'); } else if (tokenRequest.grant_type === 'password') { if (!tokenRequest.client_id) throw makeOAuthError(400, 'invalid_client', 'client_id is missing.'); // For confidential clients, the client_secret will also be checked (by the OAuth2 adapter) if (!tokenRequest.username) throw makeOAuthError(400, 'invalid_request', 'username is missing.'); if (!tokenRequest.username) throw makeOAuthError(400, 'invalid_request', 'password is missing.'); // TODO: scopes } else if (tokenRequest.grant_type === 'refresh_token') { if (!tokenRequest.client_id) throw makeOAuthError(400, 'invalid_client', 'client_id is missing.'); // For confidential clients, the client_secret will also be checked (by the OAuth2 adapter) if (!tokenRequest.refresh_token) throw makeOAuthError(400, 'invalid_request', 'refresh_token is missing.'); } else { throw makeOAuthError(400, 'unsupported_grant_type', `The grant_type '${tokenRequest.grant_type}' is not supported or is unknown.`); } return; }; public tokenClientCredentials = async (tokenRequest: TokenRequest): Promise<AccessToken> => { debug('tokenClientCredentials()'); const instance = this; const subscriptionInfo = await instance.validateSubscription(tokenRequest); const scopeInfo = await 
instance.validateApiScopes(tokenRequest.api_id, tokenRequest.scope, subscriptionInfo);
tokenRequest.scope = scopeInfo.validatedScopes;
tokenRequest.scope_differs = scopeInfo.scopeDiffers;
// We can just pass this on to the wicked SDK.
return await oauth2.tokenAsync(tokenRequest);
};

// Promise wrapper around the callback-style tokenAuthorizationCode_ below.
public tokenAuthorizationCode = async (tokenRequest: TokenRequest): Promise<AccessToken> => {
    const instance = this;
    return new Promise<AccessToken>(function (resolve, reject) {
        instance.tokenAuthorizationCode_(tokenRequest, function (err, accessToken) {
            err ? reject(err) : resolve(accessToken);
        })
    });
}

// Implements the authorization_code grant: loads the profile stored under the code,
// restores the PKCE parameters onto the token request, exchanges the code for a
// token via the wicked SDK, then (in parallel) registers the token with the profile
// and deletes the now-used code from the profile store.
private tokenAuthorizationCode_ = (tokenRequest: TokenRequest, callback: AccessTokenCallback) => {
    debug('tokenAuthorizationCode()');
    profileStore.retrieve(tokenRequest.code, (err, profile) => {
        if (err)
            return callback(err);
        // No stored profile means the code is unknown (or already consumed).
        if (!profile)
            return callback(makeOAuthError(401, 'invalid_grant', 'invalid authorization code, could not retrieve information on code'));
        // Restore the PKCE challenge captured at authorize time so it can be verified.
        tokenRequest.code_challenge = profile.code_challenge;
        tokenRequest.code_challenge_method = profile.code_challenge_method;
        tokenRequest.scope_differs = profile.scope_differs;
        // Do not let the PKCE values leak into the session profile attached to the token.
        delete profile.code_challenge;
        delete profile.code_challenge_method;
        // We can just pass this on to the wicked SDK, and then register the token.
        oauth2.token(tokenRequest, (err, accessToken) => {
            if (err)
                return callback(err);
            accessToken.session_data = profile;
            // We now have to register the access token with the profile
            // Also delete the code from the redis, it's not needed anymore
            async.parallel({
                deleteToken: (callback) => {
                    // We'll ignore what happens here.
                    // NOTE(review): deleteTokenOrCode is fired without awaiting its
                    // result — deliberate best-effort cleanup, errors are dropped.
                    profileStore.deleteTokenOrCode(tokenRequest.code);
                    return callback(null);
                },
                updateToken: (callback) => {
                    profileStore.registerTokenOrCode(accessToken, tokenRequest.api_id, profile, (err) => {
                        if (err)
                            return callback(err);
                        return callback(null, accessToken);
                    });
                }
            }, (err, results) => {
                if (err)
                    return callback(err);
                return callback(null, accessToken);
            });
        });
    });
}

// OIDC-style profile endpoint: extracts the bearer token (with or without the
// "Bearer " prefix) from the Authorization header and returns the stored profile.
public getProfile(req, res, next) {
    debug(`/profile`);
    // OIDC profile end point, we need this. This is nice. Yeah.
    // res.status(500).json({ message: 'Not yet implemented.' });
    const bearerToken = req.get('authorization');
    if (!bearerToken)
        return failMessage(401, 'Unauthorized', next);
    let accessToken = null;
    if (bearerToken.indexOf(' ') > 0) {
        // assume Bearer xxx
        let tokenSplit = bearerToken.split(' ');
        if (tokenSplit.length !== 2)
            return failOAuth(400, 'invalid_request', 'Invalid Bearer token.', next);
        // NOTE(review): re-splits instead of reusing tokenSplit[1], and the scheme
        // is never checked to actually equal "Bearer" — confirm that is intended.
        accessToken = bearerToken.split(' ')[1];
    } else {
        // Assume without "Bearer", just the access token
        accessToken = bearerToken;
    }
    accessToken = accessToken.trim();
    // Read from profile store.
    profileStore.retrieve(accessToken, (err, profile) => {
        if (err || !profile)
            return failOAuth(404, 'invalid_request', 'Not found', next);
        return res.status(200).json(profile);
    });
}

// Maps a wicked user record to the minimal set of standard OIDC claims
// (sub, email, email_verified).
public wickedUserInfoToOidcProfile(userInfo: WickedUserInfo): OidcProfile {
    debug('wickedUserInfoToOidcProfile()');
    // Simple mapping to some basic OIDC profile claims
    const oidcProfile = {
        sub: userInfo.id,
        email: userInfo.email,
        email_verified: userInfo.validated
    };
    return oidcProfile;
}

// Builds the full OIDC profile for a user: base claims from the user record, plus
// any registration properties of the pool that declare an oidcClaim mapping.
// Loads user and pool information in parallel, then merges.
public makeOidcProfile = (poolId: string, authResponse: AuthResponse, regInfo, callback) => {
    debug(`makeOidcProfile(${poolId}, ${authResponse.userId})`);
    const userId = authResponse.userId;
    const instance = this;
    // OK; we might be able to get the information from somewhere else, but let's keep
    // it simple.
    async.parallel({
        userInfo: callback => wicked.getUser(userId, callback),
        poolInfo: callback => utils.getPoolInfo(poolId, callback)
    }, function (err, results) {
        if (err)
            return callback(err);
        const userInfo = results.userInfo as WickedUserInfo;
        const poolInfo = results.poolInfo as WickedPool;
        const profile = instance.wickedUserInfoToOidcProfile(userInfo);
        // Now let's see what we can map from the registration
        for (let i = 0; i < poolInfo.properties.length; ++i) {
            const propInfo = poolInfo.properties[i];
            const propName = propInfo.id;
            // Skip properties the user did not fill in.
            if (!regInfo[propName])
                continue;
            // If the property doesn't include a mapping to an OIDC claim, we can't use it
            if (!propInfo.oidcClaim)
                continue;
            // Now assign the value to the OIDC claim in the profile
            profile[propInfo.oidcClaim] = regInfo[propName];
        }
        debug('makeOidcProfile() assembled the following profile:');
        debug(profile);
        return callback(null, profile);
    });
}
};

export const utilsOAuth2 = new UtilsOAuth2();
the_stack
// launchAsync.ts needs to read VERSION (which is passed in from package.json via webpack)
(window as any).VERSION = '000.000.000';

import { launchAsync } from '../src/immersive-reader-sdk';
import { isValidSubdomain } from '../src/launchAsync';
import { Content } from '../src/content';
import { CookiePolicy, Options } from '../src/options';

// Tests for launchAsync: argument validation, iframe/webview creation, and the
// query-string options it encodes into the Immersive Reader URL. The Immersive
// Reader's postMessage handshake is mocked via window.postMessage (see 'succeeds').
describe('launchAsync tests', () => {
    // Shared fixtures — none of these are real credentials.
    const SampleToken: string = 'not-a-real-token';
    const SampleSubdomain: string = 'not-a-real-subdomain';
    const SampleContent: Content = {
        chunks: [
            {
                content: 'Hello, world'
            }
        ]
    };

    it('fails due to missing token', async () => {
        expect.assertions(1);
        try {
            await launchAsync(null, SampleSubdomain, SampleContent);
        } catch (error) {
            expect(error.code).toBe('BadArgument');
        }
    });

    it('fails due to missing subdomain', async () => {
        expect.assertions(1);
        try {
            await launchAsync(SampleToken, null, SampleContent);
        } catch (error) {
            expect(error.code).toBe('InvalidSubdomain');
        }
    });

    it('fails due to missing content', async () => {
        expect.assertions(1);
        try {
            await launchAsync(SampleToken, SampleSubdomain, null);
        } catch (error) {
            expect(error.code).toBe('BadArgument');
        }
    });

    it('fails due to missing chunks', async () => {
        expect.assertions(1);
        try {
            await launchAsync(SampleToken, SampleSubdomain, { chunks: null });
        } catch (error) {
            expect(error.code).toBe('BadArgument');
        }
    });

    it('fails due to empty chunks', async () => {
        expect.assertions(1);
        try {
            await launchAsync(SampleToken, SampleSubdomain, { chunks: [] });
        } catch (error) {
            expect(error.code).toBe('BadArgument');
        }
    });

    it('succeeds', () => {
        expect.assertions(1);
        const launchPromise = launchAsync(SampleToken, SampleSubdomain, SampleContent)
            .then(iframe => { expect(iframe).not.toBeNull(); });
        // launchAsync creates an iframe which points to the Immersive Reader,
        // which then sends a postMessage to the parent window with the message
        // 'ImmersiveReader-LaunchResponse:{"success":true}'. This mocks that behavior.
        window.postMessage('ImmersiveReader-LaunchResponse:{"success":true}', '*');
        return launchPromise;
    });

    it('sets the display language', async () => {
        expect.assertions(1);
        const options: Options = { uiLang: 'zh-Hans' };
        const launchPromise = launchAsync(SampleToken, SampleSubdomain, SampleContent, options);
        window.postMessage('ImmersiveReader-LaunchResponse:{"success":true}', '*');
        const response = await launchPromise;
        const iframe = <HTMLIFrameElement>response.container.firstElementChild;
        // uiLang is passed to the reader via the omkt query parameter.
        expect(iframe.src.toLowerCase()).toMatch('omkt=zh-hans');
    });

    it('without setting the display language', async () => {
        expect.assertions(1);
        const options: Options = { uiLang: '' };
        const launchPromise = launchAsync(SampleToken, SampleSubdomain, SampleContent, options);
        window.postMessage('ImmersiveReader-LaunchResponse:{"success":true}', '*');
        const response = await launchPromise;
        const iframe = <HTMLIFrameElement>response.container.firstElementChild;
        expect(iframe.src).not.toContain('&omkt=');
    });

    it('sets the z-index of the iframe', async () => {
        const zIndex = 12345;
        expect.assertions(1);
        const options: Options = { uiZIndex: zIndex };
        const launchPromise = launchAsync(SampleToken, SampleSubdomain, SampleContent, options);
        window.postMessage('ImmersiveReader-LaunchResponse:{"success":true}', '*');
        const response = await launchPromise;
        expect(response.container.style.zIndex).toEqual('' + zIndex);
    });

    it('launches with default z-index', async () => {
        expect.assertions(1);
        const launchPromise = launchAsync(SampleToken, SampleSubdomain, SampleContent);
        window.postMessage('ImmersiveReader-LaunchResponse:{"success":true}', '*');
        const response = await launchPromise;
        expect(response.container.style.zIndex).toEqual('1000'); // Default is 1000;
    });

    it('launches with a webview tag instead of an iframe', async () => {
        expect.assertions(1);
        const options: Options = { useWebview: true };
        const launchPromise = launchAsync(SampleToken, SampleSubdomain, SampleContent, options);
        window.postMessage('ImmersiveReader-LaunchResponse:{"success":true}', '*');
        const response = await launchPromise;
        const firstElementTagName = response.container.firstElementChild.tagName;
        expect(firstElementTagName.toLowerCase()).toBe("webview");
    });

    it('fails to launch due to timeout', async () => {
        jest.useFakeTimers();
        expect.assertions(1);
        const launchPromise = launchAsync(SampleToken, SampleSubdomain, SampleContent);
        // Skip forward in time to trigger timeout logic
        jest.runAllTimers();
        try {
            await launchPromise;
        } catch (error) {
            expect(error.code).toBe('Timeout');
        }
    });

    it('fails to launch due to expired token', async () => {
        expect.assertions(2);
        const launchPromise = launchAsync(SampleToken, SampleSubdomain, SampleContent);
        // Mock the reader responding with a failure payload instead of success.
        window.postMessage('ImmersiveReader-LaunchResponse:{"success":false, "errorCode":"TokenExpired"}', '*');
        try {
            await launchPromise;
        } catch (error) {
            expect(error.code).toBe('TokenExpired');
            expect(error.message).toBe('The access token supplied is expired.');
        }
    });

    it('launches with a custom subdomain', async () => {
        expect.assertions(1);
        // A customDomain overrides the subdomain entirely (subdomain may be null).
        const options: Options = { customDomain: 'https://foo.com/' };
        const launchPromise = launchAsync(SampleToken, null, SampleContent, options);
        window.postMessage('ImmersiveReader-LaunchResponse:{"success":true}', '*');
        const response = await launchPromise;
        const iframe = <HTMLIFrameElement>response.container.firstElementChild;
        expect(iframe.src.toLowerCase()).toMatch('https://foo.com/');
    });

    it('launches with a custom subdomain 2', async () => {
        expect.assertions(1);
        // An empty customDomain falls back to the regular subdomain-based URL.
        const options: Options = { customDomain: '' };
        const launchPromise = launchAsync(SampleToken, SampleSubdomain, SampleContent, options);
        window.postMessage('ImmersiveReader-LaunchResponse:{"success":true}', '*');
        const response = await launchPromise;
        const iframe = <HTMLIFrameElement>response.container.firstElementChild;
        expect(iframe.src.toLowerCase()).toContain(`https://${SampleSubdomain}.cognitiveservices.azure.com/immersivereader/webapp/v1.0/reader`);
    });

    it('launches with exit button hidden', async () => {
        expect.assertions(1);
        const options: Options = { hideExitButton: true };
        const launchPromise = launchAsync(SampleToken, SampleSubdomain, SampleContent, options);
        window.postMessage('ImmersiveReader-LaunchResponse:{"success":true}', '*');
        const response = await launchPromise;
        const iframe = <HTMLIFrameElement>response.container.firstElementChild;
        expect(iframe.src).toContain('&hideExitButton=true');
    });

    it('launches with exit button displayed', async () => {
        expect.assertions(1);
        const options: Options = { hideExitButton: false };
        const launchPromise = launchAsync(SampleToken, SampleSubdomain, SampleContent, options);
        window.postMessage('ImmersiveReader-LaunchResponse:{"success":true}', '*');
        const response = await launchPromise;
        const iframe = <HTMLIFrameElement>response.container.firstElementChild;
        expect(iframe.src).not.toContain('&hideExitButton=true');
    });

    it('launches with full screen button displayed', async () => {
        expect.assertions(1);
        const options: Options = { allowFullscreen: true };
        const launchPromise = launchAsync(SampleToken, SampleSubdomain, SampleContent, options);
        window.postMessage('ImmersiveReader-LaunchResponse:{"success":true}', '*');
        const response = await launchPromise;
        const iframe = <HTMLIFrameElement>response.container.firstElementChild;
        // allowFullscreen controls the iframe's allowfullscreen attribute, not the URL.
        expect(iframe.getAttribute('allowfullscreen')).not.toBeNull();
    });

    it('launches with full screen button hidden', async () => {
        expect.assertions(1);
        const options: Options = { allowFullscreen: false };
        const launchPromise = launchAsync(SampleToken, SampleSubdomain, SampleContent, options);
        window.postMessage('ImmersiveReader-LaunchResponse:{"success":true}', '*');
        const response = await launchPromise;
        const iframe = <HTMLIFrameElement>response.container.firstElementChild;
        expect(iframe.getAttribute('allowfullscreen')).toBeNull();
    });

    it('launches with Cookie Policy enabled', async () => {
        expect.assertions(1);
        const options: Options = { cookiePolicy: CookiePolicy.Enable };
        const launchPromise = launchAsync(SampleToken, SampleSubdomain, SampleContent, options);
        window.postMessage('ImmersiveReader-LaunchResponse:{"success":true}', '*');
        const response = await launchPromise;
        const iframe = <HTMLIFrameElement>response.container.firstElementChild;
        expect(iframe.src).toContain('&cookiePolicy=enable');
    });

    it('launches with Cookie Policy disabled', async () => {
        expect.assertions(1);
        const options: Options = { cookiePolicy: CookiePolicy.Disable };
        const launchPromise = launchAsync(SampleToken, SampleSubdomain, SampleContent, options);
        window.postMessage('ImmersiveReader-LaunchResponse:{"success":true}', '*');
        const response = await launchPromise;
        const iframe = <HTMLIFrameElement>response.container.firstElementChild;
        expect(iframe.src).toContain('&cookiePolicy=disable');
    });

    it('launches with onExit callback', async () => {
        jest.useRealTimers();
        expect.assertions(1);
        const cbOnExit = jest.fn(() => { });
        const options: Options = { onExit: () => { cbOnExit(); } };
        const launchPromise = launchAsync(SampleToken, SampleSubdomain, SampleContent, options);
        window.postMessage('ImmersiveReader-LaunchResponse:{"success":true}', '*');
        await launchPromise;
        // The reader signals exit with a dedicated postMessage.
        window.postMessage('ImmersiveReader-Exit', '*');
        // this is to yield this thread of execution to allow the exit message to get processed
        await new Promise(resolve => { setTimeout(resolve, 1); });
        expect(cbOnExit).toHaveBeenCalledTimes(1);
    });

    it('launches with preferences not set', async () => {
        expect.assertions(1);
        const options: Options = { preferences: null };
        const launchPromise = launchAsync(SampleToken, SampleSubdomain, SampleContent, options)
            .then(iframe => { expect(iframe).not.toBeNull(); });
        window.postMessage('ImmersiveReader-LaunchResponse:{"success":true}', '*');
        return launchPromise;
    });

    it('launches with preferences set', async () => {
        expect.assertions(1);
        const options: Options = { preferences: 'foo' };
        const launchPromise = launchAsync(SampleToken, SampleSubdomain, SampleContent, options)
            .then(iframe => { expect(iframe).not.toBeNull(); });
        window.postMessage('ImmersiveReader-LaunchResponse:{"success":true}', '*');
        return launchPromise;
    });

    it('launches with onPreferencesChanged set', async () => {
        jest.useRealTimers();
        expect.assertions(1);
        // The assertion lives inside the callback; expect.assertions(1) above
        // guarantees it actually fired.
        const options: Options = { onPreferencesChanged: (value) => { expect(value).toBe('hello world'); } };
        const launchPromise = launchAsync(SampleToken, SampleSubdomain, SampleContent, options);
        window.postMessage('ImmersiveReader-LaunchResponse:{"success":true}', '*');
        await launchPromise;
        window.postMessage('ImmersiveReader-Preferences:hello world', '*');
        // this is to yield this thread of execution to allow the exit message to get processed
        await new Promise(resolve => { setTimeout(resolve, 1); });
    });
});

// isValidSubdomain: Azure resource subdomains are ASCII alphanumerics and inner
// dashes only — no accents, spaces, dots, or leading/trailing dashes.
describe('Utility method isValidSubdomain', () => {
    it('should return false', () => {
        expect(isValidSubdomain(null)).toBe(false);
        expect(isValidSubdomain(undefined)).toBe(false);
        expect(isValidSubdomain('')).toBe(false);
        expect(isValidSubdomain('é')).toBe(false);
        expect(isValidSubdomain('hasaccént')).toBe(false);
        expect(isValidSubdomain('1é2')).toBe(false);
        expect(isValidSubdomain('É')).toBe(false);
        expect(isValidSubdomain('Ã')).toBe(false);
        expect(isValidSubdomain('has space')).toBe(false);
        expect(isValidSubdomain('has.period')).toBe(false);
        expect(isValidSubdomain(' startswithspace')).toBe(false);
        expect(isValidSubdomain('endswithspace ')).toBe(false);
        expect(isValidSubdomain('-startswithdash')).toBe(false);
        expect(isValidSubdomain('endswithdash-')).toBe(false);
    });
    it('should return true', () => {
        expect(isValidSubdomain('valid')).toBe(true);
        expect(isValidSubdomain('valid10with2numbers')).toBe(true);
        expect(isValidSubdomain('1234')).toBe(true);
    });
});

const fs = require('fs');
describe('Verify SDK 
version is valid', () => { it('check version', () => { const packageJson: string = fs.readFileSync("package.json", "utf8"); const sdkVersion: string = JSON.parse(packageJson).version; console.log(`SDK version: ${sdkVersion}`); expect(isValidSDKVersion(sdkVersion)).toBe(true); }); }); // sdk version must be in format xxx.xxx.xxx (each version segment between 1 and 3 digits) function isValidSDKVersion(sdkVersion: string): boolean { if (!sdkVersion) { return false; } const regExp = /^[0-9]{1,3}.[0-9]{1,3}.[0-9]{1,3}$/; return regExp.test(sdkVersion); }
the_stack
import { AbstractView, Component, ComponentClass, ReactElement, ReactInstance, ClassType, DOMElement, SFCElement, CElement, ReactHTMLElement, DOMAttributes, SFC } from 'react';
import * as ReactTestUtils from ".";

// Ambient type declarations for react-dom/test-utils (DefinitelyTyped style).

// Base event fields shared by all simulated events; everything is optional so
// tests only specify what they care about.
export interface OptionalEventProperties {
    bubbles?: boolean;
    cancelable?: boolean;
    currentTarget?: EventTarget;
    defaultPrevented?: boolean;
    eventPhase?: number;
    isTrusted?: boolean;
    nativeEvent?: Event;
    preventDefault?(): void;
    stopPropagation?(): void;
    target?: EventTarget;
    timeStamp?: Date;
    type?: string;
}

// Union of per-event-type fields (mouse, keyboard, touch, wheel, clipboard, …)
// accepted by the Simulate.* helpers.
export interface SyntheticEventData extends OptionalEventProperties {
    altKey?: boolean;
    button?: number;
    buttons?: number;
    clientX?: number;
    clientY?: number;
    changedTouches?: TouchList;
    charCode?: number;
    clipboardData?: DataTransfer;
    ctrlKey?: boolean;
    deltaMode?: number;
    deltaX?: number;
    deltaY?: number;
    deltaZ?: number;
    detail?: number;
    getModifierState?(key: string): boolean;
    key?: string;
    keyCode?: number;
    locale?: string;
    location?: number;
    metaKey?: boolean;
    pageX?: number;
    pageY?: number;
    relatedTarget?: EventTarget;
    repeat?: boolean;
    screenX?: number;
    screenY?: number;
    shiftKey?: boolean;
    targetTouches?: TouchList;
    touches?: TouchList;
    view?: AbstractView;
    which?: number;
}

// Signature of every Simulate.* helper: dispatch on a DOM element or component
// instance, with optional synthetic event data.
export type EventSimulator = (element: Element | Component<any>, eventData?: SyntheticEventData) => void;

export interface MockedComponentClass {
    new (props: any): any;
}

export interface ShallowRenderer {
    /**
     * After `shallowRenderer.render()` has been called, returns shallowly rendered output.
     */
    getRenderOutput<E extends ReactElement>(): E;
    /**
     * Similar to `ReactDOM.render` but it doesn't require DOM and only renders a single level deep.
     */
    render(element: ReactElement, context?: any): void;
    unmount(): void;
}

/**
 * Simulate an event dispatch on a DOM node with optional `eventData` event data.
 * `Simulate` has a method for every event that React understands.
 */
export namespace Simulate {
    const abort: EventSimulator;
    const animationEnd: EventSimulator;
    const animationIteration: EventSimulator;
    const animationStart: EventSimulator;
    const blur: EventSimulator;
    const canPlay: EventSimulator;
    const canPlayThrough: EventSimulator;
    const change: EventSimulator;
    const click: EventSimulator;
    const compositionEnd: EventSimulator;
    const compositionStart: EventSimulator;
    const compositionUpdate: EventSimulator;
    const contextMenu: EventSimulator;
    const copy: EventSimulator;
    const cut: EventSimulator;
    const doubleClick: EventSimulator;
    const drag: EventSimulator;
    const dragEnd: EventSimulator;
    const dragEnter: EventSimulator;
    const dragExit: EventSimulator;
    const dragLeave: EventSimulator;
    const dragOver: EventSimulator;
    const dragStart: EventSimulator;
    const drop: EventSimulator;
    const durationChange: EventSimulator;
    const emptied: EventSimulator;
    const encrypted: EventSimulator;
    const ended: EventSimulator;
    const error: EventSimulator;
    const focus: EventSimulator;
    const input: EventSimulator;
    const invalid: EventSimulator;
    const keyDown: EventSimulator;
    const keyPress: EventSimulator;
    const keyUp: EventSimulator;
    const load: EventSimulator;
    const loadStart: EventSimulator;
    const loadedData: EventSimulator;
    const loadedMetadata: EventSimulator;
    const mouseDown: EventSimulator;
    const mouseEnter: EventSimulator;
    const mouseLeave: EventSimulator;
    const mouseMove: EventSimulator;
    const mouseOut: EventSimulator;
    const mouseOver: EventSimulator;
    const mouseUp: EventSimulator;
    const paste: EventSimulator;
    const pause: EventSimulator;
    const play: EventSimulator;
    const playing: EventSimulator;
    const progress: EventSimulator;
    const rateChange: EventSimulator;
    const scroll: EventSimulator;
    const seeked: EventSimulator;
    const seeking: EventSimulator;
    const select: EventSimulator;
    const stalled: EventSimulator;
    const submit: EventSimulator;
    const suspend: EventSimulator;
    const timeUpdate: EventSimulator;
    const touchCancel: EventSimulator;
    const touchEnd: EventSimulator;
    const touchMove: EventSimulator;
    const touchStart: EventSimulator;
    const transitionEnd: EventSimulator;
    const volumeChange: EventSimulator;
    const waiting: EventSimulator;
    const wheel: EventSimulator;
}

/**
 * Render a React element into a detached DOM node in the document. __This function requires a DOM__.
 */
export function renderIntoDocument<T extends Element>(
    element: DOMElement<any, T>): T;
export function renderIntoDocument(
    element: SFCElement<any>): void;
// If we replace `P` with `any` in this overload, then some tests fail because
// calls to `renderIntoDocument` choose the last overload on the
// subtype-relation pass and get an undesirably broad return type. Using `P`
// allows this overload to match on the subtype-relation pass.
export function renderIntoDocument<P, T extends Component<P>>(
    element: CElement<P, T>): T;
export function renderIntoDocument<P>(
    element: ReactElement<P>): Component<P> | Element | void;

/**
 * Pass a mocked component module to this method to augment it with useful methods that allow it to
 * be used as a dummy React component. Instead of rendering as usual, the component will become
 * a simple `<div>` (or other tag if `mockTagName` is provided) containing any provided children.
 */
export function mockComponent(
    mocked: MockedComponentClass, mockTagName?: string): typeof ReactTestUtils;

/**
 * Returns `true` if `element` is any React element.
 */
export function isElement(element: any): boolean;

/**
 * Returns `true` if `element` is a React element whose type is of a React `componentClass`.
 */
export function isElementOfType<T extends HTMLElement>(
    element: ReactElement, type: string): element is ReactHTMLElement<T>;

/**
 * Returns `true` if `element` is a React element whose type is of a React `componentClass`.
 */
export function isElementOfType<P extends DOMAttributes<{}>, T extends Element>(
    element: ReactElement, type: string): element is DOMElement<P, T>;
/**
 * Returns `true` if `element` is a React element whose type is of a React `componentClass`.
 */
export function isElementOfType<P>(
    element: ReactElement, type: SFC<P>): element is SFCElement<P>;
/**
 * Returns `true` if `element` is a React element whose type is of a React `componentClass`.
 */
export function isElementOfType<P, T extends Component<P>, C extends ComponentClass<P>>(
    element: ReactElement, type: ClassType<P, T, C>): element is CElement<P, T>;

/**
 * Returns `true` if `instance` is a DOM component (such as a `<div>` or `<span>`).
 */
export function isDOMComponent(instance: ReactInstance): instance is Element;
/**
 * Returns `true` if `instance` is a user-defined component, such as a class or a function.
 */
export function isCompositeComponent(instance: ReactInstance): instance is Component<any>;
/**
 * Returns `true` if `instance` is a component whose type is of a React `componentClass`.
 */
export function isCompositeComponentWithType<T extends Component<any>, C extends ComponentClass<any>>(
    instance: ReactInstance, type: ClassType<any, T, C>): boolean;

/**
 * Traverse all components in `tree` and accumulate all components where
 * `test(component)` is `true`. This is not that useful on its own, but it's used
 * as a primitive for other test utils.
 */
export function findAllInRenderedTree(
    root: Component<any>,
    fn: (i: ReactInstance) => boolean): ReactInstance[];

/**
 * Finds all DOM elements of components in the rendered tree that are
 * DOM components with the class name matching `className`.
 */
export function scryRenderedDOMComponentsWithClass(
    root: Component<any>,
    className: string): Element[];
/**
 * Like `scryRenderedDOMComponentsWithClass()` but expects there to be one result,
 * and returns that one result, or throws exception if there is any other
 * number of matches besides one.
 */
export function findRenderedDOMComponentWithClass(
    root: Component<any>,
    className: string): Element;

/**
 * Finds all DOM elements of components in the rendered tree that are
 * DOM components with the tag name matching `tagName`.
 */
export function scryRenderedDOMComponentsWithTag(
    root: Component<any>,
    tagName: string): Element[];
/**
 * Like `scryRenderedDOMComponentsWithTag()` but expects there to be one result,
 * and returns that one result, or throws exception if there is any other
 * number of matches besides one.
 */
export function findRenderedDOMComponentWithTag(
    root: Component<any>,
    tagName: string): Element;

/**
 * Finds all instances of components with type equal to `componentClass`.
 */
export function scryRenderedComponentsWithType<T extends Component<any>, C extends ComponentClass<any>>(
    root: Component<any>,
    type: ClassType<any, T, C>): T[];
/**
 * Same as `scryRenderedComponentsWithType()` but expects there to be one result
 * and returns that one result, or throws exception if there is any other
 * number of matches besides one.
 */
export function findRenderedComponentWithType<T extends Component<any>, C extends ComponentClass<any>>(
    root: Component<any>,
    type: ClassType<any, T, C>): T;

/**
 * Call this in your tests to create a shallow renderer.
 */
export function createRenderer(): ShallowRenderer;

/**
 * Wrap any code rendering and triggering updates to your components into `act()` calls.
 *
 * Ensures that the behavior in your tests matches what happens in the browser
 * more closely by executing pending `useEffect`s before returning. This also
 * reduces the amount of re-renders done.
 *
 * @param callback A synchronous, void callback that will execute as a single, complete React commit.
 *
 * @see https://reactjs.org/blog/2019/02/06/react-v16.8.0.html#testing-hooks
 */
// the "void | undefined" is here to forbid any sneaky "Promise" returns.
export function act(callback: () => void | undefined): void;
// the "void | undefined" is here to forbid any sneaky return values
// tslint:disable-next-line: void-return
export function act(callback: () => Promise<void | undefined>): Promise<undefined>;

// Intentionally doesn't extend PromiseLike<never>.
// Ideally this should be as hard to accidentally use as possible.
export interface DebugPromiseLike {
    // the actual then() in here is 0-ary, but that doesn't count as a PromiseLike.
    then(onfulfilled: (value: never) => never, onrejected: (reason: never) => never): never;
}
the_stack
import { Cache } from "@siteimprove/alfa-cache";
import { Device } from "@siteimprove/alfa-device";
import { Node, Element, Namespace } from "@siteimprove/alfa-dom";
import { Iterable } from "@siteimprove/alfa-iterable";
import { Map } from "@siteimprove/alfa-map";
import { Mapper } from "@siteimprove/alfa-mapper";
import { None, Option } from "@siteimprove/alfa-option";
import { Predicate } from "@siteimprove/alfa-predicate";
import { Refinement } from "@siteimprove/alfa-refinement";
import { Sequence } from "@siteimprove/alfa-sequence";
import { Cell, Table } from "@siteimprove/alfa-table";

import { Attribute } from "./attribute";
import { Name } from "./name";
import { Role } from "./role";

const { hasInputType, hasName, isElement } = Element;
const { or, test } = Predicate;
const { and } = Refinement;

/**
 * A `Feature` bundles the three ARIA-relevant aspects of a markup element:
 * how its implicit role is determined, which implicit `aria-*` attributes it
 * carries, and how its accessible name is computed. Each aspect is a function
 * of the element (the name aspect additionally receives the device and the
 * current name-computation state).
 *
 * @internal
 */
export class Feature {
  public static of(
    role: Feature.RoleAspect = () => [],
    attributes: Feature.AttributesAspect = () => [],
    name: Feature.NameAspect = () => None
  ): Feature {
    return new Feature(role, attributes, name);
  }

  private readonly _role: Feature.RoleAspect;
  private readonly _attributes: Feature.AttributesAspect;
  private readonly _name: Feature.NameAspect;

  private constructor(
    role: Feature.RoleAspect,
    attributes: Feature.AttributesAspect,
    name: Feature.NameAspect
  ) {
    this._role = role;
    this._attributes = attributes;
    this._name = name;
  }

  public get role(): Feature.RoleAspect {
    return this._role;
  }

  public get attributes(): Feature.AttributesAspect {
    return this._attributes;
  }

  public get name(): Feature.NameAspect {
    return this._name;
  }
}

/**
 * @internal
 */
export namespace Feature {
  // An aspect maps an element (plus optional extra arguments) to some value.
  export type Aspect<T, A extends Array<unknown> = []> = Mapper<Element, T, A>;

  export type RoleAspect = Aspect<Iterable<Role>>;

  export type AttributesAspect = Aspect<Iterable<Attribute>>;

  export type NameAspect = Aspect<Option<Name>, [Device, Name.State]>;

  /**
   * Looks up the feature for the given element name in the given namespace.
   * Falls back to a generic HTML or SVG feature when the name has no specific
   * entry, and to `None` for any other namespace.
   */
  export function from(namespace: Namespace, name: string): Option<Feature> {
    return Option.from(Features[namespace]?.[name]).orElse(() => {
      switch (namespace) {
        case Namespace.HTML:
          return Option.of(html());

        case Namespace.SVG:
          return Option.of(svg());
      }

      return None;
    });
  }
}

/**
 * Constructs a feature for an HTML element; the name computation always falls
 * back to the `title` attribute after the element-specific steps.
 */
function html(
  role: Feature.RoleAspect = () => [],
  attributes: Feature.AttributesAspect = () => [],
  name: Feature.NameAspect = () => None
): Feature {
  return Feature.of(role, attributes, (element, device, state) =>
    Name.fromSteps(
      () => name(element, device, state),
      () => nameFromAttribute(element, "title")
    )
  );
}

/**
 * Constructs a feature for an SVG element; the name computation falls back to
 * a child `<title>` element and then to the `title` attribute.
 */
function svg(
  role: Feature.RoleAspect = () => [],
  attributes: Feature.AttributesAspect = () => [],
  name: Feature.NameAspect = () => None
): Feature {
  return Feature.of(role, attributes, (element, device, state) =>
    Name.fromSteps(
      () => name(element, device, state),
      () => nameFromChild(hasName("title"))(element, device, state),
      () => nameFromAttribute(element, "title")
    )
  );
}

/**
 * Computes a name from the first of the given attributes that is present on
 * the element with a non-empty value.
 */
const nameFromAttribute = (element: Element, ...attributes: Array<string>) => {
  for (const name of attributes) {
    for (const attribute of element.attribute(name)) {
      // The attribute value is used as long as it's not completely empty.
      if (attribute.value.length > 0) {
        return Name.fromLabel(attribute);
      }
    }
  }

  return None;
};

/**
 * Computes a name from the first child element matching `predicate`, using
 * that child's descendant name and attributing the source to the parent.
 */
const nameFromChild = (predicate: Predicate<Element>) => (
  element: Element,
  device: Device,
  state: Name.State
) =>
  element
    .children()
    .filter(isElement)
    .find(predicate)
    .flatMap((child) =>
      Name.fromDescendants(child, device, state.visit(child)).map((name) =>
        Name.of(name.value, [Name.Source.descendant(element, name)])
      )
    );

// Per-tree caches: the first element for each `id`, and all `<label>`
// elements. Both are computed lazily from the tree root on first use.
const ids = Cache.empty<Node, Map<string, Element>>();

const labels = Cache.empty<Node, Sequence<Element>>();

/**
 * Computes a name for a form control from its associated `<label>` elements:
 * either labels that wrap the element, or labels whose `for` attribute points
 * at the element's `id` (only honoured when the element is the first in the
 * tree with that `id`). Multiple label names are joined with a space.
 */
const nameFromLabel = (element: Element, device: Device, state: Name.State) => {
  const root = element.root();

  const elements = root.inclusiveDescendants().filter(isElement);

  // `getElementById()` resolves to the *first* element with a given `id`;
  // the map is built from reversed entries so earlier elements win.
  const isFirstReference = element.id.some((id) =>
    ids
      .get(root, () =>
        Map.from(
          elements
            .collect((element) =>
              element.id.map((id) => [id, element] as const)
            )
            .reverse()
        )
      )
      .get(id)
      .includes(element)
  );

  const references = labels
    .get(root, () => elements.filter(hasName("label")))
    .filter(
      or(
        // Implicit association: the label has no `for` and wraps the element.
        (label) =>
          label.attribute("for").isNone() &&
          label.descendants().includes(element),
        // Explicit association: the label's `for` matches the element's `id`.
        (label) =>
          isFirstReference &&
          label
            .attribute("for")
            .some((attribute) => element.id.includes(attribute.value))
      )
    );

  const names = references.collect((element) =>
    Name.fromNode(
      element,
      device,
      state.reference(Option.of(element)).recurse(true).descend(false)
    ).map((name) => [name, element] as const)
  );

  const name = names
    .map(([name]) => name.value)
    .join(" ")
    .trim();

  if (name === "") {
    return None;
  }

  return Option.of(
    Name.of(
      name,
      names.map(([name, element]) => {
        for (const attribute of element.attribute("for")) {
          return Name.Source.reference(attribute, name);
        }

        return Name.Source.ancestor(element, name);
      })
    )
  );
};

type Features = {
  [N in Namespace]?: {
    [element: string]: Feature | undefined;
  };
};

// Per-namespace, per-element-name feature table. Entries follow the HTML/SVG
// Accessibility API Mappings; see the inline links for individual mappings.
const Features: Features = {
  [Namespace.HTML]: {
    a: html(
      (element) =>
        element.attribute("href").isSome()
          ? Option.of(Role.of("link"))
          : None,
      () => [],
      (element, device, state) =>
        Name.fromDescendants(element, device, state.visit(element))
    ),

    area: html(
      (element) =>
        element.attribute("href").isSome()
          ? Option.of(Role.of("link"))
          : None,
      () => [],
      (element) => nameFromAttribute(element, "alt")
    ),

    article: html(() => Option.of(Role.of("article"))),

    aside: html(() => Option.of(Role.of("complementary"))),

    button: html(
      () => Option.of(Role.of("button")),
      function* (element) {
        // https://w3c.github.io/html-aam/#att-disabled
        for (const _ of element.attribute("disabled")) {
          yield Attribute.of("aria-disabled", "true");
        }
      }
    ),

    // https://w3c.github.io/html-aam/#el-datalist
    // <datalist> only has a role if it is correctly mapped to an <input>
    // via the list attribute. We should probably check that.
    // Additionally, it seems to never be rendered, hence always ignored.
    datalist: html(() => Option.of(Role.of("listbox"))),

    dd: html(() => Option.of(Role.of("definition"))),

    dfn: html(() => Option.of(Role.of("term"))),

    dialog: html(
      () => Option.of(Role.of("dialog")),
      function* (element) {
        // https://w3c.github.io/html-aam/#att-open-dialog
        yield Attribute.of(
          "aria-expanded",
          element.attribute("open").isSome() ? "true" : "false"
        );
      }
    ),

    details: html(
      () => None,
      function* (element) {
        // https://w3c.github.io/html-aam/#att-open-details
        yield Attribute.of(
          "aria-expanded",
          element.attribute("open").isSome() ? "true" : "false"
        );
      }
    ),

    dt: html(() => Option.of(Role.of("term"))),

    fieldset: html(
      () => Option.of(Role.of("group")),
      function* (element) {
        // https://w3c.github.io/html-aam/#att-disabled
        for (const _ of element.attribute("disabled")) {
          yield Attribute.of("aria-disabled", "true");
        }
      },
      nameFromChild(hasName("legend"))
    ),

    figure: html(
      () => Option.of(Role.of("figure")),
      () => [],
      nameFromChild(hasName("figcaption"))
    ),

    // <footer> is only a landmark when not scoped to a sectioning element.
    footer: html((element) =>
      element
        .ancestors()
        .filter(isElement)
        .some(hasName("article", "aside", "main", "nav", "section"))
        ? None
        : Option.of(Role.of("contentinfo"))
    ),

    form: html(() => Option.of(Role.of("form"))),

    h1: html(
      () => Option.of(Role.of("heading")),
      () => [Attribute.of("aria-level", "1")]
    ),

    h2: html(
      () => Option.of(Role.of("heading")),
      () => [Attribute.of("aria-level", "2")]
    ),

    h3: html(
      () => Option.of(Role.of("heading")),
      () => [Attribute.of("aria-level", "3")]
    ),

    h4: html(
      () => Option.of(Role.of("heading")),
      () => [Attribute.of("aria-level", "4")]
    ),

    h5: html(
      () => Option.of(Role.of("heading")),
      () => [Attribute.of("aria-level", "5")]
    ),

    h6: html(
      () => Option.of(Role.of("heading")),
      () => [Attribute.of("aria-level", "6")]
    ),

    // <header> is only a landmark when not scoped to a sectioning element.
    header: html((element) =>
      element
        .ancestors()
        .filter(isElement)
        .some(hasName("article", "aside", "main", "nav", "section"))
        ? None
        : Option.of(Role.of("banner"))
    ),

    hr: html(() => Option.of(Role.of("separator"))),

    img: html(
      function* (element) {
        // An explicitly empty alt="" marks the image as presentational;
        // "img" is always yielded as the next candidate role.
        for (const attribute of element.attribute("alt")) {
          if (attribute.value === "") {
            yield Role.of("presentation");
          }
        }

        yield Role.of("img");
      },
      () => [],
      (element) => nameFromAttribute(element, "alt")
    ),

    input: html(
      // The implicit role depends on the `type` attribute.
      (element): Option<Role> => {
        if (test(hasInputType("button", "image", "reset", "submit"), element)) {
          return Option.of(Role.of("button"));
        }

        if (test(hasInputType("checkbox"), element)) {
          return Option.of(Role.of("checkbox"));
        }

        if (test(hasInputType("number"), element)) {
          return Option.of(Role.of("spinbutton"));
        }

        if (test(hasInputType("radio"), element)) {
          return Option.of(Role.of("radio"));
        }

        if (test(hasInputType("range"), element)) {
          return Option.of(Role.of("slider"));
        }

        if (test(hasInputType("search"), element)) {
          return Option.of(
            Role.of(
              element.attribute("list").isSome() ? "combobox" : "searchbox"
            )
          );
        }

        if (test(hasInputType("email", "tel", "text", "url"), element)) {
          return Option.of(
            Role.of(element.attribute("list").isSome() ? "combobox" : "textbox")
          );
        }

        return None;
      },
      function* (element) {
        // https://w3c.github.io/html-aam/#el-input-checkbox
        // aria-checked should be "mixed" if the indeterminate IDL attribute is
        // true
        // aria-checked should otherwise mimic the checkedness, i.e. the
        // checked *IDL* attribute, not the DOM one.
        // https://w3c.github.io/html-aam/#att-checked
        yield Attribute.of(
          "aria-checked",
          element.attribute("checked").isSome() ? "true" : "false"
        );

        // https://w3c.github.io/html-aam/#att-list
        for (const { value } of element.attribute("list")) {
          yield Attribute.of("aria-controls", value);
        }

        // https://w3c.github.io/html-aam/#att-disabled
        for (const _ of element.attribute("disabled")) {
          yield Attribute.of("aria-disabled", "true");
        }

        // https://w3c.github.io/html-aam/#att-placeholder
        for (const { value } of element.attribute("placeholder")) {
          yield Attribute.of("aria-placeholder", value);
        }

        // https://w3c.github.io/html-aam/#att-readonly
        for (const _ of element.attribute("readonly")) {
          yield Attribute.of("aria-readonly", "true");
        }

        // https://w3c.github.io/html-aam/#att-required
        for (const _ of element.attribute("required")) {
          yield Attribute.of("aria-required", "true");
        }

        // https://w3c.github.io/html-aam/#att-max-input
        for (const { value } of element.attribute("max")) {
          yield Attribute.of("aria-valuemax", value);
        }

        // https://w3c.github.io/html-aam/#att-min-input
        for (const { value } of element.attribute("min")) {
          yield Attribute.of("aria-valuemin", value);
        }

        // https://w3c.github.io/html-aam/#att-value-input
        // but https://github.com/w3c/html-aam/issues/314
        for (const { value } of element.attribute("value")) {
          yield Attribute.of("aria-valuenow", value);
        }
      },
      // Name computation also depends on the `type` attribute.
      (element, device, state) => {
        if (
          test(
            hasInputType("text", "password", "search", "tel", "email", "url"),
            element
          )
        ) {
          return Name.fromSteps(
            () => nameFromLabel(element, device, state),
            () => nameFromAttribute(element, "title", "placeholder")
          );
        }

        if (test(hasInputType("button"), element)) {
          return nameFromAttribute(element, "value");
        }

        if (test(hasInputType("submit"), element)) {
          return Name.fromSteps(
            () => nameFromAttribute(element, "value"),
            () => Option.of(Name.of("Submit"))
          );
        }

        if (test(hasInputType("reset"), element)) {
          return Name.fromSteps(
            () => nameFromAttribute(element, "value"),
            () => Option.of(Name.of("Reset"))
          );
        }

        if (test(hasInputType("image"), element)) {
          return Name.fromSteps(
            () => nameFromAttribute(element, "alt"),
            () => Option.of(Name.of("Submit"))
          );
        }

        return nameFromLabel(element, device, state);
      }
    ),

    li: html(
      // <li> is only a listitem when its parent is a list container.
      (element) =>
        element
          .parent()
          .filter(Element.isElement)
          .flatMap((parent) => {
            switch (parent.name) {
              case "ol":
              case "ul":
              case "menu":
                return Option.of(Role.of("listitem"));
            }

            return None;
          }),
      (element) => {
        // https://w3c.github.io/html-aam/#el-li
        const siblings = element
          .inclusiveSiblings()
          .filter(and(Element.isElement, Element.hasName("li")));

        return [
          Attribute.of("aria-setsize", `${siblings.size}`),
          Attribute.of(
            "aria-posinset",
            `${
              siblings.takeUntil((sibling) => sibling.equals(element)).size + 1
            }`
          ),
        ];
      }
    ),

    main: html(() => Option.of(Role.of("main"))),

    math: html(() => Option.of(Role.of("math"))),

    menu: html(() => Option.of(Role.of("list"))),

    nav: html(() => Option.of(Role.of("navigation"))),

    ol: html(() => Option.of(Role.of("list"))),

    optgroup: html(
      () => Option.of(Role.of("group")),
      function* (element) {
        // https://w3c.github.io/html-aam/#att-disabled
        for (const _ of element.attribute("disabled")) {
          yield Attribute.of("aria-disabled", "true");
        }
      }
    ),

    option: html(
      // <option> only has a role inside a select/optgroup/datalist.
      (element) =>
        element
          .ancestors()
          .filter(isElement)
          .some(hasName("select", "optgroup", "datalist"))
          ? Option.of(Role.of("option"))
          : None,
      function* (element) {
        // https://w3c.github.io/html-aam/#att-disabled
        for (const _ of element.attribute("disabled")) {
          yield Attribute.of("aria-disabled", "true");
        }

        // https://w3c.github.io/html-aam/#att-selected
        yield Attribute.of(
          "aria-selected",
          element.attribute("selected").isSome() ? "true" : "false"
        );
      }
    ),

    output: html(() => Option.of(Role.of("status"))),

    p: html(() => Option.of(Role.of("paragraph"))),

    section: html(() => Option.of(Role.of("region"))),

    select: html(
      () =>
        // Despite what the HTML AAM specifies, we always map <select> elements
        // to a listbox widget as they currently have no way of mapping to a
        // valid combobox widget. As a combobox requires an owned textarea and a
        // list of options, we will always end up mapping <select> elements to
        // an invalid combobox widget.
        Option.of(Role.of("listbox")),
      function* (element) {
        // https://w3c.github.io/html-aam/#att-disabled
        for (const _ of element.attribute("disabled")) {
          yield Attribute.of("aria-disabled", "true");
        }

        // https://w3c.github.io/html-aam/#att-required
        for (const _ of element.attribute("required")) {
          yield Attribute.of("aria-required", "true");
        }

        // https://w3c.github.io/html-aam/#att-multiple-select
        for (const _ of element.attribute("multiple")) {
          yield Attribute.of("aria-multiselectable", "true");
        }
      },
      nameFromLabel
    ),

    table: html(
      () => Option.of(Role.of("table")),
      () => [],
      nameFromChild(hasName("caption"))
    ),

    tbody: html(() => Option.of(Role.of("rowgroup"))),

    td: html(
      // <td> maps to cell or gridcell depending on the ancestor table's role.
      (element) =>
        element
          .ancestors()
          .filter(isElement)
          .find(hasName("table"))
          .flatMap<Role>((table) => {
            for (const role of Role.from(table)) {
              if (role.is("table")) {
                return Option.of(Role.of("cell"));
              }

              if (role.is("grid")) {
                return Option.of(Role.of("gridcell"));
              }
            }

            return None;
          }),
      function* (element) {
        // https://w3c.github.io/html-aam/#att-colspan
        for (const { value } of element.attribute("colspan")) {
          yield Attribute.of("aria-colspan", value);
        }

        // https://w3c.github.io/html-aam/#att-rowspan
        for (const { value } of element.attribute("rowspan")) {
          yield Attribute.of("aria-rowspan", value);
        }
      }
    ),

    textarea: html(
      () => Option.of(Role.of("textbox")),
      function* (element) {
        // https://w3c.github.io/html-aam/#el-textarea
        yield Attribute.of("aria-multiline", "true");

        // https://w3c.github.io/html-aam/#att-disabled
        for (const _ of element.attribute("disabled")) {
          yield Attribute.of("aria-disabled", "true");
        }

        // https://w3c.github.io/html-aam/#att-readonly
        for (const _ of element.attribute("readonly")) {
          yield Attribute.of("aria-readonly", "true");
        }

        // https://w3c.github.io/html-aam/#att-required
        for (const _ of element.attribute("required")) {
          yield Attribute.of("aria-required", "true");
        }

        // https://w3c.github.io/html-aam/#att-placeholder
        for (const { value } of element.attribute("placeholder")) {
          yield Attribute.of("aria-placeholder", value);
        }
      },
      (element, device, state) => {
        return Name.fromSteps(
          () => nameFromLabel(element, device, state),
          () => nameFromAttribute(element, "title", "placeholder")
        );
      }
    ),

    tfoot: html(() => Option.of(Role.of("rowgroup"))),

    th: html(
      // <th> maps to columnheader/rowheader based on its computed scope,
      // falling back to cell/gridcell based on the table's role.
      (element) =>
        element
          .ancestors()
          .filter(isElement)
          .find(hasName("table"))
          .map(Table.from)
          .flatMap((table) =>
            table.cells
              .filter(Cell.isHeader)
              .find(Cell.hasElement(element))
              .map((cell) => {
                return { table, cell };
              })
          )
          .flatMap<Role>(({ table, cell }) => {
            switch (cell.scope) {
              case "column":
              case "column-group":
                return Option.of(Role.of("columnheader"));

              case "row":
              case "row-group":
                return Option.of(Role.of("rowheader"));

              default:
                for (const role of Role.from(table.element)) {
                  if (role.is("table")) {
                    return Option.of(Role.of("cell"));
                  }

                  if (role.is("grid")) {
                    return Option.of(Role.of("gridcell"));
                  }
                }

                return None;
            }
          }),
      function* (element) {
        // https://w3c.github.io/html-aam/#att-colspan
        for (const { value } of element.attribute("colspan")) {
          yield Attribute.of("aria-colspan", value);
        }

        // https://w3c.github.io/html-aam/#att-rowspan
        for (const { value } of element.attribute("rowspan")) {
          yield Attribute.of("aria-rowspan", value);
        }
      }
    ),

    thead: html(() => Option.of(Role.of("rowgroup"))),

    tr: html(() => Option.of(Role.of("row"))),

    ul: html(() => Option.of(Role.of("list"))),

    meter: html(
      () => None,
      function* (element) {
        // https://w3c.github.io/html-aam/#att-max
        for (const { value } of element.attribute("max")) {
          yield Attribute.of("aria-valuemax", value);
        }

        // https://w3c.github.io/html-aam/#att-min
        for (const { value } of element.attribute("min")) {
          yield Attribute.of("aria-valuemin", value);
        }

        // https://w3c.github.io/html-aam/#att-value-meter
        for (const { value } of element.attribute("value")) {
          yield Attribute.of("aria-valuenow", value);
        }
      }
    ),

    progress: html(
      () => Option.of(Role.of("progressbar")),
      function* (element) {
        // https://w3c.github.io/html-aam/#att-max
        for (const { value } of element.attribute("max")) {
          yield Attribute.of("aria-valuemax", value);
        }

        // https://w3c.github.io/html-aam/#att-value-meter
        for (const { value } of element.attribute("value")) {
          yield Attribute.of("aria-valuenow", value);
        }
      }
    ),
  },

  [Namespace.SVG]: {
    a: svg((element) =>
      Option.of(Role.of(element.attribute("href").isSome() ? "link" : "group"))
    ),

    circle: svg(() => Option.of(Role.of("graphics-symbol"))),

    ellipse: svg(() => Option.of(Role.of("graphics-symbol"))),

    foreignObject: svg(() => Option.of(Role.of("group"))),

    g: svg(() => Option.of(Role.of("group"))),

    image: svg(() => Option.of(Role.of("img"))),

    line: svg(() => Option.of(Role.of("graphics-symbol"))),

    mesh: svg(() => Option.of(Role.of("img"))),

    path: svg(() => Option.of(Role.of("graphics-symbol"))),

    polygon: svg(() => Option.of(Role.of("graphics-symbol"))),

    polyline: svg(() => Option.of(Role.of("graphics-symbol"))),

    rect: svg(() => Option.of(Role.of("graphics-symbol"))),

    svg: svg(() => Option.of(Role.of("graphics-document"))),

    symbol: svg(() => Option.of(Role.of("graphics-object"))),

    text: svg(() => Option.of(Role.of("group"))),

    textPath: svg(() => Option.of(Role.of("group"))),

    use: svg(() => Option.of(Role.of("graphics-object"))),
  },
};
the_stack
import fs from 'fs-extra'; import path from 'path'; import chalk from 'chalk'; import inquirer from 'inquirer'; import importGlobal from 'import-global'; import importFrom from 'import-from'; import { DynamoDBModelTransformer } from 'graphql-dynamodb-transformer'; import { ModelAuthTransformer } from 'graphql-auth-transformer'; import { ModelConnectionTransformer } from 'graphql-connection-transformer'; import { SearchableModelTransformer } from 'graphql-elasticsearch-transformer'; import { VersionedModelTransformer } from 'graphql-versioned-transformer'; import { FunctionTransformer } from 'graphql-function-transformer'; import { HttpTransformer } from 'graphql-http-transformer'; import { PredictionsTransformer } from 'graphql-predictions-transformer'; import { KeyTransformer } from 'graphql-key-transformer'; import { destructiveUpdatesFlag, ProviderName as providerName } from './constants'; import { AmplifyCLIFeatureFlagAdapter } from './utils/amplify-cli-feature-flag-adapter'; import { isAmplifyAdminApp } from './utils/admin-helpers'; import { $TSContext, AmplifyCategories, getGraphQLTransformerAuthDocLink, getGraphQLTransformerAuthSubscriptionsDocLink, getGraphQLTransformerOpenSearchDocLink, getGraphQLTransformerOpenSearchProductionDocLink, JSONUtilities, pathManager, stateManager, } from 'amplify-cli-core'; import { ResourceConstants } from 'graphql-transformer-common'; import { printer } from 'amplify-prompts'; import _ from 'lodash'; import { isAuthModeUpdated } from './utils/auth-mode-compare'; import { collectDirectivesByTypeNames, readTransformerConfiguration, writeTransformerConfiguration, TRANSFORM_CONFIG_FILE_NAME, TRANSFORM_BASE_VERSION, CLOUDFORMATION_FILE_NAME, getAppSyncServiceExtraDirectives, ITransformer, revertAPIMigration, migrateAPIProject, readProjectConfiguration, buildAPIProject, TransformConfig, getSanityCheckRules, } from 'graphql-transformer-core'; import { print } from 'graphql'; import { hashDirectory } from './upload-appsync-files'; 
import { exitOnNextTick, FeatureFlags } from 'amplify-cli-core'; import { transformGraphQLSchema as transformGraphQLSchemaV6, getDirectiveDefinitions as getDirectiveDefinitionsV6, } from './graphql-transformer/transform-graphql-schema'; const apiCategory = 'api'; const storageCategory = 'storage'; const parametersFileName = 'parameters.json'; const schemaFileName = 'schema.graphql'; const schemaDirName = 'schema'; const ROOT_APPSYNC_S3_KEY = 'amplify-appsync-files'; const s3ServiceName = 'S3'; export async function searchablePushChecks(context, map, apiName): Promise<void> { const searchableModelTypes = Object.keys(map).filter(type => map[type].includes('searchable') && map[type].includes('model')); if (searchableModelTypes.length) { const currEnv = context.amplify.getEnvInfo().envName; const teamProviderInfo = stateManager.getTeamProviderInfo(); const instanceType = _.get( teamProviderInfo, [currEnv, 'categories', 'api', apiName, ResourceConstants.PARAMETERS.ElasticsearchInstanceType], 't2.small.elasticsearch', ); if (instanceType === 't2.small.elasticsearch' || instanceType === 't3.small.elasticsearch') { const version = await getTransformerVersion(context); const docLink = getGraphQLTransformerOpenSearchProductionDocLink(version); printer.warn( `Your instance type for OpenSearch is ${instanceType}, you may experience performance issues or data loss. Consider reconfiguring with the instructions here ${docLink}`, ); } } } async function warnOnAuth(context, map) { const unAuthModelTypes = Object.keys(map).filter(type => !map[type].includes('auth') && map[type].includes('model')); if (unAuthModelTypes.length) { const transformerVersion = await getTransformerVersion(context); const docLink = getGraphQLTransformerAuthDocLink(transformerVersion); context.print.warning("\nThe following types do not have '@auth' enabled. 
Consider using @auth with @model"); context.print.warning(unAuthModelTypes.map(type => `\t - ${type}`).join('\n')); context.print.info(`Learn more about @auth here: ${docLink}\n`); } } function getTransformerFactory(context, resourceDir, authConfig?) { return async (addSearchableTransformer, storageConfig?) => { const transformerList: ITransformer[] = [ // TODO: Removing until further discussion. `getTransformerOptions(project, '@model')` new DynamoDBModelTransformer(), new VersionedModelTransformer(), new FunctionTransformer(), new HttpTransformer(), new KeyTransformer(), new ModelConnectionTransformer(), new PredictionsTransformer(storageConfig), ]; if (addSearchableTransformer) { transformerList.push(new SearchableModelTransformer()); } const customTransformersConfig: TransformConfig = await readTransformerConfiguration(resourceDir); const customTransformers = ( customTransformersConfig && customTransformersConfig.transformers ? customTransformersConfig.transformers : [] ) .map(transformer => { const fileUrlMatch = /^file:\/\/(.*)\s*$/m.exec(transformer); const modulePath = fileUrlMatch ? fileUrlMatch[1] : transformer; if (!modulePath) { throw new Error(`Invalid value specified for transformer: '${transformer}'`); } // The loading of transformer can happen multiple ways in the following order: // - modulePath is an absolute path to an NPM package // - modulePath is a package name, then it will be loaded from the project's root's node_modules with createRequireFromPath. 
// - modulePath is a name of a globally installed package let importedModule; const tempModulePath = modulePath.toString(); try { if (path.isAbsolute(tempModulePath)) { // Load it by absolute path importedModule = require(modulePath); } else { const projectRootPath = context.amplify.pathManager.searchProjectRootPath(); const projectNodeModules = path.join(projectRootPath, 'node_modules'); try { importedModule = importFrom(projectNodeModules, modulePath); } catch (_) { // Intentionally left blank to try global } // Try global package install if (!importedModule) { importedModule = importGlobal(modulePath); } } // At this point we've to have an imported module, otherwise module loader, threw an error. return importedModule; } catch (error) { context.print.error(`Unable to import custom transformer module(${modulePath}).`); context.print.error(`You may fix this error by editing transformers at ${path.join(resourceDir, TRANSFORM_CONFIG_FILE_NAME)}`); throw error; } }) .map(imported => { const CustomTransformer = imported.default; if (typeof CustomTransformer === 'function') { return new CustomTransformer(); } else if (typeof CustomTransformer === 'object') { return CustomTransformer; } throw new Error("Custom Transformers' default export must be a function or an object"); }) .filter(customTransformer => customTransformer); if (customTransformers.length > 0) { transformerList.push(...customTransformers); } // TODO: Build dependency mechanism into transformers. Auth runs last // so any resolvers that need to be protected will already be created. 
let amplifyAdminEnabled: boolean = false; try { const amplifyMeta = stateManager.getMeta(); const appId = amplifyMeta?.providers?.[providerName]?.AmplifyAppId; const res = await isAmplifyAdminApp(appId); amplifyAdminEnabled = res.isAdminApp; } catch (err) { // if it is not an AmplifyAdmin app, do nothing } transformerList.push(new ModelAuthTransformer({ authConfig, addAwsIamAuthInOutputSchema: amplifyAdminEnabled })); return transformerList; }; } /** * @TODO Include a map of versions to keep track */ async function transformerVersionCheck(context, resourceDir, cloudBackendDirectory, updatedResources, usedDirectives) { const transformerVersion = await getTransformerVersion(context); const authDocLink = getGraphQLTransformerAuthSubscriptionsDocLink(transformerVersion); const searchable = getGraphQLTransformerOpenSearchProductionDocLink(transformerVersion); const versionChangeMessage = `The default behavior for @auth has changed in the latest version of Amplify\nRead here for details: ${authDocLink}`; const warningESMessage = `The behavior for @searchable has changed after version 4.14.1.\nRead here for details: ${searchable}`; const checkVersionExist = config => config && config.Version; const checkESWarningExists = config => config && config.ElasticsearchWarning; let writeToConfig = false; // this is where we check if there is a prev version of the transformer being used // by using the transformer.conf.json file const cloudTransformerConfig = await readTransformerConfiguration(cloudBackendDirectory); const cloudVersionExist = checkVersionExist(cloudTransformerConfig); const cloudWarningExist = checkESWarningExists(cloudTransformerConfig); // check local resource if the question has been answered before const localTransformerConfig = await readTransformerConfiguration(resourceDir); const localVersionExist = checkVersionExist(localTransformerConfig); const localWarningExist = checkESWarningExists(localTransformerConfig); // if we already asked the confirmation 
question before at a previous push // or during current operations we should not ask again. const showPrompt = !(cloudVersionExist || localVersionExist); const showWarning = !(cloudWarningExist || localWarningExist); const resources = updatedResources.filter(resource => resource.service === 'AppSync'); if (resources.length > 0) { if (showPrompt && usedDirectives.includes('auth')) { await warningMessage(context, versionChangeMessage); } if (showWarning && usedDirectives.includes('searchable')) { await warningMessage(context, warningESMessage); } } // searchable warning flag // Only touch the file if it misses the Version property // Always set to the base version, to not to break existing projects when coming // from an older version of the CLI. if (!localTransformerConfig.Version) { localTransformerConfig.Version = TRANSFORM_BASE_VERSION; writeToConfig = true; } // Add the warning as noted in the elasticsearch if (!localTransformerConfig.warningESMessage) { localTransformerConfig.ElasticsearchWarning = true; writeToConfig = true; } if (writeToConfig) { await writeTransformerConfiguration(resourceDir, localTransformerConfig); } } async function warningMessage(context, warningMessage) { if (context.exeInfo && context.exeInfo.inputParams && context.exeInfo.inputParams.yes) { context.print.warning(`\n${warningMessage}\n`); } else { context.print.warning(`\n${warningMessage}\n`); const response = await inquirer.prompt({ name: 'transformerConfig', type: 'confirm', message: `Do you wish to continue?`, default: false, }); if (!response.transformerConfig) { await context.usageData.emitSuccess(); exitOnNextTick(0); } } } function apiProjectIsFromOldVersion(pathToProject, resourcesToBeCreated) { const resources = resourcesToBeCreated.filter(resource => resource.service === 'AppSync'); if (!pathToProject || resources.length > 0) { return false; } return fs.existsSync(`${pathToProject}/${CLOUDFORMATION_FILE_NAME}`) && 
!fs.existsSync(`${pathToProject}/${TRANSFORM_CONFIG_FILE_NAME}`);
// NOTE(review): the line above is the tail of a function whose definition begins
// before this chunk — left untouched.
}

/**
 * API migration happens in a few steps. First we calculate which resources need
 * to remain in the root stack (DDB tables, ES Domains, etc) and write them to
 * transform.conf.json. We then call CF's update stack on the root stack such
 * that only the resources that need to be in the root stack remain there
 * (this deletes resolvers from the schema). We then compile the project with
 * the new implementation and call update stack again.
 *
 * On any failure the cloud and local project state is rolled back via
 * revertAPIMigration; the intermediate-rollback failure is reported but the
 * original error is still rethrown.
 * @param {*} context Amplify CLI context (print, amplify helpers).
 * @param {*} options migration options; resourceDir, isCLIMigration,
 *   cloudBackendDirectory, and an optional handleMigration callback that
 *   performs the CloudFormation update and waits for it.
 */
async function migrateProject(context, options) {
  const { resourceDir, isCLIMigration, cloudBackendDirectory } = options;
  // When no migration handler is supplied, updates become no-ops.
  const updateAndWaitForStack = options.handleMigration || (() => Promise.resolve('Skipping update'));
  let oldProjectConfig;
  let oldCloudBackend;
  try {
    context.print.info('\nMigrating your API. This may take a few minutes.');
    const { project, cloudBackend } = await migrateAPIProject({
      projectDirectory: resourceDir,
      cloudBackendDirectory,
    });
    // Keep the pre-migration snapshots so we can roll back on failure.
    oldProjectConfig = project;
    oldCloudBackend = cloudBackend;
    // Intermediate stack update: strips resolvers, keeps root-stack resources.
    await updateAndWaitForStack({ isCLIMigration });
  } catch (e) {
    await revertAPIMigration(resourceDir, oldProjectConfig);
    throw e;
  }
  try {
    // After the intermediate update, we need the transform function
    // to look at this directory since we did not overwrite the currentCloudBackend with the build
    options.cloudBackendDirectory = resourceDir;
    await transformGraphQLSchema(context, options);
    const result = await updateAndWaitForStack({ isCLIMigration });
    context.print.info('\nFinished migrating API.');
    return result;
  } catch (e) {
    // Roll back in reverse order: cloud backend first, then local project.
    context.print.error('Reverting API migration.');
    await revertAPIMigration(resourceDir, oldCloudBackend);
    try {
      await updateAndWaitForStack({ isReverting: true, isCLIMigration });
    } catch (e) {
      context.print.error('Error reverting intermediate migration stack.');
    }
    await revertAPIMigration(resourceDir, oldProjectConfig);
    context.print.error('API successfully reverted.');
    throw e;
  }
}

/**
 * Compiles the project's GraphQL schema (v1 transformer path; delegates to
 * transformGraphQLSchemaV6 when transformerVersion is 2), writing build
 * artifacts under <resourceDir>/build. Returns the transformer output, or
 * undefined when compilation is skipped (no-gql-override flag, no AppSync
 * resource, or foreign provider plugin).
 * @param {*} context Amplify CLI context.
 * @param {*} options resourceDir, parameters, forceCompile, migrate,
 *   cloudBackendDirectory, authConfig, dryRun, minify.
 */
export async function transformGraphQLSchema(context, options) {
  const transformerVersion = await getTransformerVersion(context);
  if (transformerVersion === 2) {
    return transformGraphQLSchemaV6(context, options);
  }
  const backEndDir = context.amplify.pathManager.getBackendDirPath();
  const flags = context.parameters.options;
  if (flags['no-gql-override']) {
    return;
  }
  let { resourceDir, parameters } = options;
  const { forceCompile } = options;
  // Compilation during the push step
  const { resourcesToBeCreated, resourcesToBeUpdated, allResources } = await context.amplify.getResourceStatus(apiCategory);
  let resources = resourcesToBeCreated.concat(resourcesToBeUpdated);
  // When build folder is missing include the API
  // to be compiled without the backend/api/<api-name>/build
  // cloud formation push will fail even if there is no changes in the GraphQL API
  // https://github.com/aws-amplify/amplify-console/issues/10
  const resourceNeedCompile = allResources
    .filter(r => !resources.includes(r))
    .filter(r => {
      const buildDir = path.normalize(path.join(backEndDir, apiCategory, r.resourceName, 'build'));
      return !fs.existsSync(buildDir);
    });
  resources = resources.concat(resourceNeedCompile);
  if (forceCompile) {
    resources = resources.concat(allResources);
  }
  resources = resources.filter(resource => resource.service === 'AppSync');
  // check if api is in update status or create status
  const isNewAppSyncAPI: boolean = resourcesToBeCreated.filter(resource => resource.service === 'AppSync').length === 0 ? false : true;
  if (!resourceDir) {
    // There can only be one appsync resource
    if (resources.length > 0) {
      const resource = resources[0];
      if (resource.providerPlugin !== providerName) {
        return;
      }
      const { category, resourceName } = resource;
      resourceDir = path.normalize(path.join(backEndDir, category, resourceName));
    } else {
      // No appsync resource to update/add
      return;
    }
  }
  let previouslyDeployedBackendDir = options.cloudBackendDirectory;
  if (!previouslyDeployedBackendDir) {
    if (resources.length > 0) {
      const resource = resources[0];
      if (resource.providerPlugin !== providerName) {
        return;
      }
      const { category, resourceName } = resource;
      const cloudBackendRootDir = context.amplify.pathManager.getCurrentCloudBackendDirPath();
      /* eslint-disable */
      previouslyDeployedBackendDir = path.normalize(path.join(cloudBackendRootDir, category, resourceName));
      /* eslint-enable */
    }
  }
  const parametersFilePath = path.join(resourceDir, parametersFileName);
  // Load stack parameters from disk when the caller did not pass them in;
  // a malformed file degrades to an empty parameter set rather than failing.
  if (!parameters && fs.existsSync(parametersFilePath)) {
    try {
      parameters = JSONUtilities.readJson(parametersFilePath);
    } catch (e) {
      parameters = {};
    }
  }
  const isCLIMigration = options.migrate;
  const isOldApiVersion = apiProjectIsFromOldVersion(previouslyDeployedBackendDir, resourcesToBeCreated);
  // migrate: false prevents migrateProject -> transformGraphQLSchema recursion.
  const migrateOptions = {
    ...options,
    resourceDir,
    migrate: false,
    isCLIMigration,
    cloudBackendDirectory: previouslyDeployedBackendDir,
  };
  if (isCLIMigration && isOldApiVersion) {
    return await migrateProject(context, migrateOptions);
  } else if (isOldApiVersion) {
    let IsOldApiProject;
    // Honor a --yes style input parameter, otherwise prompt interactively.
    if (context.exeInfo && context.exeInfo.inputParams && context.exeInfo.inputParams.yes) {
      IsOldApiProject = context.exeInfo.inputParams.yes;
    } else {
      const migrateMessage =
        `${chalk.bold('The CLI is going to take the following actions during the migration step:')}\n` +
        '\n1. If you have a GraphQL API, we will update the corresponding Cloudformation stack to support larger annotated schemas and custom resolvers.\n' +
        'In this process, we will be making Cloudformation API calls to update your GraphQL API Cloudformation stack. This operation will result in deletion of your AppSync resolvers and then the creation of new ones and for a brief while your AppSync API will be unavailable until the migration finishes\n' +
        '\n2. We will be updating your local Cloudformation files present inside the ‘amplify/‘ directory of your app project, for the GraphQL API service\n' +
        '\n3. If for any reason the migration fails, the CLI will rollback your cloud and local changes and you can take a look at https://aws-amplify.github.io/docs/cli/migrate?sdk=js for manually migrating your project so that it’s compatible with the latest version of the CLI\n' +
        '\n4. ALL THE ABOVE MENTIONED OPERATIONS WILL NOT DELETE ANY DATA FROM ANY OF YOUR DATA STORES\n' +
        `\n${chalk.bold('Before the migration, please be aware of the following things:')}\n` +
        '\n1. Make sure to have an internet connection through the migration process\n' +
        '\n2. Make sure to not exit/terminate the migration process (by interrupting it explicitly in the middle of migration), as this will lead to inconsistency within your project\n' +
        '\n3. Make sure to take a backup of your entire project (including the amplify related config files)\n' +
        '\nDo you want to continue?\n';
      ({ IsOldApiProject } = await inquirer.prompt({
        name: 'IsOldApiProject',
        type: 'confirm',
        message: migrateMessage,
        default: true,
      }));
    }
    if (!IsOldApiProject) {
      throw new Error('Migration cancelled. Please downgrade to a older version of the Amplify CLI or migrate your API project.');
    }
    return await migrateProject(context, migrateOptions);
  }
  let { authConfig } = options;
  //
  // If we don't have an authConfig from the caller, use it from the
  // already read resources[0], which is an AppSync API.
  //
  if (!authConfig) {
    if (resources[0].output.securityType) {
      // Convert to multi-auth format if needed.
      authConfig = {
        defaultAuthentication: {
          authenticationType: resources[0].output.securityType,
        },
        additionalAuthenticationProviders: [],
      };
    } else {
      ({ authConfig } = resources[0].output);
    }
  }
  // for the predictions directive get storage config
  const s3ResourceName = await invokeS3GetResourceName(context);
  const storageConfig = {
    bucketName: s3ResourceName ? await getBucketName(context, s3ResourceName) : undefined,
  };
  const buildDir = path.normalize(path.join(resourceDir, 'build'));
  const schemaFilePath = path.normalize(path.join(resourceDir, schemaFileName));
  const schemaDirPath = path.normalize(path.join(resourceDir, schemaDirName));
  // Reuse the previous deployment's S3 root key so unchanged deployments stay
  // addressable; otherwise derive a fresh key from the resource dir contents.
  let deploymentRootKey = await getPreviousDeploymentRootKey(previouslyDeployedBackendDir);
  if (!deploymentRootKey) {
    const deploymentSubKey = await hashDirectory(resourceDir);
    deploymentRootKey = `${ROOT_APPSYNC_S3_KEY}/${deploymentSubKey}`;
  }
  const projectBucket = options.dryRun ? 'fake-bucket' : getProjectBucket(context);
  const buildParameters = {
    ...parameters,
    S3DeploymentBucket: projectBucket,
    S3DeploymentRootKey: deploymentRootKey,
  };
  // If it is a dry run, don't create the build folder as it could make a follow-up command
  // to not to trigger a build, hence a corrupt deployment.
  if (!options.dryRun) {
    fs.ensureDirSync(buildDir);
  }
  // Transformer compiler code
  // const schemaText = await readProjectSchema(resourceDir);
  const project = await readProjectConfiguration(resourceDir);
  // Check for common errors
  const directiveMap = collectDirectivesByTypeNames(project.schema);
  await warnOnAuth(context, directiveMap.types);
  await searchablePushChecks(context, directiveMap.types, parameters[ResourceConstants.PARAMETERS.AppSyncApiName]);
  await transformerVersionCheck(context, resourceDir, previouslyDeployedBackendDir, resourcesToBeUpdated, directiveMap.directives);
  const transformerListFactory = getTransformerFactory(context, resourceDir, authConfig);
  let searchableTransformerFlag = false;
  if (directiveMap.directives.includes('searchable')) {
    searchableTransformerFlag = true;
  }
  const ff = new AmplifyCLIFeatureFlagAdapter();
  const allowDestructiveUpdates = context?.input?.options?.[destructiveUpdatesFlag] || context?.input?.options?.force;
  const sanityCheckRulesList = getSanityCheckRules(isNewAppSyncAPI, ff, allowDestructiveUpdates);
  const buildConfig = {
    ...options,
    buildParameters,
    projectDirectory: resourceDir,
    transformersFactory: transformerListFactory,
    transformersFactoryArgs: [searchableTransformerFlag, storageConfig],
    rootStackFileName: 'cloudformation-template.json',
    currentCloudBackendDirectory: previouslyDeployedBackendDir,
    minify: options.minify,
    featureFlags: ff,
    sanityCheckRules: sanityCheckRulesList,
  };
  const transformerOutput = await buildAPIProject(buildConfig);
  context.print.success(`GraphQL schema compiled successfully.\n\nEdit your schema at ${schemaFilePath} or \
place .graphql files in a directory at ${schemaDirPath}`);
  if (isAuthModeUpdated(options)) {
    parameters.AuthModeLastUpdated = new Date();
  }
  if (!options.dryRun) {
    JSONUtilities.writeJson(parametersFilePath, parameters);
  }
  return transformerOutput;
}

/**
 * Returns the provider's deployment bucket name from the project metadata,
 * or '' when no providers entry exists.
 */
function getProjectBucket(context) {
  const projectDetails = context.amplify.getProjectDetails();
  const projectBucket = projectDetails.amplifyMeta.providers ? projectDetails.amplifyMeta.providers[providerName].DeploymentBucketName : '';
  return projectBucket;
}

/**
 * Reads S3DeploymentRootKey from the previously deployed build parameters
 * file; returns undefined when the file is absent or unreadable (best-effort
 * by design — callers fall back to deriving a fresh key).
 */
async function getPreviousDeploymentRootKey(previouslyDeployedBackendDir) {
  let parameters;
  try {
    const parametersPath = path.join(previouslyDeployedBackendDir, 'build', parametersFileName);
    const parametersExists = fs.existsSync(parametersPath);
    if (parametersExists) {
      const parametersString = await fs.readFile(parametersPath);
      parameters = JSON.parse(parametersString.toString());
    }
    return parameters.S3DeploymentRootKey;
  } catch (err) {
    return undefined;
  }
}

// TODO: Remove until further discussion
// function getTransformerOptions(project, transformerName) {
//   if (
//     project &&
//     project.config &&
//     project.config.TransformerOptions &&
//     project.config.TransformerOptions[transformerName]
//   ) {
//     return project.config.TransformerOptions[transformerName];
//   }
//   return undefined;
// }

/**
 * Returns the combined GraphQL directive definitions (AppSync built-ins plus
 * every transformer plugin's directive and type definitions) as a single
 * SDL string. Delegates to the v2 implementation when transformerVersion is 2.
 */
export async function getDirectiveDefinitions(context, resourceDir) {
  const transformerVersion = await getTransformerVersion(context);
  if (transformerVersion === 2) {
    return getDirectiveDefinitionsV6(context, resourceDir);
  }
  const transformList = await getTransformerFactory(context, resourceDir)(true);
  const appSynDirectives = getAppSyncServiceExtraDirectives();
  const transformDirectives = transformList
    .map(transformPluginInst => [transformPluginInst.directive, ...transformPluginInst.typeDefinitions].map(node => print(node)).join('\n'))
    .join('\n');
  return [appSynDirectives, transformDirectives].join('\n');
}

/**
 * Check if storage exists in the project if not return undefined
 */
function s3ResourceAlreadyExists(context) {
  const { amplify } = context;
  try {
    let resourceName;
    const { amplifyMeta } = amplify.getProjectDetails();
    if (amplifyMeta[storageCategory]) {
      const categoryResources = amplifyMeta[storageCategory];
      // Last matching S3 resource wins; in practice there is at most one.
      Object.keys(categoryResources).forEach(resource => {
        if (categoryResources[resource].service === s3ServiceName) {
          resourceName = resource;
        }
      });
    }
    return resourceName;
  } catch (error) {
    // Treat an undetermined environment as "no storage resource".
    if (error.name === 'UndeterminedEnvironmentError') {
      return undefined;
    }
    throw error;
  }
}

/**
 * S3API
 * TBD: Remove this once all invoke functions are moved to a library shared across amplify
 * */
async function invokeS3GetUserInputs(context, s3ResourceName) {
  const s3UserInputs = await context.amplify.invokePluginMethod(context, 'storage', undefined, 's3GetUserInput', [context, s3ResourceName]);
  return s3UserInputs;
}

/**
 * S3API
 * TBD: Remove this once all invoke functions are moved to a library shared across amplify
 * */
async function invokeS3GetResourceName(context) {
  const s3ResourceName = await context.amplify.invokePluginMethod(context, 'storage', undefined, 's3GetResourceName', [context]);
  return s3ResourceName;
}

/**
 * Builds the S3 bucket name template for the given storage resource.
 * Note the returned string intentionally contains literal `${hash}` / `${env}`
 * placeholders (escaped below) that are substituted later by CloudFormation.
 */
async function getBucketName(context: $TSContext, s3ResourceName: string) {
  const { amplify } = context;
  const { amplifyMeta } = amplify.getProjectDetails();
  const stackName = amplifyMeta.providers.awscloudformation.StackName;
  const s3ResourcePath = pathManager.getResourceDirectoryPath(undefined, AmplifyCategories.STORAGE, s3ResourceName);
  const cliInputsPath = path.join(s3ResourcePath, 'cli-inputs.json');
  let bucketParameters;
  // get bucketParameters 1st from cli-inputs , if not present, then parameters.json
  if (fs.existsSync(cliInputsPath)) {
    bucketParameters = JSONUtilities.readJson(cliInputsPath);
  } else {
    bucketParameters = stateManager.getResourceParametersJson(undefined, AmplifyCategories.STORAGE, s3ResourceName);
  }
  const bucketName = stackName.startsWith('amplify-')
    ? `${bucketParameters.bucketName}\${hash}-\${env}`
    : `${bucketParameters.bucketName}${s3ResourceName}-\${env}`;
  return bucketName;
}

/**
 * Resolves the GraphQL transformer major version (1 or 2) from feature flags,
 * migrating the deprecated useExperimentalPipelinedTransformer flag first.
 * @throws when the configured transformerVersion is neither 1 nor 2.
 */
export async function getTransformerVersion(context) {
  const useExperimentalPipelineTransformer = FeatureFlags.getBoolean('graphQLTransformer.useExperimentalPipelinedTransformer');
  let transformerVersion;
  if (useExperimentalPipelineTransformer === false) {
    transformerVersion = 1;
  } else {
    await migrateToTransformerVersionFeatureFlag(context);
    transformerVersion = FeatureFlags.getNumber('graphQLTransformer.transformerVersion');
    if (transformerVersion !== 1 && transformerVersion !== 2) {
      throw new Error(`Invalid value specified for transformerVersion: '${transformerVersion}'`);
    }
  }
  return transformerVersion;
}

/**
 * One-time migration of the deprecated useExperimentalPipelinedTransformer
 * feature flag: when it is enabled but transformerVersion is still 1, bumps
 * transformerversion to 2 in cli.json, reloads flags, and warns the user.
 */
async function migrateToTransformerVersionFeatureFlag(context) {
  const projectPath = pathManager.findProjectRoot() ?? process.cwd();
  let config = stateManager.getCLIJSON(projectPath, undefined, {
    throwIfNotExist: false,
    preserveComments: true,
  });
  const useExperimentalPipelineTransformer = FeatureFlags.getBoolean('graphQLTransformer.useExperimentalPipelinedTransformer');
  const transformerVersion = FeatureFlags.getNumber('graphQLTransformer.transformerVersion');
  if (useExperimentalPipelineTransformer && transformerVersion === 1) {
    config.features.graphqltransformer.transformerversion = 2;
    stateManager.setCLIJSON(projectPath, config);
    await FeatureFlags.reloadValues();
    context.print.warning(
      `\nThe project is configured with 'transformerVersion': ${transformerVersion}, but 'useExperimentalPipelinedTransformer': ${useExperimentalPipelineTransformer}. Setting the 'transformerVersion': ${config.features.graphqltransformer.transformerversion}. 'useExperimentalPipelinedTransformer' is deprecated.`,
    );
  }
}
the_stack
* Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. */ import { Templates } from './templates'; /** * Multi locale Template Manager for language generation. This template manager will enumerate multi-locale LG files and will select * the appropriate template using the current culture to perform template evaluation. */ export class MultiLanguageLG { languagePolicy: Map<string, string[]>; lgPerLocale: Map<string, Templates>; private readonly locales = [ '', 'aa', 'aa-dj', 'aa-er', 'aa-et', 'af', 'af-na', 'af-za', 'agq', 'agq-cm', 'ak', 'ak-gh', 'am', 'am-et', 'ar', 'ar-001', 'ar-ae', 'ar-bh', 'ar-dj', 'ar-dz', 'ar-eg', 'ar-er', 'ar-il', 'ar-iq', 'ar-jo', 'ar-km', 'ar-kw', 'ar-lb', 'ar-ly', 'ar-ma', 'ar-mr', 'ar-om', 'ar-ps', 'ar-qa', 'ar-sa', 'ar-sd', 'ar-so', 'ar-ss', 'ar-sy', 'ar-td', 'ar-tn', 'ar-ye', 'arn', 'arn-cl', 'as', 'as-in', 'asa', 'asa-tz', 'ast', 'ast-es', 'az', 'az-cyrl', 'az-cyrl-az', 'az-latn', 'az-latn-az', 'ba', 'ba-ru', 'bas', 'bas-cm', 'be', 'be-by', 'bem', 'bem-zm', 'bez', 'bez-tz', 'bg', 'bg-bg', 'bin', 'bin-ng', 'bm', 'bm-latn', 'bm-latn-ml', 'bn', 'bn-bd', 'bn-in', 'bo', 'bo-cn', 'bo-in', 'br', 'br-fr', 'brx', 'brx-in', 'bs', 'bs-cyrl', 'bs-cyrl-ba', 'bs-latn', 'bs-latn-ba', 'byn', 'byn-er', 'ca', 'ca-ad', 'ca-es', 'ca-es-valencia', 'ca-fr', 'ca-it', 'ce', 'ce-ru', 'cgg', 'cgg-ug', 'chr', 'chr-cher', 'chr-cher-us', 'co', 'co-fr', 'cs', 'cs-cz', 'cu', 'cu-ru', 'cy', 'cy-gb', 'da', 'da-dk', 'da-gl', 'dav', 'dav-ke', 'de', 'de-at', 'de-be', 'de-ch', 'de-de', 'de-it', 'de-li', 'de-lu', 'dje', 'dje-ne', 'dsb', 'dsb-de', 'dua', 'dua-cm', 'dv', 'dv-mv', 'dyo', 'dyo-sn', 'dz', 'dz-bt', 'ebu', 'ebu-ke', 'ee', 'ee-gh', 'ee-tg', 'el', 'el-cy', 'el-gr', 'en', 'en-001', 'en-029', 'en-150', 'en-ag', 'en-ai', 'en-as', 'en-at', 'en-au', 'en-bb', 'en-be', 'en-bi', 'en-bm', 'en-bs', 'en-bw', 'en-bz', 'en-ca', 'en-cc', 'en-ch', 'en-ck', 'en-cm', 'en-cx', 'en-cy', 'en-de', 'en-dk', 'en-dm', 'en-er', 'en-fi', 'en-fj', 
'en-fk', 'en-fm', 'en-gb', 'en-gd', 'en-gg', 'en-gh', 'en-gi', 'en-gm', 'en-gu', 'en-gy', 'en-hk', 'en-id', 'en-ie', 'en-il', 'en-im', 'en-in', 'en-io', 'en-je', 'en-jm', 'en-ke', 'en-ki', 'en-kn', 'en-ky', 'en-lc', 'en-lr', 'en-ls', 'en-mg', 'en-mh', 'en-mo', 'en-mp', 'en-ms', 'en-mt', 'en-mu', 'en-mw', 'en-my', 'en-na', 'en-nf', 'en-ng', 'en-nl', 'en-nr', 'en-nu', 'en-nz', 'en-pg', 'en-ph', 'en-pk', 'en-pn', 'en-pr', 'en-pw', 'en-rw', 'en-sb', 'en-sc', 'en-sd', 'en-se', 'en-sg', 'en-sh', 'en-si', 'en-sl', 'en-ss', 'en-sx', 'en-sz', 'en-tc', 'en-tk', 'en-to', 'en-tt', 'en-tv', 'en-tz', 'en-ug', 'en-um', 'en-us', 'en-vc', 'en-vg', 'en-vi', 'en-vu', 'en-ws', 'en-za', 'en-zm', 'en-zw', 'eo', 'eo-001', 'es', 'es-419', 'es-ar', 'es-bo', 'es-br', 'es-bz', 'es-cl', 'es-co', 'es-cr', 'es-cu', 'es-do', 'es-ec', 'es-es', 'es-gq', 'es-gt', 'es-hn', 'es-mx', 'es-ni', 'es-pa', 'es-pe', 'es-ph', 'es-pr', 'es-py', 'es-sv', 'es-us', 'es-uy', 'es-ve', 'et', 'et-ee', 'eu', 'eu-es', 'ewo', 'ewo-cm', 'fa', 'fa-ir', 'ff', 'ff-latn', 'ff-latn-bf', 'ff-latn-cm', 'ff-latn-gh', 'ff-latn-gm', 'ff-latn-gn', 'ff-latn-gw', 'ff-latn-lr', 'ff-latn-mr', 'ff-latn-ne', 'ff-latn-ng', 'ff-latn-sl', 'ff-latn-sn', 'fi', 'fi-fi', 'fil', 'fil-ph', 'fo', 'fo-dk', 'fo-fo', 'fr', 'fr-029', 'fr-be', 'fr-bf', 'fr-bi', 'fr-bj', 'fr-bl', 'fr-ca', 'fr-cd', 'fr-cf', 'fr-cg', 'fr-ch', 'fr-ci', 'fr-cm', 'fr-dj', 'fr-dz', 'fr-fr', 'fr-ga', 'fr-gf', 'fr-gn', 'fr-gp', 'fr-gq', 'fr-ht', 'fr-km', 'fr-lu', 'fr-ma', 'fr-mc', 'fr-mf', 'fr-mg', 'fr-ml', 'fr-mq', 'fr-mr', 'fr-mu', 'fr-nc', 'fr-ne', 'fr-pf', 'fr-pm', 'fr-re', 'fr-rw', 'fr-sc', 'fr-sn', 'fr-sy', 'fr-td', 'fr-tg', 'fr-tn', 'fr-vu', 'fr-wf', 'fr-yt', 'fur', 'fur-it', 'fy', 'fy-nl', 'ga', 'ga-ie', 'gd', 'gd-gb', 'gl', 'gl-es', 'gn', 'gn-py', 'gsw', 'gsw-ch', 'gsw-fr', 'gsw-li', 'gu', 'gu-in', 'guz', 'guz-ke', 'gv', 'gv-im', 'ha', 'ha-latn', 'ha-latn-gh', 'ha-latn-ne', 'ha-latn-ng', 'haw', 'haw-us', 'he', 'he-il', 'hi', 'hi-in', 'hr', 'hr-ba', 'hr-hr', 'hsb', 
'hsb-de', 'hu', 'hu-hu', 'hy', 'hy-am', 'ia', 'ia-001', 'ibb', 'ibb-ng', 'id', 'id-id', 'ig', 'ig-ng', 'ii', 'ii-cn', 'is', 'is-is', 'it', 'it-ch', 'it-it', 'it-sm', 'it-va', 'iu', 'iu-cans', 'iu-cans-ca', 'iu-latn', 'iu-latn-ca', 'ja', 'ja-jp', 'jgo', 'jgo-cm', 'jmc', 'jmc-tz', 'jv', 'jv-java', 'jv-java-id', 'jv-latn', 'jv-latn-id', 'ka', 'ka-ge', 'kab', 'kab-dz', 'kam', 'kam-ke', 'kde', 'kde-tz', 'kea', 'kea-cv', 'khq', 'khq-ml', 'ki', 'ki-ke', 'kk', 'kk-kz', 'kkj', 'kkj-cm', 'kl', 'kl-gl', 'kln', 'kln-ke', 'km', 'km-kh', 'kn', 'kn-in', 'ko', 'ko-kp', 'ko-kr', 'kok', 'kok-in', 'kr', 'kr-latn', 'kr-latn-ng', 'ks', 'ks-arab', 'ks-arab-in', 'ks-deva', 'ks-deva-in', 'ksb', 'ksb-tz', 'ksf', 'ksf-cm', 'ksh', 'ksh-de', 'ku', 'ku-arab', 'ku-arab-iq', 'ku-arab-ir', 'kw', 'kw-gb', 'ky', 'ky-kg', 'la', 'la-001', 'lag', 'lag-tz', 'lb', 'lb-lu', 'lg', 'lg-ug', 'lkt', 'lkt-us', 'ln', 'ln-ao', 'ln-cd', 'ln-cf', 'ln-cg', 'lo', 'lo-la', 'lrc', 'lrc-iq', 'lrc-ir', 'lt', 'lt-lt', 'lu', 'lu-cd', 'luo', 'luo-ke', 'luy', 'luy-ke', 'lv', 'lv-lv', 'mas', 'mas-ke', 'mas-tz', 'mer', 'mer-ke', 'mfe', 'mfe-mu', 'mg', 'mg-mg', 'mgh', 'mgh-mz', 'mgo', 'mgo-cm', 'mi', 'mi-nz', 'mk', 'mk-mk', 'ml', 'ml-in', 'mn', 'mn-cyrl', 'mn-mn', 'mn-mong', 'mn-mong-cn', 'mn-mong-mn', 'mni', 'mni-in', 'moh', 'moh-ca', 'mr', 'mr-in', 'ms', 'ms-bn', 'ms-my', 'ms-sg', 'mt', 'mt-mt', 'mua', 'mua-cm', 'my', 'my-mm', 'mzn', 'mzn-ir', 'naq', 'naq-na', 'nb', 'nb-no', 'nb-sj', 'nd', 'nd-zw', 'nds', 'nds-de', 'nds-nl', 'ne', 'ne-in', 'ne-np', 'nl', 'nl-aw', 'nl-be', 'nl-bq', 'nl-cw', 'nl-nl', 'nl-sr', 'nl-sx', 'nmg', 'nmg-cm', 'nn', 'nn-no', 'nnh', 'nnh-cm', 'no', 'nqo', 'nqo-gn', 'nr', 'nr-za', 'nso', 'nso-za', 'nus', 'nus-ss', 'nyn', 'nyn-ug', 'oc', 'oc-fr', 'om', 'om-et', 'om-ke', 'or', 'or-in', 'os', 'os-ge', 'os-ru', 'pa', 'pa-arab', 'pa-arab-pk', 'pa-guru', 'pa-in', 'pap', 'pap-029', 'pl', 'pl-pl', 'prg', 'prg-001', 'prs', 'prs-af', 'ps', 'ps-af', 'pt', 'pt-ao', 'pt-br', 'pt-ch', 'pt-cv', 'pt-gq', 'pt-gw', 
'pt-lu', 'pt-mo', 'pt-mz', 'pt-pt', 'pt-st', 'pt-tl', 'quc', 'quc-latn', 'quc-latn-gt', 'quz', 'quz-bo', 'quz-ec', 'quz-pe', 'rm', 'rm-ch', 'rn', 'rn-bi', 'ro', 'ro-md', 'ro-ro', 'rof', 'rof-tz', 'ru', 'ru-by', 'ru-kg', 'ru-kz', 'ru-md', 'ru-ru', 'ru-ua', 'rw', 'rw-rw', 'rwk', 'rwk-tz', 'sa', 'sa-in', 'sah', 'sah-ru', 'saq', 'saq-ke', 'sbp', 'sbp-tz', 'sd', 'sd-arab', 'sd-arab-pk', 'sd-deva', 'sd-deva-in', 'se', 'se-fi', 'se-no', 'se-se', 'seh', 'seh-mz', 'ses', 'ses-ml', 'sg', 'sg-cf', 'shi', 'shi-latn', 'shi-latn-ma', 'shi-tfng', 'shi-tfng-ma', 'si', 'si-lk', 'sk', 'sk-sk', 'sl', 'sl-si', 'sma', 'sma-no', 'sma-se', 'smj', 'smj-no', 'smj-se', 'smn', 'smn-fi', 'sms', 'sms-fi', 'sn', 'sn-latn', 'sn-latn-zw', 'so', 'so-dj', 'so-et', 'so-ke', 'so-so', 'sq', 'sq-al', 'sq-mk', 'sq-xk', 'sr', 'sr-cyrl', 'sr-cyrl-ba', 'sr-cyrl-me', 'sr-cyrl-rs', 'sr-cyrl-xk', 'sr-latn', 'sr-latn-ba', 'sr-latn-me', 'sr-latn-rs', 'sr-latn-xk', 'ss', 'ss-sz', 'ss-za', 'ssy', 'ssy-er', 'st', 'st-ls', 'st-za', 'sv', 'sv-ax', 'sv-fi', 'sv-se', 'sw', 'sw-cd', 'sw-ke', 'sw-tz', 'sw-ug', 'syr', 'syr-sy', 'ta', 'ta-in', 'ta-lk', 'ta-my', 'ta-sg', 'te', 'te-in', 'teo', 'teo-ke', 'teo-ug', 'tg', 'tg-cyrl', 'tg-cyrl-tj', 'th', 'th-th', 'ti', 'ti-er', 'ti-et', 'tig', 'tig-er', 'tk', 'tk-tm', 'tn', 'tn-bw', 'tn-za', 'to', 'to-to', 'tr', 'tr-cy', 'tr-tr', 'ts', 'ts-za', 'tt', 'tt-ru', 'twq', 'twq-ne', 'tzm', 'tzm-arab', 'tzm-arab-ma', 'tzm-latn', 'tzm-latn-dz', 'tzm-latn-ma', 'tzm-tfng', 'tzm-tfng-ma', 'ug', 'ug-cn', 'uk', 'uk-ua', 'ur', 'ur-in', 'ur-pk', 'uz', 'uz-arab', 'uz-arab-af', 'uz-cyrl', 'uz-cyrl-uz', 'uz-latn', 'uz-latn-uz', 'vai', 'vai-latn', 'vai-latn-lr', 'vai-vaii', 'vai-vaii-lr', 've', 've-za', 'vi', 'vi-vn', 'vo', 'vo-001', 'vun', 'vun-tz', 'wae', 'wae-ch', 'wal', 'wal-et', 'wo', 'wo-sn', 'xh', 'xh-za', 'xog', 'xog-ug', 'yav', 'yav-cm', 'yi', 'yi-001', 'yo', 'yo-bj', 'yo-ng', 'zgh', 'zgh-tfng', 'zgh-tfng-ma', 'zh', 'zh-cn', 'zh-hans', 'zh-hans-hk', 'zh-hans-mo', 'zh-hant', 'zh-hk', 
'zh-mo', 'zh-sg', 'zh-tw', 'zu', 'zu-za',
  ];

  /**
   * Initializes a new instance of the MultiLanguageLG class.
   * @param templatesPerLocale A map of LG file templates per locale.
   * @param filePerLocale A map of locale and LG file.
   * @param defaultLanguage Default language.
   */
  constructor(
    templatesPerLocale: Map<string, Templates> | undefined,
    filePerLocale: Map<string, string> | undefined,
    defaultLanguage?: string
  ) {
    if (templatesPerLocale !== undefined) {
      // Caller supplied pre-parsed templates; use them as-is.
      this.lgPerLocale = templatesPerLocale;
    } else if (filePerLocale === undefined) {
      // Neither templates nor files were provided — nothing to manage.
      throw new Error(`input is empty`);
    } else {
      // Parse each LG file up front so per-call template lookups are cheap.
      this.lgPerLocale = new Map<string, Templates>();
      for (const item of filePerLocale.entries()) {
        this.lgPerLocale.set(item[0], Templates.parseFile(item[1]));
      }
    }
    // With no default language, the empty-string (invariant) locale is used.
    const defaultLanguageArray = defaultLanguage === undefined ? [''] : [defaultLanguage];
    this.languagePolicy = this.getDefaultPolicy(defaultLanguageArray);
  }

  /**
   * Generate template evaluate result.
   * @param template Template name.
   * @param data Scope data.
   * @param locale Locale info.
*/ generate(template: string, data?: object, locale?: string): any { if (!template) { throw new Error('template is empty'); } if (!locale) { locale = ''; } if (this.lgPerLocale.has(locale)) { return this.lgPerLocale.get(locale).evaluate(template, data); } const fallbackLocales: string[] = []; if (this.languagePolicy.has(locale)) { fallbackLocales.push(...this.languagePolicy.get(locale)); } if (locale !== '' && this.languagePolicy.has('')) { fallbackLocales.push(...this.languagePolicy.get('')); } if (fallbackLocales.length === 0) { throw new Error(`No supported language found for ${locale}`); } for (const fallBackLocale of fallbackLocales) { if (this.lgPerLocale.has(fallBackLocale)) { return this.lgPerLocale.get(fallBackLocale).evaluate(template, data); } } throw new Error(`No LG responses found for locale: ${locale}`); } /** * @private */ private getDefaultPolicy(defaultLanguages: string[]): Map<string, string[]> { if (defaultLanguages === undefined) { defaultLanguages = ['']; } const result = new Map<string, string[]>(); for (const locale of this.locales) { let lang = locale.toLowerCase(); const fallback: string[] = []; while (lang) { fallback.push(lang); const i = lang.lastIndexOf('-'); if (i > 0) { lang = lang.substr(0, i); } else { break; } } if (locale === '') { // here we set the default fallback.push(...defaultLanguages); } result.set(locale, fallback); } return result; } }
the_stack
import { ComponentClass } from 'react'
import Taro, { Component } from '@tarojs/taro'
import { View, Image } from '@tarojs/components'
import tool from '../../utils/tool'
import './index.less'
import loading from '../../assets/images/pic_loading.png'
import scale from '../../assets/images/scale.png'

type ComponentStateProps = {}

// Props supplied by the parent canvas/editor component.
type ComponentOwnProps = {
  onChangeStyle: () => void,
  onTouchend: (data?:any) => void,
  onTouchstart: (data?:any) => void,
  onImageLoaded?: (detail:object, item?:any) => void,
  url: string,
  stylePrams: object
}

// Geometry of the enclosing frame, in page coordinates.
type ComponentState = {
  framePrams: {
    width: number,
    height: number,
    left: number,
    top: number,
  }
}

type IProps = ComponentStateProps & ComponentOwnProps

interface Sticker {
  props: IProps;
  throttledStickerOntouchmove: () => void;
  throttledArrowOntouchmove: () => void;
}

/**
 * Draggable/pinchable/rotatable sticker rendered inside a frame. The sticker
 * itself is stateless about its geometry: every gesture computes a new style
 * object and reports it upward through onChangeStyle; the parent owns
 * stylePrams and passes the updated values back down.
 */
class Sticker extends Component {
  static defaultProps = {
    url: loading,
    stylePrams: {
      id: '',
      zIndex: 0,
      width: 0,
      height: 0,
      x: 0,
      y: 0,
      rotate: 0,
      originWidth: 0, // original (source image) width
      originHeight: 0, // original (source image) height
      autoWidth: 0, // width after auto-fitting to the frame
      autoHeight: 0, // height after auto-fitting to the frame
      autoScale: 1, // scale relative to the frame
      fixed: false, // locked in place: gestures are ignored
      isActive: false, // selected: border/handle are shown
      visible: true, // whether the sticker is rendered
    },
  }

  state = {
    framePrams: {
      width: 0,
      height: 0,
      left: 0,
      top: 0,
    },
  }

  // Mutable gesture state shared across touch handlers (not React state —
  // it never triggers a render on its own).
  gesture = {
    startX: 0,
    startY: 0,
    zoom: false,
    distance: 0,
    preV: {x:null, y:null},
    center: {x:0, y:0}, // sticker center point (x, y) in frame coordinates
    scale: 1
  }

  constructor (props) {
    super(props)
    // Throttle move handlers to ~20 fps to limit setData/render pressure.
    this.throttledStickerOntouchmove = this.throttle(this.stickerOntouchmove, 1000/20).bind(this)
    this.throttledArrowOntouchmove = this.throttle(this.arrowOntouchmove, 1000/20).bind(this)
  }

  componentWillMount () { }

  // NOTE(review): legacy lifecycle; reads nextProps.framePrams, which is not
  // declared in ComponentOwnProps — presumably injected by the parent; verify.
  componentWillReceiveProps (nextProps) {
    // console.log('sticker componentWillReceiveProps', this.props, nextProps)
    if (nextProps.framePrams && nextProps.framePrams.width > 0) {
      this.setState({ framePrams: nextProps.framePrams })
    }
  }

  // Whether this sticker is locked (fixed stickers ignore all gestures).
  isFixed = () => {
    const {stylePrams} = this.props
    return stylePrams.fixed || false
  }

  // Notify the parent that a gesture started on this sticker.
  emitTouchstart = () => {
    const {onTouchstart, stylePrams} = this.props
    typeof onTouchstart === 'function' && onTouchstart(stylePrams)
  }

  // Notify the parent that a gesture on this sticker ended.
  emitTouchend = () => {
    const {onTouchend, stylePrams} = this.props
    typeof onTouchend === 'function' && onTouchend(stylePrams)
  }

  stickerOntouchstart = (e) => {
    if (this.isFixed()) { // locked stickers cannot be moved
      return
    }
    // console.log('stickerOntouchstart', e)
    const {gesture} = this
    const {framePrams} = this.state
    const frameOffsetX = framePrams.left
    const frameOffsetY = framePrams.top
    if (e.touches.length === 1) {
      // Single finger: remember the drag start point in frame coordinates.
      let { clientX, clientY } = e.touches[0]
      gesture.startX = clientX - frameOffsetX
      gesture.startY = clientY - frameOffsetY
      // console.log('gesture-one', gesture)
    } else {
      // Two fingers: record the initial finger distance and vector so the
      // move handler can derive scale and rotation deltas.
      let xMove = e.touches[1].clientX - e.touches[0].clientX
      let yMove = e.touches[1].clientY - e.touches[0].clientY
      let distance = Math.sqrt(xMove * xMove + yMove * yMove)
      // record the rotation baseline vector
      let v = { x: xMove, y: yMove }
      gesture.preV = v
      gesture.distance = distance
      gesture.zoom = true
      // console.log('two-finger pinch', gesture)
    }
    this.emitTouchstart()
  }

  stickerOntouchmove = (e) => {
    if (this.isFixed()) { // locked stickers cannot be moved
      return
    }
    // console.log('stickerOntouchmove', e)
    const {gesture} = this
    const {stylePrams} = this.props
    const {framePrams} = this.state
    const frameOffsetX = framePrams.left
    const frameOffsetY = framePrams.top
    if (e.touches.length === 1) { // single-finger drag
      if (gesture.zoom) { // a pinch is in progress: ignore single-finger moves
        // console.log('不能移动')
        return
      }
      let { clientX, clientY } = e.touches[0];
      const pointX = clientX - frameOffsetX // touch point x in frame coordinates
      const pointY = clientY - frameOffsetY // touch point y in frame coordinates
      let offsetX = pointX - gesture.startX;
      let offsetY = pointY - gesture.startY;
      gesture.startX = pointX;
      gesture.startY = pointY;
      this.changeStyleParams({ offsetX, offsetY }, 'offset')
    } else { // two-finger pinch: scale + rotate
      let xMove = e.touches[1].clientX - e.touches[0].clientX;
      let yMove = e.touches[1].clientY - e.touches[0].clientY;
      let distance = Math.sqrt(xMove * xMove + yMove * yMove);
      // scale from the change in finger distance, clamped to [0.3, 4]
      let distanceDiff = distance - gesture.distance;
      let newScale = gesture.scale + 0.005 * distanceDiff;
      // console.log('newScale', newScale)
      if (newScale < 0.3) {
        newScale = 0.3;
      }
      if (newScale > 4) {
        newScale = 4;
      }
      let newWidth = newScale * stylePrams.autoWidth
      let newHeight = newScale * stylePrams.autoHeight
      // shift x/y so the sticker scales around its center, not its corner
      let newX = stylePrams.x - (newWidth - gesture.scale * stylePrams.autoWidth) * 0.5
      let newY = stylePrams.y - (newHeight - gesture.scale * stylePrams.autoHeight) * 0.5
      // rotation from the angle between the previous and current finger vectors
      let newRotate = 0
      let preV = gesture.preV
      let v = { x: xMove, y: yMove }
      if (preV.x !== null) {
        let angle = tool.getRotateAngle(v, preV)
        newRotate = parseFloat(stylePrams.rotate) + angle
      }
      // update gesture baselines for the next move event
      gesture.scale = newScale
      gesture.distance = distance
      gesture.preV = v
      this.changeStyleParams({ ...stylePrams, width: newWidth, height: newHeight, x : newX, y : newY, rotate: newRotate })
    }
  }

  stickerOntouchend = (e) => {
    if (this.isFixed()) { // locked stickers cannot be moved
      return
    }
    // console.log('stickerOntouchend', e)
    if (e.touches.length === 0) { // all fingers lifted: reset pinch state
      this.gesture.zoom = false
    }
    this.emitTouchend()
  }

  // Start of a drag on the corner handle: one-finger scale/rotate around the
  // sticker's center.
  arrowOntouchstart = (e) => {
    if (this.isFixed()) { // locked stickers cannot be moved
      return
    }
    const {gesture} = this
    const {stylePrams} = this.props
    const {framePrams} = this.state
    const frameOffsetX = framePrams.left
    const frameOffsetY = framePrams.top
    const center = tool.calcCenterPosition(stylePrams.x, stylePrams.y, stylePrams.width, stylePrams.height)
    if (e.touches.length === 1) {
      let { clientX, clientY } = e.touches[0]
      gesture.startX = clientX - frameOffsetX
      gesture.startY = clientY - frameOffsetY
      // console.log('gesture-one', gesture)
      // baseline vector/distance from the sticker center to the finger
      let xMove = clientX - frameOffsetX - center.x;
      let yMove = clientY - frameOffsetY -center.y;
      let distance = Math.sqrt(xMove * xMove + yMove * yMove);
      // record the rotation baseline vector
      let v = { x: xMove, y: yMove }
      gesture.distance = distance
      gesture.zoom = true
      gesture.preV = v
      gesture.center = center
    }
    this.emitTouchstart()
  }

  arrowOntouchmove = (e) => {
    if (this.isFixed()) { // locked stickers cannot be moved
      return
    }
    // console.log('arrowOntouchmove', e)
    const {gesture} = this
    const {stylePrams} = this.props
    const {center} = gesture
    const {framePrams} = this.state
    const frameOffsetX = framePrams.left
    const frameOffsetY = framePrams.top
    if (e.touches.length === 1) {
      let xMove = e.touches[0].clientX - frameOffsetX - center.x
      let yMove = e.touches[0].clientY - frameOffsetY - center.y
      let distance = Math.sqrt(xMove * xMove + yMove * yMove)
      // scale from the change in center-to-finger distance, clamped to [0.2, 4]
      let distanceDiff = distance - gesture.distance;
      let newScale = gesture.scale + 0.005 * distanceDiff;
      if (newScale < 0.2) {
        newScale = 0.2;
      }
      if (newScale > 4) {
        newScale = 4;
      }
      let newWidth = newScale * stylePrams.autoWidth
      let newHeight = newScale * stylePrams.autoHeight
      // shift x/y so the sticker scales around its center
      let newX = stylePrams.x - (newWidth - gesture.scale * stylePrams.autoWidth) * 0.5
      let newY = stylePrams.y - (newHeight - gesture.scale * stylePrams.autoHeight) * 0.5
      // rotation from the angle between the previous and current vectors
      // NOTE(review): unlike stickerOntouchmove, newRotate has no initializer,
      // so it stays undefined if preV was never set — in practice
      // arrowOntouchstart always sets preV first; confirm.
      let newRotate
      let preV = gesture.preV
      let v = { x: xMove, y: yMove }
      if (preV.x !== null) {
        let angle = tool.getRotateAngle(v, preV)
        newRotate = parseFloat(stylePrams.rotate) + angle
      }
      // update gesture baselines for the next move event
      gesture.scale = newScale
      gesture.distance = distance
      gesture.preV = v
      // NOTE(review): these assignments mutate props.stylePrams in place
      // (redundant with the changeStyleParams call below) — verify intent.
      stylePrams.width = newWidth
      stylePrams.height = newHeight
      stylePrams.x = newX
      stylePrams.y = newY
      stylePrams.rotate = newRotate
      this.changeStyleParams({ ...stylePrams, width: newWidth, height: newHeight, x : newX, y : newY, rotate: newRotate })
    }
  }

  arrowOntouchend = (e) => {
    if (this.isFixed()) { // locked stickers cannot be moved
      return
    }
    // console.log('arrowOntouchend', e)
    if (e.touches.length === 0) { // all fingers lifted: reset pinch state
      this.gesture.zoom = false
    }
    this.emitTouchend()
  }

  // Forward the image-load event detail (and this sticker's style) upward.
  handleImageLoaded = (e) => {
    const { onImageLoaded, stylePrams } = this.props
    onImageLoaded && onImageLoaded(e.detail, stylePrams)
  }

  /**
   * Builds the next style object and reports it to the parent via
   * onChangeStyle. type === 'offset' treats obj as {offsetX, offsetY} deltas;
   * otherwise obj is merged over the current stylePrams.
   */
  changeStyleParams = (obj:any, type?:string) => {
    const {stylePrams} = this.props
    const {onChangeStyle} = this.props
    let newStylePrams:any = null
    if (type === 'offset') {
      const {offsetX, offsetY} = obj
      newStylePrams = {
        ...stylePrams,
        x: stylePrams.x + offsetX,
        y: stylePrams.y + offsetY,
      }
    } else {
      newStylePrams = { ...stylePrams, ...obj }
    }
    typeof onChangeStyle === 'function' && onChangeStyle(newStylePrams)
  }

  // Leading-edge throttle: invokes func at most once per deltaX milliseconds.
  throttle = (func, deltaX) => {
    let lastCalledAt = new Date().getTime();
    let that = this;
    return function() {
      if(new Date().getTime() - lastCalledAt >= deltaX) {
        func.apply(that, arguments);
        lastCalledAt = new Date().getTime();
      } else {
        console.log('不执行')
      }
    }
  }

  // Maps the sticker style params to an inline CSS style object.
  formatStyle = (style) => {
    const {zIndex, width, height, x, y, rotate} = style
    return {
      zIndex: zIndex,
      width:`${width}px`,
      height:`${height}px`,
      transform: `translate(${x}px, ${y}px) rotate(${rotate}deg)`
    }
  }

  render() {
    const { url, stylePrams } = this.props
    const { framePrams } = this.state
    const styleObj = this.formatStyle(this.props.stylePrams)
    // console.log('sticker(this.props)', this.props)
    return (
      <View
        className={`sticker-wrap ${stylePrams.fixed ? 'event-through' : ''} ${(stylePrams.visible && stylePrams.width > 0) ? '' : 'hidden' }`}
        style={styleObj}
      >
        {/* <View style="position: absolute;left:0;top:0">{framePrams.width}</View> */}
        {/* <View style="position: absolute;left:0;top:20px">{stylePrams.autoWidth}</View> */}
        {/* <View style="position: absolute;left:0;top:20px">{stylePrams.width}</View> */}
        <Image src={url} mode="widthFix" style="width:100%;height:100%" onLoad={this.handleImageLoaded} onTouchstart={this.stickerOntouchstart} onTouchmove={this.throttledStickerOntouchmove} onTouchend={this.stickerOntouchend}/>
        <View className={`border ${stylePrams.isActive ? 'active' : ''}`}></View>
        <View className={`control ${stylePrams.isActive ? 'active' : ''}`} onTouchstart={this.arrowOntouchstart} onTouchmove={this.throttledArrowOntouchmove} onTouchend={this.arrowOntouchend} >
          <Image src={scale} mode="widthFix" style="width:50%;height:50%"/>
        </View>
      </View>
    )
  }
}

export default Sticker as ComponentClass<ComponentOwnProps, ComponentState>
the_stack
import Chunk from '../Chunk';
import Graph from '../Graph';
import Module from '../Module';
import {
  EmittedChunk,
  FilePlaceholder,
  NormalizedInputOptions,
  NormalizedOutputOptions,
  OutputBundleWithPlaceholders,
  WarningHandler
} from '../rollup/types';
import { BuildPhase } from './buildPhase';
import { createHash } from './crypto';
import {
  errAssetNotFinalisedForFileName,
  errAssetReferenceIdNotFoundForSetSource,
  errAssetSourceAlreadySet,
  errChunkNotGeneratedForFileName,
  errFailedValidation,
  errFileNameConflict,
  errFileReferenceIdNotFoundForFilename,
  errInvalidRollupPhaseForChunkEmission,
  errNoAssetSourceSet,
  error,
  warnDeprecation
} from './error';
import { extname } from './path';
import { isPathFragment } from './relativeId';
import { makeUnique, renderNamePattern } from './renderNamePattern';

// Renders output.assetFileNames (string pattern or function) for an emitted
// asset and makes the result unique within the bundle. [hash] is the first
// 8 hex chars of a hash over "<sanitized name>:<source>".
function generateAssetFileName(
  name: string | undefined,
  source: string | Uint8Array,
  outputOptions: NormalizedOutputOptions,
  bundle: OutputBundleWithPlaceholders
): string {
  const emittedName = outputOptions.sanitizeFileName(name || 'asset');
  return makeUnique(
    renderNamePattern(
      typeof outputOptions.assetFileNames === 'function'
        ? outputOptions.assetFileNames({ name, source, type: 'asset' })
        : outputOptions.assetFileNames,
      'output.assetFileNames',
      {
        ext: () => extname(emittedName).substr(1),
        extname: () => extname(emittedName),
        hash() {
          const hash = createHash();
          hash.update(emittedName);
          hash.update(':');
          hash.update(source);
          return hash.digest('hex').substr(0, 8);
        },
        name: () => emittedName.substr(0, emittedName.length - extname(emittedName).length)
      }
    ),
    bundle
  );
}

// Claims fileName in the bundle with a placeholder so later emissions cannot
// silently take the same name; warns on a pre-existing conflict.
function reserveFileNameInBundle(
  fileName: string,
  bundle: OutputBundleWithPlaceholders,
  warn: WarningHandler
) {
  if (fileName in bundle) {
    warn(errFileNameConflict(fileName));
  }
  bundle[fileName] = FILE_PLACEHOLDER;
}

interface ConsumedChunk {
  fileName: string | undefined;
  module: null | Module;
  name: string;
  type: 'chunk';
}

interface ConsumedAsset {
  fileName: string | undefined;
  name: string | undefined;
  source: string | Uint8Array | undefined;
  type: 'asset';
}

interface EmittedFile {
  [key: string]: unknown;
  fileName?: string;
  name?: string;
  type: 'chunk' | 'asset';
}

type ConsumedFile = ConsumedChunk | ConsumedAsset;

export const FILE_PLACEHOLDER: FilePlaceholder = {
  type: 'placeholder'
};

// Type guard: an emitted file must declare type 'asset' or 'chunk'.
function hasValidType(
  emittedFile: unknown
): emittedFile is { [key: string]: unknown; type: 'asset' | 'chunk' } {
  return Boolean(
    emittedFile &&
      ((emittedFile as { [key: string]: unknown }).type === 'asset' ||
        (emittedFile as { [key: string]: unknown }).type === 'chunk')
  );
}

// Type guard: fileName/name, when present, must be strings that are neither
// absolute nor relative paths.
function hasValidName(emittedFile: {
  [key: string]: unknown;
  type: 'asset' | 'chunk';
}): emittedFile is EmittedFile {
  const validatedName = emittedFile.fileName || emittedFile.name;
  return !validatedName || (typeof validatedName === 'string' && !isPathFragment(validatedName));
}

// Validates that an asset source is a string or Uint8Array (Buffer included);
// otherwise throws via error() using the best available asset name.
function getValidSource(
  source: unknown,
  emittedFile: { fileName?: string; name?: string },
  fileReferenceId: string | null
): string | Uint8Array {
  if (!(typeof source === 'string' || source instanceof Uint8Array)) {
    const assetName = emittedFile.fileName || emittedFile.name || fileReferenceId;
    return error(
      errFailedValidation(
        `Could not set source for ${
          typeof assetName === 'string' ? `asset "${assetName}"` : 'unnamed asset'
        }, asset source needs to be a string, Uint8Array or Buffer.`
      )
    );
  }
  return source;
}

// Returns the finalized fileName of an asset; throws if the asset has not been
// finalized yet (its fileName is only assigned once the source is known and a
// bundle exists).
function getAssetFileName(file: ConsumedAsset, referenceId: string): string {
  if (typeof file.fileName !== 'string') {
    return error(errAssetNotFinalisedForFileName(file.name || referenceId));
  }
  return file.fileName;
}

// Returns the fileName of an emitted chunk, either the explicit one or the id
// of the facade chunk generated for its module; throws if not generated yet.
function getChunkFileName(
  file: ConsumedChunk,
  facadeChunkByModule: Map<Module, Chunk> | null
): string {
  const fileName = file.fileName || (file.module && facadeChunkByModule?.get(file.module)?.id);
  if (!fileName) return error(errChunkNotGeneratedForFileName(file.fileName || file.name));
  return fileName;
}

// Tracks plugin-emitted files (this.emitFile), hands out stable reference ids,
// and finalizes assets into the output bundle once setOutputBundle() provides
// the bundle and output options.
export class FileEmitter {
  private bundle: OutputBundleWithPlaceholders | null = null;
  private facadeChunkByModule: Map<Module, Chunk> | null = null;
  private filesByReferenceId: Map<string, ConsumedFile>;
  private outputOptions: NormalizedOutputOptions | null = null;

  constructor(
    private readonly graph: Graph,
    private readonly options: NormalizedInputOptions,
    baseFileEmitter?: FileEmitter
  ) {
    // A derived emitter starts from a copy of the base emitter's files so
    // per-output finalization does not leak between outputs.
    this.filesByReferenceId = baseFileEmitter
      ? new Map(baseFileEmitter.filesByReferenceId)
      : new Map();
  }

  // Throws if any emitted asset never received a source (and thus no fileName).
  public assertAssetsFinalized = (): void => {
    for (const [referenceId, emittedFile] of this.filesByReferenceId.entries()) {
      if (emittedFile.type === 'asset' && typeof emittedFile.fileName !== 'string')
        return error(errNoAssetSourceSet(emittedFile.name || referenceId));
    }
  };

  // Plugin-facing entry point: validates the emitted file and dispatches to
  // emitChunk/emitAsset, returning the reference id.
  public emitFile = (emittedFile: unknown): string => {
    if (!hasValidType(emittedFile)) {
      return error(
        errFailedValidation(
          `Emitted files must be of type "asset" or "chunk", received "${
            emittedFile && (emittedFile as any).type
          }".`
        )
      );
    }
    if (!hasValidName(emittedFile)) {
      return error(
        errFailedValidation(
          `The "fileName" or "name" properties of emitted files must be strings that are neither absolute nor relative paths, received "${
            emittedFile.fileName || emittedFile.name
          }".`
        )
      );
    }
    if (emittedFile.type === 'chunk') {
      return this.emitChunk(emittedFile);
    } else {
      return this.emitAsset(emittedFile);
    }
  };

  // Resolves a reference id to its final output file name.
  public getFileName = (fileReferenceId: string): string => {
    const emittedFile = this.filesByReferenceId.get(fileReferenceId);
    if (!emittedFile) return error(errFileReferenceIdNotFoundForFilename(fileReferenceId));
    if (emittedFile.type === 'chunk') {
      return getChunkFileName(emittedFile, this.facadeChunkByModule);
    } else {
      return getAssetFileName(emittedFile, fileReferenceId);
    }
  };

  // Sets the (single, immutable) source of a previously emitted asset.
  public setAssetSource = (referenceId: string, requestedSource: unknown): void => {
    const consumedFile = this.filesByReferenceId.get(referenceId);
    if (!consumedFile) return error(errAssetReferenceIdNotFoundForSetSource(referenceId));
    if (consumedFile.type !== 'asset') {
      return error(
        errFailedValidation(
          `Asset sources can only be set for emitted assets but "${referenceId}" is an emitted chunk.`
        )
      );
    }
    if (consumedFile.source !== undefined) {
      return error(errAssetSourceAlreadySet(consumedFile.name || referenceId));
    }
    const source = getValidSource(requestedSource, consumedFile, referenceId);
    if (this.bundle) {
      // Bundle already exists (generate phase): finalize immediately.
      this.finalizeAsset(consumedFile, source, referenceId, this.bundle);
    } else {
      // Build phase: keep the source until setOutputBundle() is called.
      consumedFile.source = source;
    }
  };

  public setOutputBundle = (
    outputBundle: OutputBundleWithPlaceholders,
    outputOptions: NormalizedOutputOptions,
    facadeChunkByModule: Map<Module, Chunk>
  ): void => {
    this.outputOptions = outputOptions;
    this.bundle = outputBundle;
    this.facadeChunkByModule = facadeChunkByModule;
    // Reserve explicit file names first so generated names cannot collide
    // with them...
    for (const emittedFile of this.filesByReferenceId.values()) {
      if (emittedFile.fileName) {
        reserveFileNameInBundle(emittedFile.fileName, this.bundle, this.options.onwarn);
      }
    }
    // ...then finalize every asset whose source is already known.
    for (const [referenceId, consumedFile] of this.filesByReferenceId.entries()) {
      if (consumedFile.type === 'asset' && consumedFile.source !== undefined) {
        this.finalizeAsset(consumedFile, consumedFile.source, referenceId, this.bundle);
      }
    }
  };

  // Derives a collision-free 8-hex-char reference id by hashing idBase and
  // re-hashing the previous id until unused, then registers the file.
  private assignReferenceId(file: ConsumedFile, idBase: string): string {
    let referenceId: string | undefined;
    do {
      const hash = createHash();
      if (referenceId) {
        hash.update(referenceId);
      } else {
        hash.update(idBase);
      }
      referenceId = hash.digest('hex').substr(0, 8);
    } while (this.filesByReferenceId.has(referenceId));
    this.filesByReferenceId.set(referenceId, file);
    return referenceId;
  }

  private emitAsset(emittedAsset: EmittedFile): string {
    const source =
      typeof emittedAsset.source !== 'undefined'
        ? getValidSource(emittedAsset.source, emittedAsset, null)
        : undefined;
    const consumedAsset: ConsumedAsset = {
      fileName: emittedAsset.fileName,
      name: emittedAsset.name,
      source,
      type: 'asset'
    };
    const referenceId = this.assignReferenceId(
      consumedAsset,
      emittedAsset.fileName || emittedAsset.name || emittedAsset.type
    );
    if (this.bundle) {
      if (emittedAsset.fileName) {
        reserveFileNameInBundle(emittedAsset.fileName, this.bundle, this.options.onwarn);
      }
      if (source !== undefined) {
        this.finalizeAsset(consumedAsset, source, referenceId, this.bundle);
      }
    }
    return referenceId;
  }

  private emitChunk(emittedChunk: EmittedFile): string {
    // Chunks can only be emitted while modules are still being loaded.
    if (this.graph.phase > BuildPhase.LOAD_AND_PARSE) {
      return error(errInvalidRollupPhaseForChunkEmission());
    }
    if (typeof emittedChunk.id !== 'string') {
      return error(
        errFailedValidation(
          `Emitted chunks need to have a valid string id, received "${emittedChunk.id}"`
        )
      );
    }
    const consumedChunk: ConsumedChunk = {
      fileName: emittedChunk.fileName,
      module: null,
      name: emittedChunk.name || emittedChunk.id,
      type: 'chunk'
    };
    this.graph.moduleLoader
      .emitChunk(emittedChunk as unknown as EmittedChunk)
      .then(module => (consumedChunk.module = module))
      .catch(() => {
        // Avoid unhandled Promise rejection as the error will be thrown later
        // once module loading has finished
      });
    return this.assignReferenceId(consumedChunk, emittedChunk.id);
  }

  // Assigns the final file name (explicit > deduplicated-by-source > pattern)
  // and writes the asset into the bundle.
  private finalizeAsset(
    consumedFile: ConsumedFile,
    source: string | Uint8Array,
    referenceId: string,
    bundle: OutputBundleWithPlaceholders
  ): void {
    const fileName =
      consumedFile.fileName ||
      findExistingAssetFileNameWithSource(bundle, source) ||
      generateAssetFileName(consumedFile.name, source, this.outputOptions!, bundle);
    // We must not modify the original assets to avoid interaction between outputs
    const assetWithFileName = { ...consumedFile, fileName, source };
    this.filesByReferenceId.set(referenceId, assetWithFileName);
    const { options } = this;
    bundle[fileName] = {
      fileName,
      get isAsset(): true {
        warnDeprecation(
          'Accessing "isAsset" on files in the bundle is deprecated, please use "type === \'asset\'" instead',
          true,
          options
        );
        return true;
      },
      name: consumedFile.name,
      source,
      type: 'asset'
    };
  }
}

// Deduplication: returns the fileName of an existing bundle asset with an
// identical source, or null if none exists.
function findExistingAssetFileNameWithSource(
  bundle: OutputBundleWithPlaceholders,
  source: string | Uint8Array
): string | null {
  for (const [fileName, outputFile] of Object.entries(bundle)) {
    if (outputFile.type === 'asset' && areSourcesEqual(source, outputFile.source))
      return fileName;
  }
  return null;
}

// Structural equality for asset sources: strings compare with ===, a string
// never equals a binary source, Buffers use .equals, and plain Uint8Arrays
// fall back to element-wise comparison.
function areSourcesEqual(
  sourceA: string | Uint8Array | Buffer,
  sourceB: string | Uint8Array | Buffer
): boolean {
  if (typeof sourceA === 'string') {
    return sourceA === sourceB;
  }
  if (typeof sourceB === 'string') {
    return false;
  }
  if ('equals' in sourceA) {
    return sourceA.equals(sourceB);
  }
  if (sourceA.length !== sourceB.length) {
    return false;
  }
  for (let index = 0; index < sourceA.length; index++) {
    if (sourceA[index] !== sourceB[index]) {
      return false;
    }
  }
  return true;
}
the_stack
const Service = require('egg').Service;
import moment = require('moment');
import _ = require('lodash');
import { ForbiddenError } from 'apollo-server';

// Order service: listing, creation (WeChat pay / balance pay), per-period
// merchant financial stats, and pickup-code redemption.
module.exports = class OrderService extends Service {
  ctx: any;
  app: any;
  constructor(ctx) {
    super(ctx);
  }
  // List orders (paginated). Visibility depends on the caller's role:
  // users/members see their own orders, merchants see their store's orders.
  async fetch(params) {
    const { ctx } = this;
    const { Order, Op, User, Store } = ctx.model;
    let pageSize = 10;
    if (params && params.pageSize) pageSize = Number(params.pageSize);
    const where: any = {};
    // Unpaid orders are hidden by default (overridden by an explicit status).
    where.status = { [Op.not]: 'unpaid' };
    const { userId, storeId, status, type } = params;
    const user = await User.findByPk(ctx.state.user.sub);
    switch (user.role) {
      case 'user':
        where.userId = user.id;
        break;
      case 'member':
        where.userId = user.id;
        break;
      case 'merchant':
        const store = await Store.findOne({ where: { userId: user.id } });
        where.storeId = store.id;
        break;
      default:
        break;
    }
    if (status) where.status = status;
    if (type && type !== 'merge') where.type = type;
    if (type === 'merge') {
      // 'merge' means both delivery types at once.
      where.type = {
        [Op.or]: ['distribution', 'unmanned'],
      };
    }
    if (userId) where.userId = userId;
    if (storeId) where.storeId = storeId;
    const options: any = {
      limit: pageSize,
      offset: params.currentPage ? pageSize * (params.currentPage - 1) : 0,
      order: [
        ['id', 'DESC'],
      ],
      raw: true,
      where,
    };
    const {
      count: total,
      rows: list,
    } = await Order.findAndCountAll(options);
    const newList: any = [];
    // Attach a signed QR-code download URL to orders that have a pickup code.
    // NOTE(review): Array#push is synchronous — the awaits on push are no-ops.
    for (const iterator of list) {
      if (iterator.code) {
        iterator.qrCode = await ctx.service.file.generatePrivateDownloadUrl(`${iterator.id}.png`);
        await newList.push(iterator);
      } else {
        iterator.qrCode = null;
        await newList.push(iterator);
      }
    }
    return {
      list: newList,
      pagination: {
        total,
        pageSize,
        current: parseInt(params.currentPage, 10) || 1,
      },
    };
  };
  // Create an order and request a WeChat JSAPI payment for it.
  // NOTE(review): this method and balanceCreate below duplicate the whole
  // order-assembly flow; consider extracting a shared private helper.
  async create(input) {
    const { ctx } = this;
    const {
      Order,
      Item,
      OrderItem,
      Store,
      Coupon,
      WechatAccount,
      Config,
      Address,
    } = ctx.model;
    if (input.type === 'storeBuy') {
      return await ctx.service.payWay.storePay(input);
    }
    // Find the current user's openId
    const wechatAccount = await WechatAccount.findOne({
      where: {
        userId: ctx.state.user.sub,
      },
    });
    // Find the store
    const store = await Store.findByPk(input.storeId);
    // Actual item prices and quantities
    const items: any = [];
    // Line items to create for this order
    const orderItem: any = [];
    // Build the order's line items
    for (const iterator of input.itemIds) {
      const item = await Item.findByPk(iterator.itemId);
      await orderItem.push({
        fileKey: item.imageKey,
        title: item.name,
        price: item.price,
        commission: item.commission,
        amount: iterator.number * item.price,
        number: iterator.number,
        itemCode: item.code,
      });
      await items.push({
        price: item.price,
        number: iterator.number,
      });
    }
    // 8-digit random pickup code
    let code = '';
    for (let i = 0; i < 8; i++) {
      const num = Math.floor(Math.random() * 10);
      code += num;
    }
    // Assemble the order parameters
    const data: any = {
      userId: wechatAccount.userId,
      storeId: store.id,
      time: input.time,
      price: items.reduce(
        (total, { price, number }) => total + price * number,
        0,
      ),
      amount: items.reduce(
        (total, { price, number }) => total + price * number,
        0,
      ),
      code,
    };
    // Shipping fee: add the cost when below the free-shipping threshold and
    // the order is a delivery order (has a receiver).
    const rel = await Config.findByPk('free');
    const cost = await Config.findByPk('cost');
    if (data.amount < rel.integer && input.address && input.address.receiverName) {
      data.amount += cost.integer;
    }
    // Create the order
    const order = await Order.create(_.pickBy(
      _.pick(
        data,
        [
          'price',
          'discount',
          'amount',
          'couponId',
          'userId',
          'storeId',
          'code',
          'time',
        ]
      ),
      value => value !== null
    ));
    if (input.couponId) {
      // Find the given coupon
      const coupon = await Coupon.findByPk(input.couponId);
      // Coupon must exist and the amount must meet its threshold.
      // NOTE(review): data.discount is never initialized, so `+=` yields NaN;
      // and since the order row was already created above, neither discount
      // nor the reduced amount reaches the stored order — yet the payment
      // below charges order.amount. Verify intended behaviour.
      if (coupon && data.amount >= coupon.require) {
        data.discount += coupon.amount;
        data.amount -= coupon.amount;
        await coupon.update({ orderId: order.id });
      }
    }
    if (order) {
      // Generate the pickup QR code and store it on Aliyun
      await ctx.service.file.generateImage(order.id);
      await order.update({ qrCode: code });
    }
    if (input.address && input.address.receiverName) {
      const { receiverName, receiverPhone, receiverAddress } = input.address;
      // An address means this is a delivery order
      await order.update({ type: 'distribution' });
      // Create the order's address record
      await Address.create({
        receiverName,
        receiverPhone,
        receiverAddress,
        orderId: order.id,
      });
    }
    // Create the order's line items
    for (const iterator of orderItem) {
      iterator.orderId = order.id;
      await OrderItem.create(iterator)
    }
    // Create the trade record
    const { id: tradeId } = await order.createTrade({
      price: data.amount,
      orderId: order.id,
    });
    return await ctx.app.wechatPay.requestPayment({
      body: '购买商品',
      out_trade_no: String(tradeId),
      total_fee: order.amount,
      spbill_create_ip: ctx.get('x-real-ip'),
      trade_type: 'JSAPI',
      openid: wechatAccount.openId,
    });
  };
  // Create an order paid from the user's account balance. Mirrors create()
  // but settles immediately when the balance covers the amount.
  async balanceCreate(input) {
    const { ctx } = this;
    const {
      Order,
      Item,
      OrderItem,
      Store,
      Coupon,
      WechatAccount,
      Config,
      Address,
      User,
      Point,
      Balance,
    } = ctx.model;
    // Find the current user's openId
    const wechatAccount = await WechatAccount.findOne({
      where: {
        userId: ctx.state.user.sub,
      },
    });
    // Find the store
    const store = await Store.findByPk(input.storeId);
    // Actual item prices and quantities
    const items: any = [];
    // Line items to create for this order
    const orderItem: any = [];
    // Build the order's line items
    for (const iterator of input.itemIds) {
      const item = await Item.findByPk(iterator.itemId);
      await orderItem.push({
        fileKey: item.imageKey,
        title: item.name,
        price: item.price,
        commission: item.commission,
        amount: iterator.number * item.price,
        number: iterator.number,
        itemCode: item.code,
      });
      await items.push({
        price: item.price,
        number: iterator.number,
      });
    }
    // 8-digit random pickup code
    let code = '';
    for (let i = 0; i < 8; i++) {
      const num = Math.floor(Math.random() * 10);
      code += num;
    }
    // Assemble the order parameters
    const data: any = {
      userId: wechatAccount.userId,
      storeId: store.id,
      time: input.time,
      price: items.reduce(
        (total, { price, number }) => total + price * number,
        0,
      ),
      amount: items.reduce(
        (total, { price, number }) => total + price * number,
        0,
      ),
      code,
    };
    // Shipping fee (same rule as create())
    const rel = await Config.findByPk('free');
    const cost = await Config.findByPk('cost');
    if (data.amount < rel.integer && input.address && input.address.receiverName) {
      data.amount += cost.integer;
    }
    // Create the order
    const order = await Order.create(_.pickBy(
      _.pick(data, [
        'price',
        'discount',
        'amount',
        'couponId',
        'userId',
        'storeId',
        'code',
        'time',
      ]),
      value => value !== null));
    if (input.couponId) {
      // Find the given coupon
      const coupon = await Coupon.findByPk(input.couponId);
      // Coupon must exist and the amount must meet its threshold.
      // NOTE(review): same uninitialized data.discount / post-create coupon
      // issue as in create() above.
      if (coupon && data.amount >= coupon.require) {
        data.discount += coupon.amount;
        data.amount -= coupon.amount;
        await coupon.update({ orderId: order.id });
      }
    }
    if (order) {
      // Generate the pickup QR code and store it on Aliyun
      await ctx.service.file.generateImage(order.id);
      await order.update({ qrCode: code });
    }
    if (input.address && input.address.receiverName) {
      const { receiverName, receiverPhone, receiverAddress } = input.address;
      // An address means this is a delivery order
      await order.update({ type: 'distribution' });
      // Create the order's address record
      await Address.create({
        receiverName,
        receiverPhone,
        receiverAddress,
        orderId: order.id,
      });
    }
    // Create the order's line items
    for (const iterator of orderItem) {
      iterator.orderId = order.id;
      await OrderItem.create(iterator)
    }
    // Create the trade record
    const trade = await order.createTrade({
      price: data.amount,
      orderId: order.id,
    });
    // Find the current user
    const user = await User.findByPk(wechatAccount.userId);
    // Settle immediately if the balance covers the order total
    if (user.balance >= order.amount) {
      // Update trade and order status
      await trade.update({ status: 'paid' });
      await order.update({ status: 'paid' });
      const coupon = await Coupon.findOne({
        where: {
          orderId: order.id,
        },
      });
      if (coupon) await coupon.update({ usedAt: order.createdAt });
      const orderItems = await OrderItem.findAll({
        where: {
          orderId: order.id,
        },
      });
      for (const iterator of orderItems) {
        await iterator.update({ status: 'completed' });
        // NOTE(review): iterator.itemCode stores item.code (see the line-item
        // build above), but findByPk looks up by primary key — verify these
        // coincide, otherwise stock is decremented on the wrong item.
        const item = await Item.findByPk(iterator.itemCode);
        await item.update({ stock: item.stock - iterator.number });
      }
      const store = await Store.findByPk(order.storeId);
      // Record store income
      await Balance.create({
        balance: store.balance + order.amount,
        price: order.amount,
        add: true,
        remark: '店铺收入',
        userId: store.userId,
        storeId: store.id,
        orderId: order.id,
      });
      // Record user spending
      await Balance.create({
        balance: user.balance - order.amount,
        price: order.amount,
        add: false,
        remark: '用户消费',
        userId: order.userId,
        orderId: order.id,
      });
      // NOTE(review): sales is set from store.balance + amount, not
      // store.sales + amount — looks like a copy/paste slip; confirm.
      await store.update({
        balance: store.balance + order.amount,
        sales: store.balance + order.amount,
      });
      // Find the current user
      const currentUser = await User.findByPk(order.userId);
      // Record purchase points
      // NOTE(review): add: 'true' is a string here but a boolean in the
      // Balance records above — confirm the Point model expects a string.
      await Point.create({
        point: currentUser.point + order.amount,
        price: order.amount,
        add: 'true',
        remark: '购买商品所得',
        userId: currentUser.id,
        orderId: order.id,
      });
      await currentUser.update({
        point: currentUser.point + order.amount,
        balance: currentUser.balance - order.amount,
      });
      if (currentUser.inviterId) {
        // Find the current user's inviter and settle member commission
        const inviterUser = await User.findByPk(currentUser.inviterId);
        if (inviterUser && inviterUser.role === 'member') await ctx.service.math.index(inviterUser.id, order.id)
      }
      return true;
    } else {
      throw new ForbiddenError('余额不足!');
    }
  }
  // Merchant financial stats for a period: today / week / month / year.
  async timeOrder(time) {
    const { ctx } = this;
    const { Order, Store, Op } = ctx.model;
    // Time-range boundaries [begin, end] for the Op.between query below
    const arr: any = [];
    switch (time) {
      case 'today':
        const beginToday = moment().subtract(0, "day").startOf('day').format('YYYY-MM-DD') + " 00:00:00";
        const endOfToday = moment().subtract(0, "day").endOf('day').format('YYYY-MM-DD') + " 23:59:59";
        arr.push(beginToday, endOfToday);
        break;
      case 'week':
        const beginWeek = moment().weekday(1).format('YYYY-MM-DD') + " 00:00:00";
        const endOfWeek = moment().weekday(7).format('YYYY-MM-DD') + " 23:59:59";
        arr.push(beginWeek, endOfWeek);
        break;
      case 'month':
        const beginMonth = moment().startOf('month').format("YYYY-MM-DD") + " 00:00:00";
        const endOfMonth = moment().endOf('month').format('YYYY-MM-DD') + " 23:59:59";
        arr.push(beginMonth, endOfMonth);
        break;
      case 'year':
        const beginYear = moment().startOf('year').format('YYYY-MM-DD') + " 00:00:00";
        const endOfYear = moment().endOf('year').format('YYYY-MM-DD') + " 23:59:59";
        arr.push(beginYear, endOfYear);
        break;
      default:
        break;
    };
    // Find the logged-in merchant's store
    const myStore = await Store.findOne({
      where: {
        userId: ctx.state.user.sub,
      }
    });
    // All paid orders of the store
    const totalOrder = await Order.findAll({
      where: {
        storeId: myStore.id,
        status: {
          [Op.not]: 'unpaid',
        },
      },
      raw: true
    });
    // Store orders within the requested time range
    const timeOrder = await Order.findAll({
      where: {
        storeId: myStore.id,
        status: {
          [Op.not]: 'unpaid',
        },
        createdAt: {
          [Op.between]: arr,
        },
      },
      raw: true
    });
    let totalMerchantSales = 0;
    let merchantSales = 0;
    // Total sales
    for (const iterator of totalOrder) {
      totalMerchantSales += iterator.amount;
    }
    // Sales within the period (day / week / month / year)
    for (const iterator of timeOrder) {
      merchantSales += iterator.amount;
    }
    // Return the computed values
    const merchantData = {
      totalMerchantSales,
      merchantSales,
      timeOrder,
    };
    return merchantData;
  };
  // Redeem a pickup code: marks the matching 'fetch' order as completed.
  /**
   * @param code pickup code
   */
  async draw(code) {
    const { ctx } = this;
    const { Order, Store } = ctx.model;
    const order = await Order.findOne({
      where: {
        code,
        status: 'fetch',
      },
    });
    if (!order) {
      await ctx.service.detection.error('nonentity');
    }
    // Find the order's store
    const store = await Store.findByPk(order.storeId);
    // NOTE(review): store is loaded via order.storeId and order via code, so
    // both conditions below are always false; likewise the status switch can
    // never fire because the query filtered on status: 'fetch'. Dead code?
    if (store.id !== order.storeId || code !== order.code) {
      await ctx.service.detection.error('notMatching');
    }
    switch (order.status) {
      case 'unpaid':
        await ctx.service.detection.error('unpaid');
        break;
      case 'paid':
        await ctx.service.detection.error('paid');
        break;
      case 'completed':
        await ctx.service.detection.error('completed');
        break;
      default:
        break;
    }
    // Update the order status
    await order.update({ status: 'completed' });
    return true;
  };
}
the_stack
import {runIfMain} from "../../deps/mocha.ts";
import {expect} from "../../deps/chai.ts";
import {Post} from "./entity/Post.ts";
import {Guest as GuestV1} from "./entity/v1/Guest.ts";
import {Comment as CommentV1} from "./entity/v1/Comment.ts";
import {Guest as GuestV2} from "./entity/v2/Guest.ts";
import {Comment as CommentV2} from "./entity/v2/Comment.ts";
import {View} from "./entity/View.ts";
import {Category} from "./entity/Category.ts";
import {closeTestingConnections, createTestingConnections, setupSingleTestingConnection} from "../../utils/test-utils.ts";
import {Connection} from "../../../src/connection/Connection.ts";
import {Repository} from "../../../src/repository/Repository.ts";
import {TreeRepository} from "../../../src/repository/TreeRepository.ts";
import {getConnectionManager} from "../../../src/index.ts";
import {NoConnectionForRepositoryError} from "../../../src/error/NoConnectionForRepositoryError.ts";
import {EntityManager} from "../../../src/entity-manager/EntityManager.ts";
import {CannotGetEntityManagerNotConnectedError} from "../../../src/error/CannotGetEntityManagerNotConnectedError.ts";
import {ConnectionOptions} from "../../../src/connection/ConnectionOptions.ts";
// import {PostgresConnectionOptions} from "../../../src/driver/postgres/PostgresConnectionOptions.ts";
import {PromiseUtils} from "../../../src/util/PromiseUtils.ts";

// Integration tests for the Connection lifecycle: behaviour before/after
// connecting, repository access, schema synchronization, and postgres
// schema-name handling.
describe("Connection", () => {
    // const resourceDir = __dirname + "/../../../../../test/functional/connection/";

    describe("before connection is established", function() {

        let connection: Connection;
        before(async () => {
            const options = setupSingleTestingConnection("mysql", {
                name: "default",
                entities: []
            });
            // Tests in this block self-skip when mysql is not configured.
            if (!options)
                return;
            connection = getConnectionManager().create(options);
        });
        after(() => {
            if (connection && connection.isConnected)
                return connection.close();
            return Promise.resolve();
        });

        it("connection.isConnected should be false", () => {
            if (!connection)
                return;
            connection.isConnected.should.be.false;
        });

        it.skip("entity manager and reactive entity manager should not be accessible", () => {
            expect(() => connection.manager).to.throw(CannotGetEntityManagerNotConnectedError);
            // expect(() => connection.reactiveEntityManager).to.throw(CannotGetEntityManagerNotConnectedError);
        });

        // todo: they aren't promises anymore
        /*it("import entities, entity schemas, subscribers and naming strategies should work", () => {
            return Promise.all([
                connection.importEntities([Post]).should.be.fulfilled,
                connection.importEntitySchemas([]).should.be.fulfilled,
                connection.importSubscribers([]).should.be.fulfilled,
                connection.importNamingStrategies([]).should.be.fulfilled,
                connection.importEntitiesFromDirectories([]).should.be.fulfilled,
                connection.importEntitySchemaFromDirectories([]).should.be.fulfilled,
                connection.importSubscribersFromDirectories([]).should.be.fulfilled,
                connection.importNamingStrategiesFromDirectories([]).should.be.fulfilled
            ]);
        });*/

        it("should not be able to close", async () => {
            if (!connection)
                return;
            try {
                await connection.close(); // CannotCloseNotConnectedError
                expect.fail("an error to be thrown");
            } catch (err) {
                expect(err.message).not.to.be.equal("an error to be thrown");
            }
        });

        it("should not be able to sync a schema", async () => {
            if (!connection)
                return;
            try {
                await connection.synchronize(); // CannotCloseNotConnectedError
                expect.fail("an error to be thrown");
            } catch (err) {
                expect(err.message).not.to.equal("an error to be thrown");
            }
        });

        it.skip("should not be able to use repositories", () => {
            if (!connection)
                return;
            expect(() => connection.getRepository(Post)).to.throw(NoConnectionForRepositoryError);
            expect(() => connection.getTreeRepository(Category)).to.throw(NoConnectionForRepositoryError);
            // expect(() => connection.getReactiveRepository(Post)).to.throw(NoConnectionForRepositoryError);
            // expect(() => connection.getReactiveTreeRepository(Category)).to.throw(NoConnectionForRepositoryError);
        });

        it("should be able to connect", async () => {
            if (!connection)
                return;
            await connection.connect();
        });

    });

    describe.skip("establishing connection", function() {
        it("should throw DriverOptionNotSetError when extra.socketPath and host is missing", function() {
            expect(() => {
                getConnectionManager().create(<ConnectionOptions>{
                    type: "mysql",
                    username: "test",
                    password: "test",
                    database: "test",
                    entities: [],
                    dropSchema: false,
                    schemaCreate: false,
                    enabledDrivers: ["mysql"],
                });
            }).to.throw(Error);
        });
    });

    describe("after connection is established successfully", function() {

        let connections: Connection[];
        beforeEach(() => createTestingConnections({ entities: [Post, Category], schemaCreate: true, dropSchema: true }).then(all => connections = all));
        afterEach(() => closeTestingConnections(connections));

        it("connection.isConnected should be true", () => connections.forEach(connection => {
            connection.isConnected.should.be.true;
        }));

        it("entity manager and reactive entity manager should be accessible", () => connections.forEach(connection => {
            expect(connection.manager).to.be.instanceOf(EntityManager);
            // expect(connection.reactiveEntityManager).to.be.instanceOf(ReactiveEntityManager);
        }));

        // NOTE(review): the async forEach callback here means rejections are
        // not awaited by the test — failures could go unnoticed; verify.
        it("should not be able to connect again", () => connections.forEach(async connection => {
            try {
                await connection.connect(); // CannotConnectAlreadyConnectedError
                expect.fail("an error to be thrown");
            } catch (err) {
                expect(err.message).not.to.equal("an error to be thrown");
            }
        }));

        it("should be able to close a connection", async () => Promise.all(connections.map(connection => {
            return connection.close();
        })));

    });

    describe("working with repositories after connection is established successfully", function() {

        let connections: Connection[];
        before(() => createTestingConnections({ entities: [Post, Category], schemaCreate: true, dropSchema: true }).then(all => connections = all));
        after(() => closeTestingConnections(connections));

        it("should be able to get simple entity repository", () => connections.forEach(connection => {
            connection.getRepository(Post).should.be.instanceOf(Repository);
            connection.getRepository(Post).should.not.be.instanceOf(TreeRepository);
            connection.getRepository(Post).target.should.be.eql(Post);
        }));

        it("should be able to get tree entity repository", () => connections.forEach(connection => {
            connection.getTreeRepository(Category).should.be.instanceOf(TreeRepository);
            connection.getTreeRepository(Category).target.should.be.eql(Category);
        }));

        // it("should be able to get simple entity reactive repository", () => connections.forEach(connection => {
        //     connection.getReactiveRepository(Post).should.be.instanceOf(ReactiveRepository);
        //     connection.getReactiveRepository(Post).should.not.be.instanceOf(TreeReactiveRepository);
        //     connection.getReactiveRepository(Post).target.should.be.eql(Post);
        // }));

        // it("should be able to get tree entity reactive repository", () => connections.forEach(connection => {
        //     connection.getReactiveTreeRepository(Category).should.be.instanceOf(TreeReactiveRepository);
        //     connection.getReactiveTreeRepository(Category).target.should.be.eql(Category);
        // }));

        it("should not be able to get tree entity repository of the non-tree entities", () => connections.forEach(connection => {
            expect(() => connection.getTreeRepository(Post)).to.throw(Error); // RepositoryNotTreeError
            // expect(() => connection.getReactiveTreeRepository(Post)).to.throw(RepositoryNotTreeError);
        }));

        it("should not be able to get repositories that are not registered", () => connections.forEach(connection => {
            expect(() => connection.getRepository("SomeEntity")).to.throw(Error); // RepositoryNotTreeError
            expect(() => connection.getTreeRepository("SomeEntity")).to.throw(Error); // RepositoryNotTreeError
            // expect(() => connection.getReactiveRepository("SomeEntity")).to.throw(RepositoryNotFoundError);
            // expect(() => connection.getReactiveTreeRepository("SomeEntity")).to.throw(RepositoryNotFoundError);
        }));

    });

    describe("generate a schema when connection.syncSchema is called", function() {

        let connections: Connection[];
        before(() => createTestingConnections({ entities: [Post], schemaCreate: true, dropSchema: true }).then(all => connections = all));
        after(() => closeTestingConnections(connections));

        it("database should be empty after schema is synced with dropDatabase flag", () => Promise.all(connections.map(async connection => {
            const postRepository = connection.getRepository(Post);
            const post = new Post();
            post.title = "new post";
            await postRepository.save(post);
            const loadedPost = await postRepository.findOne(post.id);
            expect(loadedPost).to.be.eql(post);
            // synchronize(true) drops the schema before recreating it.
            await connection.synchronize(true);
            const againLoadedPost = await postRepository.findOne(post.id);
            expect(againLoadedPost).to.be.undefined;
        })));

    });

    describe("log a schema when connection.logSyncSchema is called", function() {

        let connections: Connection[];
        before(async () => connections = await createTestingConnections({ entities: [Post] }));
        after(() => closeTestingConnections(connections));

        it("should return sql log properly", () => Promise.all(connections.map(async connection => {
            await connection.driver.createSchemaBuilder().log();
            // console.log(sql);
        })));

    });

    describe("after connection is closed successfully", function() {

        // open then close the connections up-front
        let connections: Connection[] = [];
        before(() => createTestingConnections({ entities: [Post], schemaCreate: true, dropSchema: true }).then(all => {
            connections = all;
            return Promise.all(connections.map(connection => connection.close()));
        }));

        it("should not be able to close already closed connection", () => connections.forEach(async connection => {
            try {
                await connection.close(); // CannotCloseNotConnectedError
                expect.fail("an error to be thrown");
            } catch (err) {
                expect(err.message).not.to.equal("an error to be thrown");
            }
        }));

        it("connection.isConnected should be false", () => connections.forEach(connection => {
            connection.isConnected.should.be.false;
        }));

    });

    describe("skip schema generation when synchronize option is set to false", function() {

        let connections: Connection[];
        beforeEach(() => createTestingConnections({ entities: [View], dropSchema: true }).then(all => connections = all));
        afterEach(() => closeTestingConnections(connections));

        it("database should be empty after schema sync", () => Promise.all(connections.map(async connection => {
            await connection.synchronize(true);
            const queryRunner = connection.createQueryRunner();
            let schema = await queryRunner.getTables(["view"]);
            await queryRunner.release();
            expect(schema.some(table => table.name === "view")).to.be.false;
        })));

    });

    describe("different names of the same content of the schema", () => {

        let connections: Connection[];
        beforeEach(async () => {
            const connections1 = await createTestingConnections({
                name: "test",
                enabledDrivers: ["postgres"],
                entities: [CommentV1, GuestV1],
                schema: "test-schema",
                dropSchema: true,
            });
            const connections2 = await createTestingConnections({
                name: "another",
                enabledDrivers: ["postgres"],
                entities: [CommentV1, GuestV1],
                schema: "another-schema",
                dropSchema: true
            });
            connections = [...connections1, ...connections2];
        });
        // NOTE(review): connections are created per-test in beforeEach but only
        // closed once in after() — the sibling describe below pairs beforeEach
        // with afterEach; confirm this asymmetry is intentional.
        after(() => closeTestingConnections(connections));

        it("should not interfere with each other", async () => {
            await PromiseUtils.runInSequence(connections, c => c.synchronize());
            await closeTestingConnections(connections);
            // Reopen with the v2 entities against the existing schemas.
            const connections1 = await createTestingConnections({
                name: "test",
                enabledDrivers: ["postgres"],
                entities: [CommentV2, GuestV2],
                schema: "test-schema",
                dropSchema: false,
                schemaCreate: true
            });
            const connections2 = await createTestingConnections({
                name: "another",
                enabledDrivers: ["postgres"],
                entities: [CommentV2, GuestV2],
                schema: "another-schema",
                dropSchema: false,
                schemaCreate: true
            });
            connections = [...connections1, ...connections2];
        });
    });

    describe("can change postgres default schema name", () => {
        let connections: Connection[];
        beforeEach(async () => {
            const connections1 = await createTestingConnections({
                name: "test",
                enabledDrivers: ["postgres"],
                entities: [CommentV1, GuestV1],
                schema: "test-schema",
                dropSchema: true,
            });
            const connections2 = await createTestingConnections({
                name: "another",
                enabledDrivers: ["postgres"],
                entities: [CommentV1, GuestV1],
                schema: "another-schema",
                dropSchema: true
            });
            connections = [...connections1, ...connections2];
        });
        afterEach(() => closeTestingConnections(connections));

        it("schema name can be set", () => {
            return Promise.all(connections.map(async connection => {
                await connection.synchronize(true);
                // TODO(uki00a) remove `any` when `PostgresDriver` is implemented.
                const schemaName = (connection.options as /*PostgresConnectionOptions*/any).schema;
                const comment = new CommentV1();
                comment.title = "Change SchemaName";
                comment.context = `To ${schemaName}`;
                const commentRepo = connection.getRepository(CommentV1);
                await commentRepo.save(comment);
                // Verify the row landed in the configured schema via raw SQL.
                const queryRunner = connection.createQueryRunner();
                const rows = await queryRunner.query(`select * from "${schemaName}"."comment" where id = $1`, [comment.id]);
                await queryRunner.release();
                expect(rows[0]["context"]).to.be.eq(comment.context);
            }));
        });

    });

});

runIfMain(import.meta);
import BalenaAuth from 'balena-auth'; import { getRequest } from 'balena-request'; import * as mockttp from 'mockttp'; import { expect } from 'chai'; import { IS_BROWSER } from './integration/setup'; import tokens from './data/tokens'; import { createPinejsClient } from '../lib/pine'; const mockServer = mockttp.getLocal(); let dataDirectory; if (!IS_BROWSER) { // tslint:disable-next-line:no-var-requires const temp = require('temp').track(); dataDirectory = temp.mkdirSync(); } const auth = new BalenaAuth({ dataDirectory }); const request = getRequest({ auth }); const apiVersion = 'v6'; const buildPineInstance = (apiUrl: string, extraOpts?: {}) => createPinejsClient( {}, { apiUrl, apiVersion, request, auth, ...extraOpts, }, ); describe('Pine', function () { beforeEach(() => mockServer.start()); afterEach(() => mockServer.stop()); describe('.apiPrefix', () => { it(`should equal /${apiVersion}/`, function () { const pine = buildPineInstance(mockServer.url); expect(pine.apiPrefix).that.is.a('string'); }); }); // The intention of this spec is to quickly double check // the internal _request() method works as expected. // The nitty grits of request are tested in balena-request. 
describe('given a /user/v1/refresh-token endpoint', function () { beforeEach(async function () { this.pine = buildPineInstance(mockServer.url); mockServer .forGet('/user/v1/refresh-token') .thenReply(200, tokens.johndoe.token); mockServer .forGet('/foo') .withHeaders({ Authorization: `Bearer ${tokens.johndoe.token}`, }) .thenJson(200, { hello: 'world' }); await mockServer.forGet('/foo').thenCallback(function (req) { if (req.url.endsWith(`?apikey=${tokens.johndoe.token}`)) { return { status: 200, json: { hello: 'world' }, }; } return { status: 401, body: 'Unauthorized', }; }); }); describe('._request()', function () { describe('given there is no auth', function () { beforeEach(() => auth.removeKey()); describe('given a simple GET endpoint', function () { describe('given a public resource', function () { beforeEach(async function () { this.pine = buildPineInstance(mockServer.url); await mockServer .forGet('/public_resource') .thenJson(200, { hello: 'public world' }); }); describe('given there is no api key', function () { it('should be successful', async function () { const promise = this.pine._request({ baseUrl: this.pine.API_URL, method: 'GET', url: '/public_resource', }); await expect(promise).to.become({ hello: 'public world' }); }); it('should be successful, if sent anonymously', async function () { const promise = this.pine._request({ baseUrl: this.pine.API_URL, method: 'GET', url: '/public_resource', anonymous: true, }); await expect(promise).to.become({ hello: 'public world' }); }); }); describe('given there is an api key', () => it('should make the request successfully', async function () { const promise = this.pine._request({ baseUrl: this.pine.API_URL, method: 'GET', url: '/public_resource', }); await expect(promise).to.become({ hello: 'public world' }); })); }); describe('given a non-public resource', function () { describe('given there is no api key', function () { beforeEach(function () { this.pine = buildPineInstance(mockServer.url, { apiKey: '' }); 
}); it('should be rejected with an authentication error message', async function () { const promise = this.pine._request({ baseUrl: this.pine.API_URL, method: 'GET', url: '/foo', }); await expect(promise).to.be.rejectedWith('You have to log in'); }); it('should be rejected with an unauthorized error, if sent anonymously', async function () { const promise = this.pine._request({ baseUrl: this.pine.API_URL, method: 'GET', url: '/foo', anonymous: true, }); await expect(promise) .to.be.rejectedWith('Unauthorized') .then((res) => expect(res.statusCode).to.equal(401)); }); }); describe('given there is an api key', function () { beforeEach(function () { this.pine = buildPineInstance(mockServer.url, { apiKey: tokens.johndoe.token, }); }); it('should make the request successfully', async function () { const promise = this.pine._request({ baseUrl: this.pine.API_URL, method: 'GET', url: '/foo', }); await expect(promise).to.become({ hello: 'world' }); }); it('should make the request successfully, if sent anonymously', async function () { const promise = this.pine._request({ baseUrl: this.pine.API_URL, method: 'GET', url: '/foo', anonymous: true, }); await expect(promise).to.become({ hello: 'world' }); }); }); }); }); }); describe('given there is an auth', function () { beforeEach(async () => { await auth.setKey(tokens.johndoe.token); }); describe('given a simple GET endpoint', function () { describe('given a public resource', function () { beforeEach(async function () { this.pine = buildPineInstance(mockServer.url); await mockServer .forGet('/public_resource') .thenJson(200, { hello: 'public world' }); }); it('should be successful', async function () { const promise = this.pine._request({ baseUrl: this.pine.API_URL, method: 'GET', url: '/public_resource', }); await expect(promise).to.become({ hello: 'public world' }); }); it('should be successful, if sent anonymously', async function () { const promise = this.pine._request({ baseUrl: this.pine.API_URL, method: 'GET', url: 
'/public_resource', anonymous: true, }); await expect(promise).to.become({ hello: 'public world' }); }); }); describe('given a non-public resource', function () { beforeEach(function () { this.pine = buildPineInstance(mockServer.url); }); it('should eventually become the response body', async function () { const promise = this.pine._request({ baseUrl: this.pine.API_URL, method: 'GET', url: '/foo', }); await expect(promise).to.eventually.become({ hello: 'world' }); }); it('should not send the auth token, if using an anonymous flag', async function () { const promise = this.pine._request({ baseUrl: this.pine.API_URL, method: 'GET', url: '/foo', anonymous: true, }); await expect(promise).to.be.rejectedWith( 'Request error: Unauthorized', ); }); }); }); describe('given a POST endpoint that mirrors the request body', function () { beforeEach(async function () { this.pine = buildPineInstance(mockServer.url); await mockServer.forPost('/foo').thenCallback(async (req) => ({ status: 200, json: await req.body.getJson(), })); }); it('should eventually become the body', async function () { const promise = this.pine._request({ baseUrl: this.pine.API_URL, method: 'POST', url: '/foo', body: { foo: 'bar', }, }); await expect(promise).to.eventually.become({ foo: 'bar' }); }); }); describe('.get()', function () { describe('given a working pine endpoint', function () { beforeEach(async function () { this.pine = buildPineInstance(mockServer.url); this.applications = { d: [ { id: 1, app_name: 'Bar' }, { id: 2, app_name: 'Foo' }, ], }; await mockServer .forGet(`/${apiVersion}/application`) .withQuery({ $orderby: 'app_name asc' }) .thenJson(200, this.applications); }); it('should make the correct request', async function () { const promise = this.pine.get({ resource: 'application', options: { $orderby: 'app_name asc', }, }); await expect(promise).to.eventually.become(this.applications.d); }); }); describe('given an endpoint that returns an error', function () { beforeEach(async function 
() { this.pine = buildPineInstance(mockServer.url); await mockServer .forGet(`/${apiVersion}/application`) .thenReply(500, 'Internal Server Error'); }); it('should reject the promise with an error message', async function () { const promise = this.pine.get({ resource: 'application', }); await expect(promise).to.be.rejectedWith('Internal Server Error'); }); }); }); describe('.post()', function () { describe('given a working pine endpoint that gives back the request body', function () { beforeEach(async function () { this.pine = buildPineInstance(mockServer.url); await mockServer .forPost(`/${apiVersion}/application`) .thenCallback(async (req) => ({ status: 201, json: await req.body.getJson(), })); }); it('should get back the body', async function () { const promise = this.pine.post({ resource: 'application', body: { app_name: 'App1', device_type: 'raspberry-pi', }, }); await expect(promise).to.eventually.become({ app_name: 'App1', device_type: 'raspberry-pi', }); }); }); describe('given pine endpoint that returns an error', function () { beforeEach(async function () { this.pine = buildPineInstance(mockServer.url); await mockServer .forGet(`/${apiVersion}/application`) .thenReply(404, 'Unsupported device type'); }); it('should reject the promise with an error message', async function () { const promise = this.pine.post({ resource: 'application', body: { app_name: 'App1', }, }); await expect(promise).to.be.rejectedWith( 'Unsupported device type', ); }); }); }); }); }); }); });
the_stack
 * Copyright 2020 Bonitasoft S.A.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
import { expectAsWarning, parseJsonAndExpectOnlyEdges, parseJsonAndExpectOnlyFlowNodes, parsingMessageCollector, verifyLabelFont } from './JsonTestUtils';
import each from 'jest-each';
import { TProcess } from '../../../../../src/model/bpmn/json/baseElement/rootElement/rootElement';
import { shapeBpmnElementKindForLabelTests } from './BpmnJsonParser.label.test';
import { LabelStyleMissingFontWarning } from '../../../../../src/component/parser/json/warnings';

// Asserts that the first collected parsing warning is a
// LabelStyleMissingFontWarning pointing at the given BPMNShape/BPMNEdge id and
// the BPMNLabelStyle id it references.
function expectMissingFontWarning(shapeOrEdgeId: string, labelStyleId: string): void {
  const warning = expectAsWarning<LabelStyleMissingFontWarning>(parsingMessageCollector.getWarnings()[0], LabelStyleMissingFontWarning);
  expect(warning.shapeOrEdgeId).toEqual(shapeOrEdgeId);
  expect(warning.labelStyleId).toEqual(labelStyleId);
}

describe('parse bpmn as json for label font', () => {
  // Nominal case, parametrized over every flow-node kind: a BPMNShape whose
  // BPMNLabel references a BPMNLabelStyle carrying a Font.
  each(shapeBpmnElementKindForLabelTests).it(
    "should convert as Shape with Font, when a BPMNShape (attached to %s & who references a BPMNLabelStyle with font) is an attribute (as object) of 'BPMNPlane' (as object)",
    sourceKind => {
      const json = {
        definitions: {
          targetNamespace: '',
          process: { id: 'Process_1', },
          BPMNDiagram: {
            id: 'BpmnDiagram_1',
            BPMNPlane: {
              id: 'BpmnPlane_1',
              BPMNShape: {
                id: 'shape_source_id_0',
                bpmnElement: 'source_id_0',
                Bounds: { x: 362, y: 232, width: 36, height: 45 },
                BPMNLabel: { id: 'label_id', labelStyle: 'style_id', },
              },
            },
            BPMNLabelStyle: { id: 'style_id', Font: { name: 'Arial', size: 11.0, }, },
          },
        },
      };
      // Inject the parametrized flow-node kind into the process definition so
      // 'source_id_0' resolves to an element of that kind.
      (json.definitions.process as TProcess)[`${sourceKind}`] = { id: 'source_id_0', name: `${sourceKind}_id_0` };
      const model = parseJsonAndExpectOnlyFlowNodes(json, 1);
      verifyLabelFont(model.flowNodes[0].label, { name: 'Arial', size: 11.0 });
    },
  );

  // Same nominal case for an edge (sequence flow).
  it("should convert as Edge with Font, when a BPMNEdge (which references a BPMNLabelStyle with font) is an attribute (as object) of 'BPMNPlane' (as object)", () => {
    const json = {
      definitions: {
        targetNamespace: '',
        process: { sequenceFlow: { id: 'sequenceFlow_id_0', sourceRef: 'sourceRef', targetRef: 'targetRef', }, },
        BPMNDiagram: {
          id: 'BpmnDiagram_1',
          BPMNPlane: {
            id: 'BpmnPlane_1',
            BPMNEdge: {
              id: 'BPMNEdge_id_0',
              bpmnElement: 'sequenceFlow_id_0',
              waypoint: [{ x: 10, y: 10 }],
              BPMNLabel: { id: 'label_id', labelStyle: 'style_id', },
            },
          },
          BPMNLabelStyle: { id: 'style_id', Font: { name: 'Arial', size: 11.0, }, },
        },
      },
    };
    const model = parseJsonAndExpectOnlyEdges(json, 1);
    verifyLabelFont(model.edges[0].label, { name: 'Arial', size: 11.0 });
  });

  // Several shapes sharing one label style: the font applies to each of them.
  it("should convert as Shape[] with Font, when several BPMNShapes (who reference the same BPMNLabelStyle) are an attribute (as array) of 'BPMNPlane' (as object)", () => {
    const json = {
      definitions: {
        targetNamespace: '',
        process: {
          task: { id: 'task_id_0', name: 'task name', },
          userTask: { id: 'user_task_id_0', name: 'user task name', },
        },
        BPMNDiagram: {
          id: 'BpmnDiagram_1',
          BPMNPlane: {
            id: 'BpmnPlane_1',
            BPMNShape: [
              { id: 'BPMNShape_id_0', bpmnElement: 'task_id_0', Bounds: { x: 362, y: 232, width: 36, height: 45 }, BPMNLabel: { labelStyle: 'style_id_1', }, },
              { id: 'BPMNShape_id_1', bpmnElement: 'user_task_id_0', Bounds: { x: 362, y: 232, width: 36, height: 45 }, BPMNLabel: { labelStyle: 'style_id_1', }, },
            ],
          },
          BPMNLabelStyle: { id: 'style_id_1', Font: { name: 'Arial', size: 11.0, }, },
        },
      },
    };
    const model = parseJsonAndExpectOnlyFlowNodes(json, 2);
    verifyLabelFont(model.flowNodes[0].label, { name: 'Arial', size: 11.0 });
    verifyLabelFont(model.flowNodes[1].label, { name: 'Arial', size: 11.0 });
  });

  // Several edges sharing one label style.
  it("should convert as Edge[] with Font, when several BPMNEdges (who reference the same BPMNLabelStyle) are an attribute (as array) of 'BPMNPlane' (as object)", () => {
    const json = {
      definitions: {
        targetNamespace: '',
        process: {
          sequenceFlow: [
            { id: 'sequenceFlow_id_0', sourceRef: 'sourceRef_0', targetRef: 'targetRef_0', },
            { id: 'sequenceFlow_id_1', sourceRef: 'sourceRef_1', targetRef: 'targetRef_1', },
          ],
        },
        BPMNDiagram: {
          id: 'BpmnDiagram_1',
          BPMNPlane: {
            id: 'BpmnPlane_1',
            BPMNEdge: [
              { id: 'BPMNEdge_id_0', bpmnElement: 'sequenceFlow_id_0', waypoint: [{ x: 10, y: 10 }], BPMNLabel: { labelStyle: 'style_id_1', }, },
              { id: 'BPMNEdge_id_1', bpmnElement: 'sequenceFlow_id_1', waypoint: [{ x: 10, y: 10 }], BPMNLabel: { labelStyle: 'style_id_1', }, },
            ],
          },
          BPMNLabelStyle: { id: 'style_id_1', Font: { name: 'Arial', size: 11.0, }, },
        },
      },
    };
    const model = parseJsonAndExpectOnlyEdges(json, 2);
    verifyLabelFont(model.edges[0].label, { name: 'Arial', size: 11.0 });
    verifyLabelFont(model.edges[1].label, { name: 'Arial', size: 11.0 });
  });

  // Degraded case: one referenced style has a complete Font, the other has an
  // unusable (empty-string) Font -> second label is dropped and a warning is
  // collected (hence the extra `1` warning count passed to the parse helper).
  it("should convert as Shape[] without Font, when BPMNShapes (who reference a BPMNLabelStyle) are an attribute (as array) of 'BPMNPlane' (as object) & BPMNLabelStyle (with font with/without all attributes) is an attribute (as array) of 'BPMNDiagram' (as object)", () => {
    const json = {
      definitions: {
        targetNamespace: '',
        process: {
          task: { id: 'task_id_0', name: 'task name', },
          userTask: { id: 'user_task_id_0', name: 'user task name', },
        },
        BPMNDiagram: {
          id: 'BpmnDiagram_1',
          BPMNPlane: {
            id: 'BpmnPlane_1',
            BPMNShape: [
              { id: 'BPMNShape_id_0', bpmnElement: 'task_id_0', Bounds: { x: 362, y: 232, width: 36, height: 45 }, BPMNLabel: { id: 'label_id_1', labelStyle: 'style_id_1', }, },
              { id: 'BPMNShape_id_1', bpmnElement: 'user_task_id_0', Bounds: { x: 362, y: 232, width: 36, height: 45 }, BPMNLabel: { id: 'label_id_2', labelStyle: 'style_id_2', }, },
            ],
          },
          BPMNLabelStyle: [
            { id: 'style_id_1', Font: { id: '1', isBold: false, isItalic: false, isStrikeThrough: false, isUnderline: false, name: 'Arial', size: 11.0, }, },
            { id: 'style_id_2', Font: '', },
          ],
        },
      },
    };
    const model = parseJsonAndExpectOnlyFlowNodes(json, 2, 1);
    verifyLabelFont(model.flowNodes[0].label, { name: 'Arial', size: 11.0, isBold: false, isItalic: false, isStrikeThrough: false, isUnderline: false });
    expect(model.flowNodes[1].label).toBeUndefined();
    expectMissingFontWarning('BPMNShape_id_1', 'style_id_2');
  });

  // Edge counterpart of the degraded mixed-styles case above.
  it("should convert as Edge[] without Font, when BPMNEdges (which reference a BPMNLabelStyle) are an attribute (as array) of 'BPMNPlane' (as object) & BPMNLabelStyle (with font with/without all attributes) is an attribute (as array) of 'BPMNDiagram' (as object)", () => {
    const json = {
      definitions: {
        targetNamespace: '',
        process: {
          sequenceFlow: [
            { id: 'sequenceFlow_id_0', sourceRef: 'sourceRef_0', targetRef: 'targetRef_0', },
            { id: 'sequenceFlow_id_1', sourceRef: 'sourceRef_1', targetRef: 'targetRef_1', },
          ],
        },
        BPMNDiagram: {
          id: 'BpmnDiagram_1',
          BPMNPlane: {
            id: 'BpmnPlane_1',
            BPMNEdge: [
              { id: 'BPMNEdge_id_0', bpmnElement: 'sequenceFlow_id_0', waypoint: [{ x: 10, y: 10 }], BPMNLabel: { id: 'label_id_1', labelStyle: 'style_id_1', }, },
              { id: 'BPMNEdge_id_1', bpmnElement: 'sequenceFlow_id_1', waypoint: [{ x: 10, y: 10 }], BPMNLabel: { id: 'label_id_2', labelStyle: 'style_id_2', }, },
            ],
          },
          BPMNLabelStyle: [
            { id: 'style_id_1', Font: { id: '1', isBold: false, isItalic: false, isStrikeThrough: false, isUnderline: false, name: 'Arial', size: 11.0, }, },
            { id: 'style_id_2', Font: '', },
          ],
        },
      },
    };
    const model = parseJsonAndExpectOnlyEdges(json, 2, 1);
    verifyLabelFont(model.edges[0].label, { name: 'Arial', size: 11.0, isBold: false, isItalic: false, isStrikeThrough: false, isUnderline: false });
    expect(model.edges[1].label).toBeUndefined();
    expectMissingFontWarning('BPMNEdge_id_1', 'style_id_2');
  });

  // Referenced style exists but carries no Font at all -> no label, warning.
  it("should convert as Shape without Font, when a BPMNShape (who references a BPMNLabelStyle without font) is an attribute (as object) of 'BPMNPlane' (as object)", () => {
    const json = {
      definitions: {
        targetNamespace: '',
        process: { task: { id: 'task_id_0', name: 'task name', }, },
        BPMNDiagram: {
          id: 'BpmnDiagram_1',
          BPMNPlane: {
            id: 'BpmnPlane_1',
            BPMNShape: {
              id: 'BPMNShape_id_0',
              bpmnElement: 'task_id_0',
              Bounds: { x: 362, y: 232, width: 36, height: 45 },
              BPMNLabel: { id: 'label_id', labelStyle: 'style_id', },
            },
          },
          BPMNLabelStyle: { id: 'style_id', },
        },
      },
    };
    const model = parseJsonAndExpectOnlyFlowNodes(json, 1, 1);
    expect(model.flowNodes[0].label).toBeUndefined();
    expectMissingFontWarning('BPMNShape_id_0', 'style_id');
  });

  // Edge counterpart: style without Font.
  it("should convert as Edge without Font, when a BPMNEdge (which references a BPMNLabelStyle without font) is an attribute (as object) of 'BPMNPlane' (as object)", () => {
    const json = {
      definitions: {
        targetNamespace: '',
        process: { sequenceFlow: { id: 'sequenceFlow_id_0', sourceRef: 'sourceRef_0', targetRef: 'targetRef_0', }, },
        BPMNDiagram: {
          id: 'BpmnDiagram_1',
          BPMNPlane: {
            id: 'BpmnPlane_1',
            BPMNEdge: {
              id: 'BPMNEdge_id_0',
              bpmnElement: 'sequenceFlow_id_0',
              waypoint: [{ x: 10, y: 10 }],
              BPMNLabel: { id: 'label_id', labelStyle: 'style_id', },
            },
          },
          BPMNLabelStyle: { id: 'style_id', },
        },
      },
    };
    const model = parseJsonAndExpectOnlyEdges(json, 1, 1);
    expect(model.edges[0].label).toBeUndefined();
    expectMissingFontWarning('BPMNEdge_id_0', 'style_id');
  });

  // Referenced style does not exist at all -> no label, warning.
  it("should convert as Shape without Font, when a BPMNShape (who references a non-existing BPMNLabelStyle) is an attribute (as object) of 'BPMNPlane' (as object)", () => {
    // NOTE(review): replaces the global console.warn and never restores it,
    // which can leak into later tests; jest.spyOn(console, 'warn') with
    // mockRestore() would be safer — confirm intent before changing.
    console.warn = jest.fn();
    const json = {
      definitions: {
        targetNamespace: '',
        process: { task: { id: 'task_id_0', name: 'task name', }, },
        BPMNDiagram: {
          id: 'BpmnDiagram_1',
          BPMNPlane: {
            id: 'BpmnPlane_1',
            BPMNShape: {
              id: 'BPMNShape_id_0',
              bpmnElement: 'task_id_0',
              Bounds: { x: 362, y: 232, width: 36, height: 45 },
              BPMNLabel: { id: 'label_id', labelStyle: 'non-existing_style_id', },
            },
          },
        },
      },
    };
    const model = parseJsonAndExpectOnlyFlowNodes(json, 1, 1);
    expect(model.flowNodes[0].label).toBeUndefined();
    expectMissingFontWarning('BPMNShape_id_0', 'non-existing_style_id');
  });

  // Edge counterpart: non-existing style reference.
  it("should convert as Edge without Font, when a BPMNEdge (which references a non-existing BPMNLabelStyle) is an attribute (as object) of 'BPMNPlane' (as object)", () => {
    // NOTE(review): same un-restored console.warn replacement as above.
    console.warn = jest.fn();
    const json = {
      definitions: {
        targetNamespace: '',
        process: { sequenceFlow: { id: 'sequenceFlow_id_0', sourceRef: 'sourceRef_0', targetRef: 'targetRef_0', }, },
        BPMNDiagram: {
          id: 'BpmnDiagram_1',
          BPMNPlane: {
            id: 'BpmnPlane_1',
            BPMNEdge: {
              id: 'BPMNEdge_id_0',
              bpmnElement: 'sequenceFlow_id_0',
              waypoint: [{ x: 10, y: 10 }],
              BPMNLabel: { id: 'label_id', labelStyle: 'non-existing_style_id', },
            },
          },
        },
      },
    };
    const model = parseJsonAndExpectOnlyEdges(json, 1, 1);
    expect(model.edges[0].label).toBeUndefined();
    expectMissingFontWarning('BPMNEdge_id_0', 'non-existing_style_id');
  });
});
the_stack
import { Document } from "../../Document"; import { Helper, RTFJSError } from "../../Helper"; import { RenderChp } from "../../renderer/RenderChp"; import { Renderer } from "../../renderer/Renderer"; import { RenderPap } from "../../renderer/RenderPap"; import { Chp, GlobalState, Pap, Sep } from "../Containers"; import { DestinationBase } from "./DestinationBase"; export class RtfDestination extends DestinationBase { private _metadata: { [key: string]: any }; private parser: GlobalState; private inst: Document; private _charFormatHandlers: { [key: string]: (param: number) => void } = { ansicpg: (param: number) => { // if the value is 0, use the default charset as 0 is not valid if (param > 0) { Helper.log("[rtf] using charset: " + param); this.parser.codepage = param; } }, sectd: () => { Helper.log("[rtf] reset to section defaults"); this.parser.state.sep = new Sep(null); }, plain: () => { Helper.log("[rtf] reset to character defaults"); this.parser.state.chp = new Chp(null); }, pard: () => { Helper.log("[rtf] reset to paragraph defaults"); this.parser.state.pap = new Pap(null); }, b: this._genericFormatOnOff("chp", "bold"), i: this._genericFormatOnOff("chp", "italic"), cf: this._genericFormatSetValRequired("chp", "colorindex"), highlight: this._genericFormatSetValRequired("chp", "highlightindex"), fs: this._genericFormatSetValRequired("chp", "fontsize"), f: this._genericFormatSetValRequired("chp", "fontfamily"), loch: this._genericFormatSetNoParam("pap", "charactertype", Helper.CHARACTER_TYPE.LOWANSI), hich: this._genericFormatSetNoParam("pap", "charactertype", Helper.CHARACTER_TYPE.HIGHANSI), dbch: this._genericFormatSetNoParam("pap", "charactertype", Helper.CHARACTER_TYPE.DOUBLE), strike: this._genericFormatOnOff("chp", "strikethrough"), striked: this._genericFormatOnOff("chp", "dblstrikethrough"), ul: this._genericFormatOnOff("chp", "underline", Helper.UNDERLINE.CONTINUOUS, Helper.UNDERLINE.NONE), uld: this._genericFormatOnOff("chp", "underline", 
Helper.UNDERLINE.DOTTED, Helper.UNDERLINE.NONE), uldash: this._genericFormatOnOff("chp", "underline", Helper.UNDERLINE.DASHED, Helper.UNDERLINE.NONE), uldashd: this._genericFormatOnOff("chp", "underline", Helper.UNDERLINE.DASHDOTTED, Helper.UNDERLINE.NONE), uldashdd: this._genericFormatOnOff("chp", "underline", Helper.UNDERLINE.DASHDOTDOTTED, Helper.UNDERLINE.NONE), uldb: this._genericFormatOnOff("chp", "underline", Helper.UNDERLINE.DOUBLE, Helper.UNDERLINE.NONE), ulhwave: this._genericFormatOnOff("chp", "underline", Helper.UNDERLINE.HEAVYWAVE, Helper.UNDERLINE.NONE), ulldash: this._genericFormatOnOff("chp", "underline", Helper.UNDERLINE.LONGDASHED, Helper.UNDERLINE.NONE), ulnone: this._genericFormatSetNoParam("chp", "underline", Helper.UNDERLINE.NONE), ulth: this._genericFormatOnOff("chp", "underline", Helper.UNDERLINE.THICK, Helper.UNDERLINE.NONE), ulthd: this._genericFormatOnOff("chp", "underline", Helper.UNDERLINE.THICKDOTTED, Helper.UNDERLINE.NONE), ulthdash: this._genericFormatOnOff("chp", "underline", Helper.UNDERLINE.THICKDASHED, Helper.UNDERLINE.NONE), ulthdashd: this._genericFormatOnOff("chp", "underline", Helper.UNDERLINE.THICKDASHDOTTED, Helper.UNDERLINE.NONE), ulthdashdd: this._genericFormatOnOff("chp", "underline", Helper.UNDERLINE.THICKDASHDOTDOTTED, Helper.UNDERLINE.NONE), ululdbwave: this._genericFormatOnOff("chp", "underline", Helper.UNDERLINE.DOUBLEWAVE, Helper.UNDERLINE.NONE), ulw: this._genericFormatOnOff("chp", "underline", Helper.UNDERLINE.WORD, Helper.UNDERLINE.NONE), ulwave: this._genericFormatOnOff("chp", "underline", Helper.UNDERLINE.WAVE, Helper.UNDERLINE.NONE), li: this._genericFormatSetMemberVal("pap", "indent", "left", 0), ri: this._genericFormatSetMemberVal("pap", "indent", "right", 0), fi: this._genericFormatSetMemberVal("pap", "indent", "firstline", 0), sa: this._genericFormatSetValRequired("pap", "spaceafter"), sb: this._genericFormatSetValRequired("pap", "spacebefore"), cols: this._genericFormatSetVal("sep", "columns", 0), 
sbknone: this._genericFormatSetNoParam("sep", "breaktype", Helper.BREAKTYPE.NONE), sbkcol: this._genericFormatSetNoParam("sep", "breaktype", Helper.BREAKTYPE.COL), sbkeven: this._genericFormatSetNoParam("sep", "breaktype", Helper.BREAKTYPE.EVEN), sbkodd: this._genericFormatSetNoParam("sep", "breaktype", Helper.BREAKTYPE.ODD), sbkpage: this._genericFormatSetNoParam("sep", "breaktype", Helper.BREAKTYPE.PAGE), pgnx: this._genericFormatSetMemberVal("sep", "pagenumber", "x", 0), pgny: this._genericFormatSetMemberVal("sep", "pagenumber", "y", 0), pgndec: this._genericFormatSetNoParam("sep", "pagenumberformat", Helper.PAGENUMBER.DECIMAL), pgnucrm: this._genericFormatSetNoParam("sep", "pagenumberformat", Helper.PAGENUMBER.UROM), pgnlcrm: this._genericFormatSetNoParam("sep", "pagenumberformat", Helper.PAGENUMBER.LROM), pgnucltr: this._genericFormatSetNoParam("sep", "pagenumberformat", Helper.PAGENUMBER.ULTR), pgnlcltr: this._genericFormatSetNoParam("sep", "pagenumberformat", Helper.PAGENUMBER.LLTR), qc: this._genericFormatSetNoParam("pap", "justification", Helper.JUSTIFICATION.CENTER), ql: this._genericFormatSetNoParam("pap", "justification", Helper.JUSTIFICATION.LEFT), qr: this._genericFormatSetNoParam("pap", "justification", Helper.JUSTIFICATION.RIGHT), qj: this._genericFormatSetNoParam("pap", "justification", Helper.JUSTIFICATION.JUSTIFY), paperw: this._genericFormatSetVal("dop", "width", 12240), paperh: this._genericFormatSetVal("dop", "height", 15480), margl: this._genericFormatSetMemberVal("dop", "margin", "left", 1800), margr: this._genericFormatSetMemberVal("dop", "margin", "right", 1800), margt: this._genericFormatSetMemberVal("dop", "margin", "top", 1440), margb: this._genericFormatSetMemberVal("dop", "margin", "bottom", 1440), pgnstart: this._genericFormatSetVal("dop", "pagenumberstart", 1), facingp: this._genericFormatSetNoParam("dop", "facingpages", true), landscape: this._genericFormatSetNoParam("dop", "landscape", true), par: this._addInsHandler((renderer) => 
{ renderer.startPar(); }), line: this._addInsHandler((renderer) => { renderer.lineBreak(); }), }; constructor(parser: GlobalState, inst: Document, name: string, param: number) { super(name); if (parser.version != null) { throw new RTFJSError("Unexpected rtf destination"); } // This parameter should be one, but older versions of the spec allow for omission of the version number if (param && param !== 1) { throw new RTFJSError("Unsupported rtf version"); } parser.version = 1; this._metadata = {}; this.parser = parser; this.inst = inst; } public addIns(func: (renderer: Renderer) => void): void { this.inst.addIns(func); } public appendText(text: string): void { Helper.log("[rtf] output: " + text); this.inst.addIns(text); } public sub(): void { Helper.log("[rtf].sub()"); } public handleKeyword(keyword: string, param: number): boolean { const handler = this._charFormatHandlers[keyword]; if (handler != null) { handler(param); return true; } return false; } public apply(): void { Helper.log("[rtf] apply()"); for (const prop in this._metadata) { this.inst._meta[prop] = this._metadata[prop]; } delete this._metadata; } public setMetadata(prop: string, val: any): void { this._metadata[prop] = val; } private _addInsHandler(func: (renderer: Renderer) => void) { return (param: number) => { this.inst.addIns(func); }; } private _addFormatIns(ptype: string, props: Chp | Pap) { Helper.log("[rtf] update " + ptype); switch (ptype) { case "chp": { const rchp = new RenderChp(new Chp(props as Chp)); this.inst.addIns((renderer) => { renderer.setChp(rchp); }); break; } case "pap": { const rpap = new RenderPap(new Pap(props as Pap)); this.inst.addIns((renderer) => { renderer.setPap(rpap); }); break; } } } private _genericFormatSetNoParam(ptype: string, prop: string, val: any) { return (param: number) => { const props = this.parser.state[ptype]; props[prop] = val; Helper.log("[rtf] state." + ptype + "." 
+ prop + " = " + props[prop].toString()); this._addFormatIns(ptype, props); }; } private _genericFormatOnOff(ptype: string, prop: string, onval?: string, offval?: string) { return (param: number) => { const props = this.parser.state[ptype]; props[prop] = (param == null || param !== 0) ? (onval != null ? onval : true) : (offval != null ? offval : false); Helper.log("[rtf] state." + ptype + "." + prop + " = " + props[prop].toString()); this._addFormatIns(ptype, props); }; } private _genericFormatSetVal(ptype: string, prop: string, defaultval: number) { return (param: number) => { const props = this.parser.state[ptype]; props[prop] = (param == null) ? defaultval : param; Helper.log("[rtf] state." + ptype + "." + prop + " = " + props[prop].toString()); this._addFormatIns(ptype, props); }; } private _genericFormatSetValRequired(ptype: string, prop: string) { return (param: number) => { if (param == null) { throw new RTFJSError("Keyword without required param"); } const props = this.parser.state[ptype]; props[prop] = param; Helper.log("[rtf] state." + ptype + "." + prop + " = " + props[prop].toString()); this._addFormatIns(ptype, props); }; } private _genericFormatSetMemberVal(ptype: string, prop: string, member: string, defaultval: number) { return (param: number) => { const props = this.parser.state[ptype]; const members = props[prop]; members[member] = (param == null) ? defaultval : param; Helper.log("[rtf] state." + ptype + "." + prop + "." + member + " = " + members[member].toString()); this._addFormatIns(ptype, props); }; } }
the_stack
import * as express from "express"; import * as _ from "lodash"; import { Types } from "mongoose"; import * as remote from "remote-file-size"; import * as auth from "../lib/auth"; import Episode from "../models/episode"; import Podcast, { IPodcast } from "../models/podcast"; import * as parsePodcast from "node-podcast-parser"; import * as request from "request"; import { deleteFile, saveToStorage } from "../lib/googleCloudStorage"; import User, { IUser } from "../models/user"; async function getSelectedPodcast(userId) { const podcasts = await Podcast.find({ owner: userId }); const user = await User.findOne({ _id: userId }); const selectedPodcast = user.selectedPodcast; if (podcasts.length > 0) { if (selectedPodcast) { // The user has a selected podcast const podcast = _.find(podcasts, { _id: selectedPodcast }); if (podcast) { return podcast._id; } else if (podcasts.length > 0) { return podcasts[0]._id; } else { return undefined; } } else { // The user doesn't have a selected podcast falling back to first in array return podcasts[0]._id; } } else { return undefined; } } function addStorageUsage(url, userId, decrement = false) { return new Promise((resolve, reject) => { try { remote(url, async (err, size) => { const mb = 1048576; // tslint:disable-next-line:max-line-length const currentPodcast = await Podcast.findOne({ _id: await getSelectedPodcast(userId) }).exec(); let incSize = decrement ? 
(-1 * (size / mb)) : (size / mb); const currentUse = _.get(currentPodcast, "usedStorage", 0); if (currentUse + incSize < 0) { const diff = currentUse + incSize; incSize = incSize - diff; } if (!currentPodcast.storageReset) { Podcast.update({ "owner.0": userId }, { $inc: { usedStorage: incSize }, storageReset: new Date(), }, { multi: true }, async (error) => { const podcast = Podcast.findOne({ _id: await getSelectedPodcast(userId) }); resolve(podcast); }); } else { Podcast.update({ "owner.0": userId }, { $inc: { usedStorage: incSize }, }, { multi: true }, async (error) => { const podcast = Podcast.findOne({ _id: await getSelectedPodcast(userId) }); resolve(podcast); }); } }); } catch (err) { reject(); } }); } export default (router: express.Router) => { router.get("/episode/:episode_id", auth.mustBeLoggedIn, async (req, res) => { const podcast = await Podcast.findOne({ _id: await getSelectedPodcast(req.user._id) }).exec(); if (!podcast) { return res.sendStatus(404); } const findQuery = { _id: Types.ObjectId(req.params.episode_id), podcast: podcast._id }; const episode = await Episode.findOne(findQuery).exec(); if (!episode) { return res.sendStatus(404); } res.json(episode); }); /* To test from command line, get the cookie value from chrome, the podcast id and then run: curl -X POST \ -d 'title=title&audioURL=http://someaudio.com&summary=summary&fullContent=fullContent&published=true' \ --cookie 'connect.sid=s%3Ag7iVTGsglECWle_6D8_COFX0HVoPVmgo.RdW4DiKe1At%2Fk1yyBTtoEed6%2F%2FdyU38i2ZfmUKNx5PY' \ http://lvh.me:3000/podcast/episode */ router.post("/episode", auth.mustBeLoggedIn, async (req, res) => { const owner = req.user._id; const podcast = await Podcast.findOne({ _id: await getSelectedPodcast(req.user._id) }).exec(); if (!podcast) { return res.status(403).send("Please create a podcast first."); } const { _id, title, audioUrl, audioDuration, uploadUrl, summary, fullContent, published, publishedAt, preview, adPlacement, } = req.body; // 
tslint:disable-next-line:no-console const episodeFields: any = { podcast: podcast._id, title, audioUrl, uploadUrl, summary, fullContent, audioDuration, adPlacement, }; if (published !== undefined) { episodeFields.published = published; } if (publishedAt !== undefined) { episodeFields.publishedAt = publishedAt; } if (preview !== undefined) { episodeFields.preview = preview; } const currentEpisode = await Episode.findOne({ _id }); const currentAudioUrl = _.get(currentEpisode, "audioUrl", null); if (audioUrl !== currentAudioUrl) { if (currentAudioUrl) { await addStorageUsage(currentAudioUrl, req.user._id, true); } await addStorageUsage(audioUrl, req.user._id); if (currentAudioUrl) { try { await deleteFile(currentAudioUrl); } catch (e) { // tslint:disable-next-line:no-console console.warn("error deleting file", e); } } } const updatedPodcast = await Podcast.findOne({ _id: await getSelectedPodcast(req.user._id) }).exec(); // We remove any previews, before updating the episodes await Episode.remove({ preview: true }); const findQuery = { _id: Types.ObjectId(_id) }; // if _id is provided, we use it to update an episode if (_id) { Episode.findOneAndUpdate(findQuery, episodeFields, { upsert: true, new: true }, (err, episode) => { if (err) { return res.status(403).send(err.message); } res.json({ episode, podcast: updatedPodcast }); }); } else { try { const episode = await Episode.create(episodeFields); if (!episode) { res.status(403).send("Episode creation failed."); } res.json({ episode, podcast: updatedPodcast }); } catch (err) { res.status(403).send(err.message); } } }); router.post("/reset-import", auth.mustBeLoggedIn, async (req, res) => { const owner = req.user._id; const fields = { importProgress: {}, }; // tslint:disable-next-line:max-line-length const podcast = await Podcast.findOneAndUpdate({ _id: await getSelectedPodcast(req.user._id) }, fields, { upsert: true, new: true }); res.json({ podcast }); }); router.post("/import-rss", auth.mustBeLoggedIn, async (req, 
res) => { const owner = req.user._id; const feedUrl = req.body.url; const publish = req.body.publish; const selectedPodcast = await getSelectedPodcast(req.user._id); request(feedUrl, (err, resp, data) => { if (err) { res.status(403); return; } parsePodcast(data, async (pErr, p) => { const podcast = { episodes: p.episodes.reverse(), ...p, }; if (err) { res.status(403); return; } else { // tslint:disable-next-line:no-console res.json({ podcast }).send(); } let errorFound = false; try { // tslint:disable-next-line:prefer-for-of for (let i = 0; i < podcast.episodes.length; i++) { // tslint:disable-next-line:no-shadowed-variable const podcastFields = { importProgress: { current: i + 1, total: podcast.episodes.length, status: "Importing episodes", done: false, }, }; // tslint:disable-next-line:max-line-length await Podcast.findOneAndUpdate({ _id: selectedPodcast }, podcastFields, { upsert: true, new: true }); const episode = podcast.episodes[i]; let publicFileUrl = null; if (episode.enclosure && episode.enclosure.url) { const file_url = episode.enclosure.url; const file_name = file_url.split("/").pop().split("%2F").pop(); publicFileUrl = await saveToStorage(file_url, file_name); } const currentPodcast = await Podcast.findOne({ _id: selectedPodcast }).exec(); const episodeFields: any = { podcast: currentPodcast._id, title: episode.title, audioUrl: publicFileUrl, summary: episode.description, fullContent: episode.description, audioDuration: 0, published: false, publishedAt: new Date(), guid: episode.guid, }; if (publish && episode.title && episode.description) { episodeFields.published = true; } try { const currentEpisode = await Episode.create(episodeFields); } catch (err) { // tslint:disable-next-line:no-console console.warn("ERROR", err); errorFound = true; const fields = { importProgress: { current: podcast.episodes.length, total: podcast.episodes.length, status: "Feed import failed", error: true, done: true, }, }; // tslint:disable-next-line:max-line-length await 
Podcast.findOneAndUpdate({ _id: selectedPodcast }, fields, { upsert: true, new: true }); } } } catch (err) { errorFound = true; const fields = { importProgress: { current: podcast.episodes.length, total: podcast.episodes.length, status: "Feed import failed", error: true, done: true, }, }; // tslint:disable-next-line:max-line-length await Podcast.findOneAndUpdate({ _id: selectedPodcast }, fields, { upsert: true, new: true }); } if (!errorFound) { const podcastFields = { importProgress: { current: podcast.episodes.length, total: podcast.episodes.length, status: "Feed imported successfully", done: true, }, }; // tslint:disable-next-line:max-line-length await Podcast.findOneAndUpdate({ _id: selectedPodcast }, podcastFields, { upsert: true, new: true }); } }); }); }); router.delete("/episode/:episode_id", auth.mustBeLoggedIn, async (req, res) => { const podcast = await Podcast.findOne({ _id: await getSelectedPodcast(req.user._id) }).exec(); if (!podcast) { return res.sendStatus(404); } const findQuery = { _id: Types.ObjectId(req.params.episode_id), podcast: podcast._id }; const episode = await Episode.findOne(findQuery).exec(); if (!episode) { return res.sendStatus(404); } if (episode.audioUrl) { try { await addStorageUsage(episode.audioUrl, req.user._id, true); await deleteFile(episode.audioUrl); } catch (e) { // tslint:disable-next-line:no-console console.warn("Failed to delete file", e); } } await episode.remove(); const updatedPodcast = await Podcast.findOne({ _id: await getSelectedPodcast(req.user._id) }).exec(); res.json({ podcast: updatedPodcast }); }); };
the_stack
import _ from 'lodash';
import { describe, beforeEach, it, expect } from '../../../../../test/lib/common';
import TimeSeries from 'app/core/time_series2';
import {
  convertToHeatMap,
  convertToCards,
  histogramToHeatmap,
  calculateBucketSize,
  isHeatmapDataEqual,
} from '../heatmap_data_converter';
import { HeatmapData } from '../types';

describe('isHeatmapDataEqual', () => {
  const ctx: any = {};

  beforeEach(() => {
    ctx.heatmapA = {
      '1422774000000': {
        x: 1422774000000,
        buckets: {
          '1': { y: 1, values: [1, 1.5] },
          '2': { y: 2, values: [1] },
        },
      },
    };
    // Same content as heatmapA but with bucket values in a different order —
    // equality is expected to ignore value ordering.
    ctx.heatmapB = {
      '1422774000000': {
        x: 1422774000000,
        buckets: {
          '1': { y: 1, values: [1.5, 1] },
          '2': { y: 2, values: [1] },
        },
      },
    };
  });

  it('should proper compare objects', () => {
    const heatmapC = _.cloneDeep(ctx.heatmapA);
    heatmapC['1422774000000'].buckets['1'].values = [1, 1.5];
    const heatmapD = _.cloneDeep(ctx.heatmapA);
    heatmapD['1422774000000'].buckets['1'].values = [1.5, 1, 1.6];
    const heatmapE = _.cloneDeep(ctx.heatmapA);
    heatmapE['1422774000000'].buckets['1'].values = [1, 1.6];
    const empty = {};
    const emptyValues = _.cloneDeep(ctx.heatmapA);
    emptyValues['1422774000000'].buckets['1'].values = [];

    // Equality must be symmetric, so every pair is checked both ways.
    expect(isHeatmapDataEqual(ctx.heatmapA, ctx.heatmapB)).toBe(true);
    expect(isHeatmapDataEqual(ctx.heatmapB, ctx.heatmapA)).toBe(true);
    expect(isHeatmapDataEqual(ctx.heatmapA, heatmapC)).toBe(true);
    expect(isHeatmapDataEqual(heatmapC, ctx.heatmapA)).toBe(true);
    expect(isHeatmapDataEqual(ctx.heatmapA, heatmapD)).toBe(false);
    expect(isHeatmapDataEqual(heatmapD, ctx.heatmapA)).toBe(false);
    expect(isHeatmapDataEqual(ctx.heatmapA, heatmapE)).toBe(false);
    expect(isHeatmapDataEqual(heatmapE, ctx.heatmapA)).toBe(false);
    expect(isHeatmapDataEqual(empty, ctx.heatmapA)).toBe(false);
    expect(isHeatmapDataEqual(ctx.heatmapA, empty)).toBe(false);
    expect(isHeatmapDataEqual(emptyValues, ctx.heatmapA)).toBe(false);
    expect(isHeatmapDataEqual(ctx.heatmapA, emptyValues)).toBe(false);
  });
});

describe('calculateBucketSize', () => {
  const ctx: any = {};

  describe('when logBase is 1 (linear scale)', () => {
    beforeEach(() => {
      ctx.logBase = 1;
      ctx.bounds_set = [
        { bounds: [], size: 0 },
        { bounds: [0], size: 0 },
        { bounds: [4], size: 4 },
        { bounds: [0, 1, 2, 3, 4], size: 1 },
        { bounds: [0, 1, 3, 5, 7], size: 1 },
        { bounds: [0, 3, 7, 9, 15], size: 2 },
        { bounds: [0, 7, 3, 15, 9], size: 2 },
        { bounds: [0, 5, 10, 15, 50], size: 5 },
      ];
    });

    it('should properly calculate bucket size', () => {
      _.each(ctx.bounds_set, b => {
        const bucketSize = calculateBucketSize(b.bounds, ctx.logBase);
        expect(bucketSize).toBe(b.size);
      });
    });
  });

  describe('when logBase is 2', () => {
    beforeEach(() => {
      ctx.logBase = 2;
      ctx.bounds_set = [
        { bounds: [], size: 0 },
        { bounds: [0], size: 0 },
        { bounds: [4], size: 4 },
        { bounds: [1, 2, 4, 8], size: 1 },
        { bounds: [1, Math.SQRT2, 2, 8, 16], size: 0.5 },
      ];
    });

    it('should properly calculate bucket size', () => {
      _.each(ctx.bounds_set, b => {
        const bucketSize = calculateBucketSize(b.bounds, ctx.logBase);
        // Log-scale sizes are fractional — compare with float tolerance.
        expect(isEqual(bucketSize, b.size)).toBe(true);
      });
    });
  });
});

describe('HeatmapDataConverter', () => {
  const ctx: any = {};

  beforeEach(() => {
    ctx.series = [];
    ctx.series.push(
      new TimeSeries({
        datapoints: [
          [1, 1422774000000],
          [1, 1422774000010],
          [2, 1422774060000],
        ],
        alias: 'series1',
      })
    );
    ctx.series.push(
      new TimeSeries({
        datapoints: [
          [2, 1422774000000],
          [2, 1422774000010],
          [3, 1422774060000],
        ],
        alias: 'series2',
      })
    );
    ctx.series.push(
      new TimeSeries({
        datapoints: [
          [5, 1422774000000],
          [3, 1422774000010],
          [4, 1422774060000],
        ],
        alias: 'series3',
      })
    );

    ctx.xBucketSize = 60000; // 60s
    ctx.yBucketSize = 2;
    ctx.logBase = 1;
  });

  describe('when logBase is 1 (linear scale)', () => {
    beforeEach(() => {
      ctx.logBase = 1;
    });

    it('should build proper heatmap data', () => {
      const expectedHeatmap = {
        '1422774000000': {
          x: 1422774000000,
          buckets: {
            '0': {
              y: 0,
              values: [1, 1],
              count: 2,
              bounds: { bottom: 0, top: 2 },
            },
            '2': {
              y: 2,
              values: [2, 2, 3],
              count: 3,
              bounds: { bottom: 2, top: 4 },
            },
            '4': { y: 4, values: [5], count: 1, bounds: { bottom: 4, top: 6 } },
          },
        },
        '1422774060000': {
          x: 1422774060000,
          buckets: {
            '2': {
              y: 2,
              values: [2, 3],
              // BUG FIX: was `count: 3` — only two points (2 and 3) fall
              // into this bucket at this timestamp (series3's 4 goes to
              // bucket 4), consistent with `values` above. The mismatch was
              // latent because isHeatmapDataEqual compares values only.
              count: 2,
              bounds: { bottom: 2, top: 4 },
            },
            '4': { y: 4, values: [4], count: 1, bounds: { bottom: 4, top: 6 } },
          },
        },
      };

      const heatmap = convertToHeatMap(ctx.series, ctx.yBucketSize, ctx.xBucketSize, ctx.logBase);
      expect(isHeatmapDataEqual(heatmap, expectedHeatmap)).toBe(true);
    });
  });

  describe.skip('when logBase is 2', () => {
    beforeEach(() => {
      ctx.logBase = 2;
    });

    it('should build proper heatmap data', () => {
      const expectedHeatmap = {
        '1422774000000': {
          x: 1422774000000,
          buckets: {
            '1': { y: 1, values: [1] },
            '2': { y: 2, values: [2] },
          },
        },
        '1422774060000': {
          x: 1422774060000,
          buckets: {
            '2': { y: 2, values: [2, 3] },
          },
        },
      };

      const heatmap = convertToHeatMap(ctx.series, ctx.yBucketSize, ctx.xBucketSize, ctx.logBase);
      expect(isHeatmapDataEqual(heatmap, expectedHeatmap)).toBe(true);
    });
  });
});

describe('Histogram converter', () => {
  const ctx: any = {};

  beforeEach(() => {
    ctx.series = [];
    ctx.series.push(
      new TimeSeries({
        datapoints: [
          [1, 1422774000000],
          [0, 1422774060000],
        ],
        alias: '1',
        label: '1',
      })
    );
    ctx.series.push(
      new TimeSeries({
        datapoints: [
          [5, 1422774000000],
          [3, 1422774060000],
        ],
        alias: '2',
        label: '2',
      })
    );
    ctx.series.push(
      new TimeSeries({
        datapoints: [
          [0, 1422774000000],
          [1, 1422774060000],
        ],
        alias: '3',
        label: '3',
      })
    );
  });

  describe('when converting histogram', () => {
    beforeEach(() => {});

    it('should build proper heatmap data', () => {
      const expectedHeatmap: HeatmapData = {
        '1422774000000': {
          x: 1422774000000,
          buckets: {
            '0': {
              y: 0,
              count: 1,
              bounds: { bottom: 0, top: null },
              values: [],
              points: [],
            },
            '1': {
              y: 1,
              count: 5,
              bounds: { bottom: 1, top: null },
              values: [],
              points: [],
            },
            '2': {
              y: 2,
              count: 0,
              bounds: { bottom: 2, top: null },
              values: [],
              points: [],
            },
          },
        },
        '1422774060000': {
          x: 1422774060000,
          buckets: {
            '0': {
              y: 0,
              count: 0,
              bounds: { bottom: 0, top: null },
              values: [],
              points: [],
            },
            '1': {
              y: 1,
              count: 3,
              bounds: { bottom: 1, top: null },
              values: [],
              points: [],
            },
            '2': {
              y: 2,
              count: 1,
              bounds: { bottom: 2, top: null },
              values: [],
              points: [],
            },
          },
        },
      };

      const heatmap = histogramToHeatmap(ctx.series);
      expect(heatmap).toEqual(expectedHeatmap);
    });

    it('should use bucket index as a bound', () => {
      const heatmap = histogramToHeatmap(ctx.series);
      const bucketLabels = _.map(heatmap['1422774000000'].buckets, (b, label) => label);
      const bucketYs = _.map(heatmap['1422774000000'].buckets, 'y');
      const bucketBottoms = _.map(heatmap['1422774000000'].buckets, b => b.bounds.bottom);
      const expectedBounds = [0, 1, 2];

      expect(bucketLabels).toEqual(_.map(expectedBounds, b => b.toString()));
      expect(bucketYs).toEqual(expectedBounds);
      expect(bucketBottoms).toEqual(expectedBounds);
    });
  });
});

describe('convertToCards', () => {
  let buckets: HeatmapData = {};

  beforeEach(() => {
    buckets = {
      '1422774000000': {
        x: 1422774000000,
        buckets: {
          '1': { y: 1, values: [1], count: 1, bounds: {} },
          '2': { y: 2, values: [2], count: 1, bounds: {} },
        },
      },
      '1422774060000': {
        x: 1422774060000,
        buckets: {
          '2': { y: 2, values: [2, 3], count: 2, bounds: {} },
        },
      },
    };
  });

  it('should build proper cards data', () => {
    const expectedCards = [
      { x: 1422774000000, y: 1, count: 1, values: [1], yBounds: {} },
      { x: 1422774000000, y: 2, count: 1, values: [2], yBounds: {} },
      { x: 1422774060000, y: 2, count: 2, values: [2, 3], yBounds: {} },
    ];

    const res = convertToCards(buckets);
    expect(res.cards).toMatchObject(expectedCards);
  });

  it('should build proper cards stats', () => {
    const expectedStats = { min: 1, max: 2 };
    const res = convertToCards(buckets);
    expect(res.cardStats).toMatchObject(expectedStats);
  });
});

/**
 * Compare two numbers with given precision. Suitable for compare float numbers after conversions with precision loss.
* @param a * @param b * @param precision */ function isEqual(a: number, b: number, precision = 0.000001): boolean { if (a === b) { return true; } else { return Math.abs(1 - a / b) <= precision; } }
the_stack
/// <reference types="angular" /> declare module 'AngularFormly' { export = AngularFormly; } declare module 'angular-formly' { var angularFormlyDefaultExport: string; export = angularFormlyDefaultExport; } declare namespace AngularFormly { interface IFieldArray extends Array<IFieldConfigurationObject | IFieldGroup> { } interface IFieldGroup { data?: { [key: string]: any; } | undefined; className?: string | undefined; elementAttributes?: string | undefined; fieldGroup?: IFieldArray | undefined; form?: Object | undefined; hide?: boolean | undefined; hideExpression?: string | IExpressionFunction | undefined; key?: string | number | undefined; model?: string | { [key: string]: any; } | undefined; options?: IFormOptionsAPI | undefined; templateOptions?: ITemplateOptions | undefined; wrapper?: string | string[] | undefined; } interface IFormOptionsAPI { data?: { [key: string]: any; } | undefined; fieldTransform?: Function | Array<Function> | undefined; formState?: Object | undefined; removeChromeAutoComplete?: boolean | undefined; resetModel?: Function | undefined; templateManipulators?: ITemplateManipulators | undefined; updateInitialValue?: Function | undefined; wrapper?: string | string[] | undefined; } /** * see http://docs.angular-formly.com/docs/formly-expressions#expressionproperties-validators--messages */ interface IExpressionFunction { ($viewValue: any, $modelValue: any, scope: ITemplateScope): any; } interface IModelOptions { updateOn?: string | undefined; debounce?: number | undefined; allowInvalid?: boolean | undefined; getterSetter?: string | undefined; timezone?: string | undefined; } interface ITemplateManipulator { (template: string | HTMLElement, options: Object, scope: ITemplateScope): string | HTMLElement; } interface ITemplateManipulators { preWrapper?: ITemplateManipulator[] | undefined; postWrapper?: ITemplateManipulator[] | undefined; } interface ISelectOption { name?: string | undefined; value?: string | undefined; group?: string | undefined; 
[key: string]: any; } /** * see http://docs.angular-formly.com/docs/ngmodelattrstemplatemanipulator */ interface ITemplateOptions { // both attribute or regular attribute disabled?: boolean | undefined; maxlength?: number | undefined; minlength?: number | undefined; pattern?: string | undefined; required?: boolean | undefined; //attribute only max?: number | undefined; min?: number | undefined; placeholder?: number | string | undefined; tabindex?: number | undefined; type?: string | undefined; //expression types onBlur?: string | IExpressionFunction | undefined; onChange?: string | IExpressionFunction | undefined; onClick?: string | IExpressionFunction | undefined; onFocus?: string | IExpressionFunction | undefined; onKeydown?: string | IExpressionFunction | undefined; onKeypress?: string | IExpressionFunction | undefined; onKeyup?: string | IExpressionFunction | undefined; //Bootstrap types label?: string | undefined; description?: string | undefined; [key: string]: any; // types for select/radio fields options?: Array<ISelectOption> | undefined; groupProp?: string | undefined; // default: group valueProp?: string | undefined; // default: value labelProp?: string | undefined; // default: name } /** * see http://docs.angular-formly.com/docs/field-configuration-object#validators-object */ interface IValidator { expression: string | IExpressionFunction; message?: string | IExpressionFunction | undefined; } /** * An object which has at least two properties called expression and listener. The watch.expression * is added to the formly-form directive's scope (to allow it to run even when hide is true). You * can specify a type ($watchCollection or $watchGroup) via the watcher.type property (defaults to * $watch) and whether you want it to be a deep watch via the watcher.deep property (defaults to false). 
* * see http://docs.angular-formly.com/docs/field-configuration-object#watcher-objectarray-of-watches */ interface IWatcher { deep?: boolean | undefined; //Defaults to false expression?: string | { (field: IFieldRuntimeObject, scope: ITemplateScope): boolean } | undefined; listener: (field: IFieldRuntimeObject, newValue: any, oldValue: any, scope: ITemplateScope, stopWatching: Function) => void; type?: string | undefined; //Defaults to $watch but can be set to $watchCollection or $watchGroup } interface IFieldRuntimeObject extends IFieldObject { model: { [key: string]: any; }; } interface IFieldConfigurationObject extends IFieldObject { /** * By default, the model passed to the formly-field directive is the same as the model passed to the * formly-form. However, if the field has a model specified, then it is used for that field (and that * field only). In addition, a deep watch is added to the formly-field directive's scope to run the * expressionProperties when the specified model changes. * * Note, the formly-form directive will allow you to specify a string which is an (almost) formly * expression which allows you to define the model as relative to the scope of the form. * * see http://docs.angular-formly.com/docs/field-configuration-object#model-object--string */ model?: string | { [key: string]: any; } | undefined; } // see http://docs.angular-formly.com/docs/field-configuration-object interface IFieldObject { /** * Added in 6.18.0 * * Demo * see http://angular-formly.com/#/example/other/unique-value-async-validation */ asyncValidators?: { [key: string]: string | IExpressionFunction | IValidator; } | undefined; /** * This is a great way to add custom behavior to a specific field. It is injectable with the $scope of the * field, and anything else you have in your injector. 
* * see http://docs.angular-formly.com/docs/field-configuration-object#controller-controller-name-as-string--controller-f */ controller?: string | Function | undefined; /** * This is reserved for the developer. You have our guarantee to be able to use this and not worry about * future versions of formly overriding your usage and preventing you from upgrading :-) * * see http://docs.angular-formly.com/docs/field-configuration-object#data-object */ data?: { [key: string]: any; } | undefined; /** * Use defaultValue to initialize it the model. If this is provided and the value of the * model at compile-time is undefined, then the value of the model will be assigned to defaultValue. * * see http://docs.angular-formly.com/docs/field-configuration-object#defaultvalue-any */ defaultValue?: any; /** * You can specify your own class that will be applied to the formly-field directive (or ng-form of * a fieldGroup). * * see http://docs.angular-formly.com/docs/field-configuration-object#classname-string */ className?: string | undefined; elementAttributes?: { [key: string]: string; } | undefined; /** * An object where the key is a property to be set on the main field config and the value is an * expression used to assign that property. The value is a formly expressions. The returned value is * wrapped in $q.when so you can return a promise from your function :-) * * see http://docs.angular-formly.com/docs/field-configuration-object#expressionproperties-object */ expressionProperties?: { [key: string]: string | IExpressionFunction | IValidator; } | undefined; /** * Uses ng-if. Whether to hide the field. Defaults to false. If you wish this to be conditional, use * hideExpression. See below. * * see http://docs.angular-formly.com/docs/field-configuration-object#hide-boolean */ hide?: boolean | undefined; /** * This is similar to expressionProperties with a slight difference. You should (hopefully) never * notice the difference with the most common use case. 
This is available due to limitations with * expressionProperties and ng-if not working together very nicely. * * see http://docs.angular-formly.com/docs/field-configuration-object#hideexpression-string--function */ hideExpression?: string | IExpressionFunction | undefined; /** * This allows you to specify the id of your field (which will be used for its name as well unless * a name is provided). Note, you can also override the id generation code using the formlyConfig * extra called getFieldId. * * AVOID THIS * If you don't have to do this, don't. Specifying IDs makes it harder to re-use things and it's * just extra work. Part of the beauty that angular-formly provides is the fact that you don't need * to concern yourself with making sure that this is unique. * * see http://docs.angular-formly.com/docs/field-configuration-object#id-string */ id?: string | undefined; initialValue?: any; /** * Can be set instead of type or template to use a custom html template form field. Works * just like a directive templateUrl and uses the $templateCache * * see http://docs.angular-formly.com/docs/field-configuration-object#key-string */ key?: string | number | undefined; /** * This allows you to specify a link function. It is invoked after your template has finished compiling. * You are passed the normal arguments for a normal link function. * * see http://docs.angular-formly.com/docs/field-configuration-object#link-link-function */ link?: ng.IDirectiveLinkFn | undefined; /** * Allows you to take advantage of ng-model-options directive. Formly's built-in templateManipulator (see * below) will add this attribute to your ng-model element automatically if this property exists. Note, * if you use the getter/setter option, formly's templateManipulator will change the value of ng-model * to options.value which is a getterSetter that formly adds to field options. 
* * see http://docs.angular-formly.com/docs/field-configuration-object#modeloptions */ modelOptions?: IModelOptions | undefined; /** * If you wish to, you can specify a specific name for your ng-model. This is useful if you're posting * the form to a server using techniques of yester-year. * * AVOID THIS * If you don't have to do this, don't. It's just extra work. Part of the beauty that angular-formly * provides is the fact that you don't need to concern yourself with stuff like this. * * see http://docs.angular-formly.com/docs/field-configuration-object#name-string */ name?: string | undefined; /** * This is used by ngModelAttrsTemplateManipulator to automatically add attributes to the ng-model element * of field templates. You will likely not use this often. This object is a little complex, but extremely * powerful. It's best to explain this api via an example. For more information, see the guide on ngModelAttrs. * * see http://docs.angular-formly.com/docs/field-configuration-object#ngmodelattrs-object */ ngModelAttrs?: { attribute?: any; bound?: any; expression?: any; value?: any; [key: string]: any; } | undefined; /** * This allows you to place attributes with string values on the ng-model element. * Easy to use alternative to ngModelAttrs option. * * see http://docs.angular-formly.com/docs/field-configuration-object#ngmodelelattrs-object */ ngModelElAttrs?: { [key: string]: string; } | undefined; /** * Used to tell angular-formly to not attempt to add the formControl property to your object. This is useful * for things like validation, but not necessary if your "field" doesn't use ng-model (if it's just a horizontal * line for example). Defaults to undefined. * * see http://docs.angular-formly.com/docs/field-configuration-object#noformcontrol-boolean */ noFormControl?: boolean | undefined; /** * Allows you to specify extra types to get options from. Duplicate options are overridden in later priority * (index 1 will override index 0 properties). 
Also, these are applied after the type's defaultOptions and * hence will override any duplicates of those properties as well. * * see http://docs.angular-formly.com/docs/field-configuration-object#optionstypes-string--array-of-strings */ optionsTypes?: string | string[] | undefined; /** * Can be set instead of type or templateUrl to use a custom html * template form field. Recommended to be used with one-liners mostly * (like a directive), or if you're using webpack with the ability to require templates :-) * * If a function is passed, it is invoked with the field configuration object and can return * either a string for the template or a promise that resolves to a string. * * see http://docs.angular-formly.com/docs/field-configuration-object#template-string--function */ template?: string | { (fieldConfiguration: IFieldConfigurationObject): string | ng.IPromise<string> } | undefined; /** * Allows you to specify custom template manipulators for this specific field. (use defaultOptions in a * type configuration if you want it to apply to all fields of a certain type). * * see http://docs.angular-formly.com/docs/field-configuration-object#templatemanipulator-object-of-arrays-of-functions */ templateManipulators?: ITemplateManipulators | undefined; /** * This is reserved for the templates. Any template-specific options go in here. Look at your specific * template implementation to know the options required for this. * * see http://docs.angular-formly.com/docs/field-configuration-object#templateoptions-object */ templateOptions?: ITemplateOptions | undefined; /** * Can be set instead of type or template to use a custom html template form field. Works * just like a directive templateUrl and uses the $templateCache * * see http://docs.angular-formly.com/docs/field-configuration-object#templateurl-string--function */ templateUrl?: string | { (fieldConfiguration: IFieldConfigurationObject): string | ng.IPromise<string> } | undefined; /** * The type of field to be rendered. 
This is the recommended method * for defining fields. Types must be pre-defined using formlyConfig. * * see http://docs.angular-formly.com/docs/field-configuration-object#type-string */ type?: string | undefined; /** * An object with a few useful properties mostly handy when used in combination with ng-messages */ validation?: { /** * This is set by angular-formly. This is a boolean indicating whether an error message should be shown. Because * you generally only want to show error messages when the user has interacted with a specific field, this value * is set to true based on this rule: field invalid && (field touched || validation.show) (with slight difference * for pre-angular 1.3 because it doesn't have touched support). */ errorExistsAndShouldBeVisible?: boolean | undefined; /** * A map of Formly Expressions mapped to message names. This is really useful when you're using ng-messages * like in this example. */ messages?: { [key: string]: IExpressionFunction | string; } | undefined /** * A boolean you as the developer can set to specify to force options.validation.errorExistsAndShouldBeVisible * to be set to true when there are $errors. This is useful when you're trying to call the user's attention to * some fields for some reason. */ show?: boolean | undefined; } | undefined; /** * An object where the keys are the name of the validator and the values are Formly Expressions; * * Async Validation * All function validators can return true/false/Promise. A validator passes if it returns true or a promise * that is resolved. A validator fails if it returns false or a promise that is rejected. * * see http://docs.angular-formly.com/docs/field-configuration-object#validators-object */ validators?: { [key: string]: string | IExpressionFunction | IValidator; } | undefined; /** * This is a getter/setter function for the value that your field is representing. 
Useful when using getterSetter: true * in the modelOptions (in fact, if you don't disable the ngModelAttrsTemplateManipulator that comes built-in with formly, * it will automagically change your field's ng-model attribute to use options.value. * * see http://docs.angular-formly.com/docs/field-configuration-object#value-gettersetter-function */ value?(): any; //Getter value?(val: any): void; //Setter /** * An object which has at least two properties called expression and listener. The watch.expression is added * to the formly-form directive's scope (to allow it to run even when hide is true). You can specify a type * ($watchCollection or $watchGroup) via the watcher.type property (defaults to $watch) and whether you want * it to be a deep watch via the watcher.deep property (defaults to false). * * see http://docs.angular-formly.com/docs/field-configuration-object#watcher-objectarray-of-watches */ watcher?: IWatcher | IWatcher[] | undefined; /** * This makes reference to setWrapper in formlyConfig. It is expected to be the name of the wrapper. If * given an array, the formly field template will be wrapped by the first wrapper, then the second, then * the third, etc. You can also specify these as part of a type (which is the recommended approach). * Specifying this property will override the wrappers for the type for this field. * * http://docs.angular-formly.com/docs/field-configuration-object#wrapper-string--array-of-strings */ wrapper?: string | string[] | undefined; //ALL PROPERTIES BELOW ARE ADDED (So you should not be setting them yourself.) /** * This is the NgModelController for the field. It provides you with awesome stuff like $errors :-) * * see http://docs.angular-formly.com/docs/field-configuration-object#formcontrol-ngmodelcontroller */ formControl?: ng.IFormController | ng.IFormController[] | undefined; /** * Will reset the field's model and the field control to the last initialValue. This is used by the * formly-form's options.resetModel function. 
*
* see http://docs.angular-formly.com/docs/field-configuration-object#resetmodel-function
*/
resetModel?: (() => void) | undefined;
/**
 * It is not likely that you'll ever want to invoke this function. It simply runs the expressionProperties expressions.
 * It is used internally and you shouldn't have to use it, but you can if you want to, and any breaking changes to the
 * way it works will result in a major version change, so you can rely on its api.
 *
 * see http://docs.angular-formly.com/docs/field-configuration-object#runexpressions-function
 */
runExpressions?: (() => void) | undefined;
/**
 * Will reset the field's initialValue to the current state of the model. Useful if you load the model asynchronously.
 * Invoke this when the model gets set. This is used by the formly-form's options.updateInitialValue function.
 *
 * see http://docs.angular-formly.com/docs/field-configuration-object#updateinitialvalue-function
 */
updateInitialValue?: (() => void) | undefined;
}

/**
 * Options accepted by formlyConfig.setType() when registering a custom field type.
 *
 * see http://docs.angular-formly.com/docs/custom-templates#section-formlyconfig-settype-options
 */
interface ITypeOptions {
    // apiCheck validators keyed by option name (apiCheck library shapes — TODO confirm exact signature)
    apiCheck?: { [key: string]: Function } | undefined;
    apiCheckFunction?: string | undefined; // 'throw' or 'warn'
    apiCheckInstance?: any;
    apiCheckOptions?: Object | undefined;
    // defaults merged into every field of this type; may be a function of the field config
    defaultOptions?: IFieldConfigurationObject | Function | undefined;
    // angular controller (constructor, registered name, or annotated array)
    controller?: Function | string | any[] | undefined;
    data?: { [key: string]: any; } | undefined;
    // name of another registered type to inherit from
    extends?: string | undefined;
    link?: ng.IDirectiveLinkFn | undefined;
    // allow re-registering a type with the same name without warning
    overwriteOk?: boolean | undefined;
    name: string;
    template?: Function | string | undefined;
    templateUrl?: Function | string | undefined;
    validateOptions?: Function | undefined;
    wrapper?: string | string[] | undefined;
}

/**
 * Options accepted by formlyConfig.setWrapper() when registering a template wrapper.
 */
interface IWrapperOptions {
    apiCheck?: { [key: string]: Function } | undefined;
    apiCheckFunction?: string | undefined; // 'throw' or 'warn'
    apiCheckInstance?: any;
    apiCheckOptions?: Object | undefined;
    overwriteOk?: boolean | undefined;
    name?: string | undefined;
    template?: string | undefined;
    templateUrl?: string | undefined;
    // field types this wrapper should automatically apply to
    types?: string[] | undefined;
    validateOptions?: Function | undefined;
}

/**
 * Extra (non-core) formlyConfig flags; see formlyConfig.extras.
 */
interface IFormlyConfigExtras {
    disableNgModelAttrsManipulator: boolean;
    apiCheckInstance: any;
    ngModelAttrsManipulatorPreferUnbound: boolean;
    // when true, autocomplete="off" is added to forms (Chrome workaround)
    removeChromeAutoComplete: boolean;
    defaultHideDirective: string;
    errorExistsAndShouldBeVisibleExpression: any;
    getFieldId: Function;
    fieldTransform: Function | Array<Function>;
    explicitAsync: boolean;
}

/**
 * The formlyConfig service: global registration point for types, wrappers and manipulators.
 */
interface IFormlyConfig {
    disableWarnings: boolean;
    extras: IFormlyConfigExtras;
    setType(typeOptions: ITypeOptions): void;
    setWrapper(wrapperOptions: IWrapperOptions | Array<IWrapperOptions>): void;
    templateManipulators: ITemplateManipulators;
}

/**
 * Subset of the field configuration exposed to templates as `options`.
 */
interface ITemplateScopeOptions {
    formControl: ng.IFormController | ng.IFormController[];
    templateOptions: ITemplateOptions;
    validation: Object;
}

/**
 * Scope available inside a field template.
 *
 * see http://docs.angular-formly.com/docs/custom-templates#templates-scope
 */
interface ITemplateScope {
    options: ITemplateScopeOptions;
    // Shortcut to options.formControl
    fc: ng.IFormController | ng.IFormController[];
    // all the fields for the form
    fields: IFieldArray;
    // the form controller the field is in
    form: any;
    // The object passed as options.formState to the formly-form directive. Use this to share state between fields.
    formState: Object;
    // The id of the field. You shouldn't have to use this.
    id: string;
    // The index of the field the form is on (in ng-repeat)
    index: number;
    // the model of the form (or the model specified by the field if it was specified).
    model?: { [key: string]: any; } | undefined;
    // Shortcut to options.validation.errorExistsAndShouldBeVisible
    showError: boolean;
    // Shortcut to options.templateOptions
    to: ITemplateOptions;
}

/**
 * The formlyValidationMessages service for registering reusable validation messages.
 *
 * see http://docs.angular-formly.com/docs/formlyvalidationmessages#addtemplateoptionvaluemessage
 */
interface IValidationMessages {
    addTemplateOptionValueMessage(name: string, prop: string, prefix: string, suffix: string, alternate: string): void;
    addStringMessage(name: string, string: string): void;
    // message builders keyed by validator name
    messages: { [key: string]: ($viewValue: any, $modelValue: any, scope: ITemplateScope) => string };
}
// closes the enclosing module declaration (opened before this chunk)
}
the_stack
import { createAttributeBuffers, ElementsBuffer, AttributeKind } from './buffer';
import { createTextures, Texture, Textures } from './texture';
import { WebGLContext, checkError } from './context';
import { ShaderCode, DefineValues } from '../shader-code';
import { Program } from './program';
import { RenderableSchema, RenderableValues, AttributeSpec, getValueVersions, splitValues, DefineSpec } from '../renderable/schema';
import { idFactory } from '../../mol-util/id-factory';
import { ValueCell } from '../../mol-util';
import { TextureImage, TextureVolume } from '../../mol-gl/renderable/util';
import { checkFramebufferStatus } from './framebuffer';
import { isDebugMode } from '../../mol-util/debug';
import { VertexArray } from './vertex-array';
import { fillSerial } from '../../mol-util/array';
import { deepClone } from '../../mol-util/object';
import { cloneUniformValues } from './uniform';

// monotonically increasing id shared by all render items
const getNextRenderItemId = idFactory();

export type DrawMode = 'points' | 'lines' | 'line-strip' | 'line-loop' | 'triangles' | 'triangle-strip' | 'triangle-fan'

/** Maps a DrawMode string onto the corresponding GL primitive enum of the given context. */
export function getDrawMode(ctx: WebGLContext, drawMode: DrawMode) {
    const { gl } = ctx;
    switch (drawMode) {
        case 'points': return gl.POINTS;
        case 'lines': return gl.LINES;
        case 'line-strip': return gl.LINE_STRIP;
        case 'line-loop': return gl.LINE_LOOP;
        case 'triangles': return gl.TRIANGLES;
        case 'triangle-strip': return gl.TRIANGLE_STRIP;
        case 'triangle-fan': return gl.TRIANGLE_FAN;
    }
}

/**
 * A drawable unit: one program per render variant plus the buffers/textures it needs.
 * `T` enumerates the variant names the item supports.
 */
export interface RenderItem<T extends string> {
    readonly id: number
    readonly materialId: number
    getProgram: (variant: T) => Program
    render: (variant: T, sharedTexturesList?: Textures) => void
    update: () => Readonly<ValueChanges>
    destroy: () => void
}

//

// NOTE: the object keys double as the variant names; the derived `keyof typeof`
// type and the Object.keys array must stay in sync with this literal.
const GraphicsRenderVariant = { 'colorBlended': '', 'colorWboit': '', 'pickObject': '', 'pickInstance': '', 'pickGroup': '', 'depth': '', 'markingDepth': '', 'markingMask': '' };
export type GraphicsRenderVariant = keyof typeof GraphicsRenderVariant
const GraphicsRenderVariants = Object.keys(GraphicsRenderVariant) as GraphicsRenderVariant[];

const ComputeRenderVariant = { 'compute': '' };
export type ComputeRenderVariant = keyof typeof ComputeRenderVariant
const ComputeRenderVariants = Object.keys(ComputeRenderVariant) as ComputeRenderVariant[];

/**
 * Compiles a program specialized for `variant` by injecting a `dRenderVariant` define.
 * Note: mutates `schema` once to register the `dRenderVariant` spec if missing.
 */
function createProgramVariant(ctx: WebGLContext, variant: string, defineValues: DefineValues, shaderCode: ShaderCode, schema: RenderableSchema) {
    defineValues = { ...defineValues, dRenderVariant: ValueCell.create(variant) };
    if (schema.dRenderVariant === undefined) {
        Object.defineProperty(schema, 'dRenderVariant', { value: DefineSpec('string') });
    }
    return ctx.resources.program(defineValues, shaderCode, schema);
}

//

type ProgramVariants = { [k: string]: Program }
type VertexArrayVariants = { [k: string]: VertexArray | null }

/** Flags reporting which categories of renderable values changed during `update()`. */
interface ValueChanges {
    attributes: boolean
    defines: boolean
    elements: boolean
    textures: boolean
}

function createValueChanges() {
    return {
        attributes: false,
        defines: false,
        elements: false,
        textures: false,
    };
}

function resetValueChanges(valueChanges: ValueChanges) {
    valueChanges.attributes = false;
    valueChanges.defines = false;
    valueChanges.elements = false;
    valueChanges.textures = false;
}

//

export type GraphicsRenderItem = RenderItem<GraphicsRenderVariant>
export function createGraphicsRenderItem(ctx: WebGLContext, drawMode: DrawMode, shaderCode: ShaderCode, schema: RenderableSchema, values: RenderableValues, materialId: number) {
    return createRenderItem(ctx, drawMode, shaderCode, schema, values, materialId, GraphicsRenderVariants);
}

export type ComputeRenderItem = RenderItem<ComputeRenderVariant>
export function createComputeRenderItem(ctx: WebGLContext, drawMode: DrawMode, shaderCode: ShaderCode, schema: RenderableSchema, values: RenderableValues, materialId = -1) {
    return createRenderItem(ctx, drawMode, shaderCode, schema, values, materialId, ComputeRenderVariants);
}

/**
 * Creates a render item
 *
 * - assumes that `values.drawCount` and `values.instanceCount` exist
 */
export function createRenderItem<T extends string>(ctx: WebGLContext, drawMode: DrawMode, shaderCode: ShaderCode, schema: RenderableSchema, values: RenderableValues, materialId: number, renderVariants: T[]): RenderItem<T> {
    const id = getNextRenderItemId();
    const { stats, state, resources } = ctx;
    const { instancedArrays, vertexArrayObject } = ctx.extensions;

    // emulate gl_VertexID when needed
    // if (!ctx.isWebGL2 && values.uVertexCount) {
    // not using gl_VertexID in WebGL2 but aVertex to ensure there is an active attribute with divisor 0
    // since FF 85 this is not needed anymore but lets keep it for backwards compatibility
    // https://bugzilla.mozilla.org/show_bug.cgi?id=1679693
    // see also note in src/mol-gl/shader/chunks/common-vert-params.glsl.ts
    if (values.uVertexCount) {
        const vertexCount = values.uVertexCount.ref.value;
        // serial 0..n-1 per vertex, injected as an extra attribute (and matching schema entry)
        (values as any).aVertex = ValueCell.create(fillSerial(new Float32Array(vertexCount)));
        (schema as any).aVertex = AttributeSpec('float32', 1, 0);
    }

    // partition the renderable values by how they are fed to the GPU
    const { attributeValues, defineValues, textureValues, uniformValues, materialUniformValues, bufferedUniformValues } = splitValues(schema, values);

    const uniformValueEntries = Object.entries(uniformValues);
    const materialUniformValueEntries = Object.entries(materialUniformValues);
    const backBufferUniformValueEntries = Object.entries(bufferedUniformValues);
    // front buffer is a deep clone so the app can mutate the back buffer between frames;
    // `update()` copies back -> front when versions change
    const frontBufferUniformValueEntries = Object.entries(cloneUniformValues(bufferedUniformValues));
    const defineValueEntries = Object.entries(defineValues);

    const versions = getValueVersions(values);

    const glDrawMode = getDrawMode(ctx, drawMode);

    // one compiled program per render variant
    const programs: ProgramVariants = {};
    for (const k of renderVariants) {
        programs[k] = createProgramVariant(ctx, k, defineValues, shaderCode, schema);
    }

    const textures = createTextures(ctx, schema, textureValues);
    const attributeBuffers = createAttributeBuffers(ctx, schema, attributeValues);

    let elementsBuffer: ElementsBuffer | undefined;
    const elements = values.elements;
    if (elements && elements.ref.value) {
        elementsBuffer = resources.elements(elements.ref.value);
    }

    // one VAO per variant (or null when the VAO extension is unavailable)
    const vertexArrays: VertexArrayVariants = {};
    for (const k of renderVariants) {
        vertexArrays[k] = vertexArrayObject ? resources.vertexArray(programs[k], attributeBuffers, elementsBuffer) : null;
    }

    let drawCount = values.drawCount.ref.value;
    let instanceCount = values.instanceCount.ref.value;

    // global stats bookkeeping; decremented again in destroy()
    stats.drawCount += drawCount;
    stats.instanceCount += instanceCount;
    stats.instancedDrawCount += instanceCount * drawCount;

    const valueChanges = createValueChanges();

    let destroyed = false;
    let currentProgramId = -1;

    return {
        id,
        materialId,
        getProgram: (variant: T) => programs[variant],

        render: (variant: T, sharedTexturesList?: Textures) => {
            if (drawCount === 0 || instanceCount === 0 || ctx.isContextLost) return;
            const program = programs[variant];
            if (program.id === currentProgramId && state.currentRenderItemId === id) {
                // fast path: same program and same item as last call — only refresh
                // per-item uniforms and texture bindings
                program.setUniforms(uniformValueEntries);
                if (sharedTexturesList && sharedTexturesList.length > 0) {
                    program.bindTextures(sharedTexturesList, 0);
                    program.bindTextures(textures, sharedTexturesList.length);
                } else {
                    program.bindTextures(textures, 0);
                }
            } else {
                const vertexArray = vertexArrays[variant];
                if (program.id !== state.currentProgramId || program.id !== currentProgramId ||
                    materialId === -1 || materialId !== state.currentMaterialId
                ) {
                    // console.log('program.id changed or materialId changed/-1', materialId)
                    if (program.id !== state.currentProgramId) program.use();
                    program.setUniforms(materialUniformValueEntries);
                    state.currentMaterialId = materialId;
                    currentProgramId = program.id;
                }
                program.setUniforms(uniformValueEntries);
                program.setUniforms(frontBufferUniformValueEntries);
                if (sharedTexturesList && sharedTexturesList.length > 0) {
                    program.bindTextures(sharedTexturesList, 0);
                    program.bindTextures(textures, sharedTexturesList.length);
                } else {
                    program.bindTextures(textures, 0);
                }
                if (vertexArray) {
                    vertexArray.bind();
                    // need to bind elements buffer explicitly since it is not always recorded in the VAO
                    if (elementsBuffer) elementsBuffer.bind();
                } else {
                    if (elementsBuffer) elementsBuffer.bind();
                    program.bindAttributes(attributeBuffers);
                }
                state.currentRenderItemId = id;
            }
            if (isDebugMode) {
                try {
                    checkFramebufferStatus(ctx.gl);
                } catch (e) {
                    throw new Error(`Framebuffer error rendering item id ${id}: '${e}'`);
                }
            }
            if (elementsBuffer) {
                instancedArrays.drawElementsInstanced(glDrawMode, drawCount, elementsBuffer._dataType, 0, instanceCount);
            } else {
                instancedArrays.drawArraysInstanced(glDrawMode, 0, drawCount, instanceCount);
            }
            if (isDebugMode) {
                try {
                    checkError(ctx.gl);
                } catch (e) {
                    throw new Error(`Draw error rendering item id ${id}: '${e}'`);
                }
            }
        },

        // Syncs GPU resources with changed value cells (compared by version number);
        // returns flags describing what categories changed.
        update: () => {
            resetValueChanges(valueChanges);

            // grow the emulated gl_VertexID attribute if the vertex count increased
            if (values.aVertex) {
                const vertexCount = values.uVertexCount.ref.value;
                if (values.aVertex.ref.value.length < vertexCount) {
                    ValueCell.update(values.aVertex, fillSerial(new Float32Array(vertexCount)));
                }
            }

            for (let i = 0, il = defineValueEntries.length; i < il; ++i) {
                const [k, value] = defineValueEntries[i];
                if (value.ref.version !== versions[k]) {
                    // console.log('define version changed', k);
                    valueChanges.defines = true;
                    versions[k] = value.ref.version;
                }
            }

            if (valueChanges.defines) {
                // console.log('some defines changed, need to rebuild programs');
                for (const k of renderVariants) {
                    programs[k].destroy();
                    programs[k] = createProgramVariant(ctx, k, defineValues, shaderCode, schema);
                }
            }

            if (values.drawCount.ref.version !== versions.drawCount) {
                // console.log('drawCount version changed');
                // apply deltas so the global stats stay consistent
                stats.drawCount += values.drawCount.ref.value - drawCount;
                stats.instancedDrawCount += instanceCount * values.drawCount.ref.value - instanceCount * drawCount;
                drawCount = values.drawCount.ref.value;
                versions.drawCount = values.drawCount.ref.version;
            }
            if (values.instanceCount.ref.version !== versions.instanceCount) {
                // console.log('instanceCount version changed');
                stats.instanceCount += values.instanceCount.ref.value - instanceCount;
                stats.instancedDrawCount += values.instanceCount.ref.value * drawCount - instanceCount * drawCount;
                instanceCount = values.instanceCount.ref.value;
                versions.instanceCount = values.instanceCount.ref.version;
            }

            for (let i = 0, il = attributeBuffers.length; i < il; ++i) {
                const [k, buffer] = attributeBuffers[i];
                const value = attributeValues[k];
                if (value.ref.version !== versions[k]) {
                    if (buffer.length >= value.ref.value.length) {
                        // console.log('attribute array large enough to update', buffer.id, k, value.ref.id, value.ref.version);
                        buffer.updateSubData(value.ref.value, 0, buffer.length);
                    } else {
                        // console.log('attribute array too small, need to create new attribute', buffer.id, k, value.ref.id, value.ref.version);
                        buffer.destroy();
                        const { itemSize, divisor } = schema[k] as AttributeSpec<AttributeKind>;
                        attributeBuffers[i][1] = resources.attribute(value.ref.value, itemSize, divisor);
                        valueChanges.attributes = true;
                    }
                    versions[k] = value.ref.version;
                }
            }

            if (elementsBuffer && values.elements.ref.version !== versions.elements) {
                if (elementsBuffer.length >= values.elements.ref.value.length) {
                    // console.log('elements array large enough to update', values.elements.ref.id, values.elements.ref.version);
                    elementsBuffer.updateSubData(values.elements.ref.value, 0, elementsBuffer.length);
                } else {
                    // console.log('elements array to small, need to create new elements', values.elements.ref.id, values.elements.ref.version);
                    elementsBuffer.destroy();
                    elementsBuffer = resources.elements(values.elements.ref.value);
                    valueChanges.elements = true;
                }
                versions.elements = values.elements.ref.version;
            }

            if (valueChanges.attributes || valueChanges.defines || valueChanges.elements) {
                // console.log('program/defines or buffers changed, update vaos');
                for (const k of renderVariants) {
                    const vertexArray = vertexArrays[k];
                    if (vertexArray) vertexArray.destroy();
                    vertexArrays[k] = vertexArrayObject ? resources.vertexArray(programs[k], attributeBuffers, elementsBuffer) : null;
                }
            }

            for (let i = 0, il = textures.length; i < il; ++i) {
                const [k, texture] = textures[i];
                const value = textureValues[k];
                if (value.ref.version !== versions[k]) {
                    // update of textures with kind 'texture' is done externally
                    if (schema[k].kind !== 'texture') {
                        // console.log('texture version changed, uploading image', k);
                        texture.load(value.ref.value as TextureImage<any> | TextureVolume<any>);
                        valueChanges.textures = true;
                    } else {
                        textures[i][1] = value.ref.value as Texture;
                    }
                    versions[k] = value.ref.version;
                }
            }

            for (let i = 0, il = backBufferUniformValueEntries.length; i < il; ++i) {
                const [k, uniform] = backBufferUniformValueEntries[i];
                if (uniform.ref.version !== versions[k]) {
                    // console.log('back-buffer uniform version changed, updating front-buffer', k);
                    ValueCell.update(frontBufferUniformValueEntries[i][1], deepClone(uniform.ref.value));
                    versions[k] = uniform.ref.version;
                }
            }

            return valueChanges;
        },

        // Releases all owned GPU resources and reverts the stats contributions; idempotent.
        destroy: () => {
            if (!destroyed) {
                for (const k of renderVariants) {
                    programs[k].destroy();
                    const vertexArray = vertexArrays[k];
                    if (vertexArray) vertexArray.destroy();
                }
                textures.forEach(([k, texture]) => {
                    // lifetime of textures with kind 'texture' is defined externally
                    if (schema[k].kind !== 'texture') {
                        texture.destroy();
                    }
                });
                attributeBuffers.forEach(([_, buffer]) => buffer.destroy());
                if (elementsBuffer) elementsBuffer.destroy();
                stats.drawCount -= drawCount;
                stats.instanceCount -= instanceCount;
                stats.instancedDrawCount -= instanceCount * drawCount;
                destroyed = true;
            }
        }
    };
}
the_stack
import { ErrorHandler, LogHandler, LogLevel, LoggerFacade } from '@optimizely/js-sdk-logging'; import { EventProcessor } from '@optimizely/js-sdk-event-processor'; import { NotificationCenter } from '@optimizely/js-sdk-utils'; import { ProjectConfig } from './core/project_config'; export interface BucketerParams { experimentId: string; experimentKey: string; userId: string; trafficAllocationConfig: TrafficAllocation[]; experimentKeyMap: { [key: string]: Experiment }; experimentIdMap: { [id: string]: Experiment }; groupIdMap: { [key: string]: Group }; variationIdMap: { [id: string]: Variation }; logger: LogHandler; bucketingId: string; } export interface DecisionResponse<T> { readonly result: T; readonly reasons: (string | number)[][]; } export type UserAttributes = { // TODO[OASIS-6649]: Don't use any type // eslint-disable-next-line @typescript-eslint/no-explicit-any [name: string]: any; } export interface ExperimentBucketMap { [experiment_id: string]: { variation_id: string } } // Information about past bucketing decisions for a user. export interface UserProfile { user_id: string; experiment_bucket_map: ExperimentBucketMap; } export type EventTags = { [key: string]: string | number | null; }; export interface UserProfileService { lookup(userId: string): UserProfile; save(profile: UserProfile): void; } export interface DatafileManagerConfig { sdkKey: string, datafile?: string; } export interface DatafileOptions { autoUpdate?: boolean; updateInterval?: number; urlTemplate?: string; datafileAccessToken?: string; } export interface ListenerPayload { userId: string; attributes?: UserAttributes; } export type NotificationListener<T extends ListenerPayload> = (notificationData: T) => void; // An event to be submitted to Optimizely, enabling tracking the reach and impact of // tests and feature rollouts. export interface Event { // URL to which to send the HTTP request. url: string; // HTTP method with which to send the event. 
httpVerb: 'POST'; // Value to send in the request body, JSON-serialized. // TODO[OASIS-6649]: Don't use any type // eslint-disable-next-line @typescript-eslint/no-explicit-any params: any; } export interface EventDispatcher { /** * @param event * Event being submitted for eventual dispatch. * @param callback * After the event has at least been queued for dispatch, call this function to return * control back to the Client. */ dispatchEvent: (event: Event, callback: (response: { statusCode: number; }) => void) => void; } export interface VariationVariable { id: string; value: string; } export interface Variation { id: string; key: string; featureEnabled?: boolean; variablesMap: OptimizelyVariablesMap; variables?: VariationVariable[]; } export interface Experiment { id: string; key: string; variations: Variation[]; variationKeyMap: { [key: string]: Variation }; groupId?: string; layerId: string; status: string; audienceConditions: Array<string | string[]>; audienceIds: string[]; trafficAllocation: TrafficAllocation[]; forcedVariations?: { [key: string]: string }; } export enum VariableType { BOOLEAN = 'boolean', DOUBLE = 'double', INTEGER = 'integer', STRING = 'string', JSON = 'json', } export interface FeatureVariable { type: VariableType; key: string; id: string; defaultValue: string; subType?: string; } export interface FeatureFlag { rolloutId: string; key: string; id: string; experimentIds: string[], variables: FeatureVariable[], variableKeyMap: { [key: string]: FeatureVariable } groupId?: string; } export type Condition = { name: string; type: string; match?: string; value: string | number | boolean | null; } export interface Audience { id: string; name: string; conditions: unknown[] | string; } export interface TrafficAllocation { entityId: string; endOfRange: number; } export interface Group { id: string; policy: string; trafficAllocation: TrafficAllocation[]; experiments: Experiment[]; } export interface TrafficAllocation { entityId: string; endOfRange: 
number; } export interface Group { id: string; policy: string; trafficAllocation: TrafficAllocation[]; experiments: Experiment[]; } export interface FeatureKeyMap { [key: string]: FeatureFlag } export interface OnReadyResult { success: boolean; reason?: string; } export type ObjectWithUnknownProperties = { [key: string]: unknown; } export interface Rollout { id: string; experiments: Experiment[]; } //TODO: Move OptimizelyDecideOption to @optimizely/optimizely-sdk/lib/utils/enums export enum OptimizelyDecideOption { DISABLE_DECISION_EVENT = 'DISABLE_DECISION_EVENT', ENABLED_FLAGS_ONLY = 'ENABLED_FLAGS_ONLY', IGNORE_USER_PROFILE_SERVICE = 'IGNORE_USER_PROFILE_SERVICE', INCLUDE_REASONS = 'INCLUDE_REASONS', EXCLUDE_VARIABLES = 'EXCLUDE_VARIABLES' } /** * options required to create optimizely object */ export interface OptimizelyOptions { UNSTABLE_conditionEvaluators?: unknown; clientEngine: string; clientVersion?: string; datafile?: string; datafileManager?: DatafileManager; errorHandler: ErrorHandler; eventProcessor: EventProcessor; isValidInstance: boolean; jsonSchemaValidator?: { validate(jsonObject: unknown): boolean, }; logger: LoggerFacade; sdkKey?: string; userProfileService?: UserProfileService | null; defaultDecideOptions?: OptimizelyDecideOption[]; notificationCenter: NotificationCenter; } /** * Optimizely Config Entities */ export interface OptimizelyExperiment { id: string; key: string; audiences: string; variationsMap: { [variationKey: string]: OptimizelyVariation; }; } export interface OptimizelyVariable { id: string; key: string; type: string; value: string; } /** * Entry level Config Entities */ export interface SDKOptions { // Datafile string datafile?: string; // options for Datafile Manager datafileOptions?: DatafileOptions; // errorHandler object for logging error errorHandler?: ErrorHandler; // limit of events to dispatch in a batch eventBatchSize?: number; // event dispatcher function eventDispatcher?: EventDispatcher; // maximum time for an event 
to stay in the queue eventFlushInterval?: number; // maximum size for the event queue eventMaxQueueSize?: number; // flag to validate if this instance is valid isValidInstance: boolean; // level of logging i.e debug, info, error, warning etc logLevel?: LogLevel | string; // LogHandler object for logging logger?: LogHandler; // sdk key sdkKey?: string; // user profile that contains user information userProfileService?: UserProfileService; // dafault options for decide API defaultDecideOptions?: OptimizelyDecideOption[]; } export type OptimizelyExperimentsMap = { [experimentKey: string]: OptimizelyExperiment; } export type OptimizelyVariablesMap = { [variableKey: string]: OptimizelyVariable; } export type OptimizelyFeaturesMap = { [featureKey: string]: OptimizelyFeature; } export type OptimizelyAttribute = { id: string; key: string; }; export type OptimizelyAudience = { id: string; name: string; conditions: string; }; export type OptimizelyEvent = { id: string; key: string; experimentsIds: string[]; }; export interface OptimizelyFeature { id: string; key: string; experimentRules: OptimizelyExperiment[]; deliveryRules: OptimizelyExperiment[]; variablesMap: OptimizelyVariablesMap; /** * @deprecated Use experimentRules and deliveryRules */ experimentsMap: OptimizelyExperimentsMap; } export interface OptimizelyVariation { id: string; key: string; featureEnabled?: boolean; variablesMap: OptimizelyVariablesMap; } export interface OptimizelyConfig { environmentKey: string; sdkKey: string; revision: string; /** * This experimentsMap is for experiments of legacy projects only. * For flag projects, experiment keys are not guaranteed to be unique * across multiple flags, so this map may not include all experiments * when keys conflict. 
*/ experimentsMap: OptimizelyExperimentsMap; featuresMap: OptimizelyFeaturesMap; attributes: OptimizelyAttribute[]; audiences: OptimizelyAudience[]; events: OptimizelyEvent[]; getDatafile(): string; } export interface OptimizelyUserContext { getUserId(): string; getAttributes(): UserAttributes; setAttribute(key: string, value: unknown): void; decide( key: string, options: OptimizelyDecideOption[] ): OptimizelyDecision; decideForKeys( keys: string[], options: OptimizelyDecideOption[], ): { [key: string]: OptimizelyDecision }; decideAll( options: OptimizelyDecideOption[], ): { [key: string]: OptimizelyDecision }; trackEvent(eventName: string, eventTags?: EventTags): void; findValidatedForcedDecision( config: ProjectConfig, flagKey: string, ruleKey?: string ): DecisionResponse<Variation | null>; setForcedDecision(context: OptimizelyDecisionContext, decision: OptimizelyForcedDecision): boolean; getForcedDecision(context: OptimizelyDecisionContext): OptimizelyForcedDecision | null; removeForcedDecision(context: OptimizelyDecisionContext): boolean; removeAllForcedDecisions(): boolean; } export interface OptimizelyDecision { variationKey: string | null; // The boolean value indicating if the flag is enabled or not enabled: boolean; // The collection of variables associated with the decision variables: { [variableKey: string]: unknown }; // The rule key of the decision ruleKey: string | null; // The flag key for which the decision has been made for flagKey: string; // A copy of the user context for which the decision has been made for userContext: OptimizelyUserContext; // An array of error/info messages describing why the decision has been made. 
reasons: string[];
}

// Payload delivered when the datafile manager observes a new datafile revision.
export interface DatafileUpdate {
    datafile: string;
}

export interface DatafileUpdateListener {
    (datafileUpdate: DatafileUpdate): void;
}

// TODO: Replace this with the one from js-sdk-models
interface Managed {
    start(): void;
    stop(): Promise<unknown>;
}

// Fetches and caches the project datafile; emits updates to registered listeners.
export interface DatafileManager extends Managed {
    get: () => string;
    // returns an unsubscribe function
    on(eventName: string, listener: DatafileUpdateListener): () => void;
    onReady: () => Promise<void>;
}

// Identifies a flag (and optionally a rule within it) for forced-decision APIs.
export interface OptimizelyDecisionContext {
    flagKey: string;
    ruleKey?: string;
}

export interface OptimizelyForcedDecision {
    variationKey: string;
}
the_stack
// Jest specs for @bugsnag/core Client: construction/config validation,
// plugins, logging, notify() delivery paths, breadcrumbs, sessions,
// callbacks, context, metadata and user handling.
import Client from '../client'
import Event from '../event'
import Session from '../session'
import breadcrumbTypes from '../lib/breadcrumb-types'
import { BreadcrumbType } from '../types/common'

// Shared no-op / identity helpers used as stub session-delegate members.
const noop = () => {}
const id = <T>(a: T) => a

describe('@bugsnag/core/client', () => {
  describe('constructor', () => {
    it('can handle bad input', () => {
      // @ts-ignore
      expect(() => new Client()).toThrow()
      // @ts-ignore
      expect(() => new Client('foo')).toThrow()
    })
  })

  describe('configure()', () => {
    it('handles bad/good input', () => {
      expect(() => {
        // no opts supplied
        // @ts-ignore
        const client = new Client({})
        expect(client).toBe(client)
      }).toThrow()

      // bare minimum opts supplied
      const client = new Client({ apiKey: 'API_KEY_YEAH' })
      expect(client._config.apiKey).toBe('API_KEY_YEAH')
    })

    it('extends partial options', () => {
      // Omitted enabledErrorTypes keys should be filled with defaults.
      const client = new Client({
        apiKey: 'API_KEY',
        enabledErrorTypes: { unhandledExceptions: false }
      })
      expect(client._config.enabledErrorTypes).toEqual({
        unhandledExceptions: false,
        unhandledRejections: true
      })
    })

    it('warns with a valid but incorrect-looking api key', () => {
      const logger = { debug: jest.fn(), warn: jest.fn(), info: jest.fn(), error: jest.fn() }
      const client = new Client({ apiKey: 'API_KEY', logger })
      expect(client).toBeTruthy()
      expect(logger.warn.mock.calls.length).toBe(1)
      expect(logger.warn.mock.calls[0][0].message).toBe('Invalid configuration\n - apiKey should be a string of 32 hexadecimal characters, got "API_KEY"')
    })

    it('does not warn with a valid api key', () => {
      const logger = { debug: jest.fn(), warn: jest.fn(), info: jest.fn(), error: jest.fn() }
      const client = new Client({ apiKey: '123456abcdef123456abcdef123456ab', logger })
      expect(client).toBeTruthy()
      expect(logger.warn.mock.calls.length).toBe(0)
    })
  })

  describe('use()', () => {
    it('supports plugins', done => {
      let pluginClient
      // The plugin's load() is expected to run during construction with
      // the client instance as its argument.
      const client = new Client({
        apiKey: '123',
        plugins: [{
          name: 'test plugin',
          load: (c) => {
            pluginClient = c
            done()
          }
        }]
      })
      expect(pluginClient).toEqual(client)
    })
  })

  describe('logger()', () => {
    it('can supply a different logger', done => {
      const client = new Client({ apiKey: 'API_KEY_YEAH' })
      const log = (msg: any) => {
        expect(msg).toBeTruthy()
        done()
      }
      client._logger = { debug: log, info: log, warn: log, error: log }
      client._logger.debug('hey')
    })

    it('can supply a different logger via config', done => {
      const log = (msg: any) => {
        expect(msg).toBeTruthy()
        done()
      }
      const client = new Client({ apiKey: 'API_KEY_YEAH', logger: { debug: log, info: log, warn: log, error: log } })
      client._logger.debug('hey')
    })

    it('is ok with a null logger', () => {
      const client = new Client({ apiKey: 'API_KEY_YEAH', logger: null })
      expect(() => { client._logger.debug('hey') }).not.toThrow()
    })
  })

  describe('notify()', () => {
    it('delivers an error event', done => {
      const client = new Client({ apiKey: 'API_KEY_YEAH' })
      // _setDelivery replaces the transport so payloads can be inspected
      // instead of being sent over the network.
      client._setDelivery(client => ({
        sendEvent: payload => {
          expect(payload).toBeTruthy()
          expect(Array.isArray(payload.events)).toBe(true)
          const event = payload.events[0].toJSON()
          expect(event.severity).toBe('warning')
          expect(event.severityReason).toEqual({ type: 'handledException' })
          process.nextTick(() => done())
        },
        sendSession: () => {}
      }))
      client.notify(new Error('oh em gee'))
    })

    it('supports setting severity via callback', done => {
      const client = new Client({ apiKey: 'API_KEY_YEAH' })
      client._setDelivery(client => ({
        sendEvent: (payload) => {
          expect(payload).toBeTruthy()
          expect(Array.isArray(payload.events)).toBe(true)
          const event = payload.events[0].toJSON()
          expect(event.severity).toBe('info')
          expect(event.severityReason).toEqual({ type: 'userCallbackSetSeverity' })
          done()
        },
        sendSession: () => {}
      }))
      client.notify(new Error('oh em gee'), event => {
        event.severity = 'info'
      })
    })

    it('supports setting unhandled via callback', done => {
      const client = new Client({ apiKey: 'API_KEY_YEAH' })
      const session = new Session()
      // @ts-ignore
      client._session = session
      client._setDelivery(client => ({
        sendEvent: (payload) => {
          expect(payload).toBeTruthy()
          expect(Array.isArray(payload.events)).toBe(true)
          const event = payload.events[0].toJSON()
          expect(event.unhandled).toBe(true)
          expect(event.severityReason).toEqual({ type: 'handledException', unhandledOverridden: true })
          expect(event.session).toEqual(session)
          // Flipping unhandled in the callback should count against the
          // session's unhandled tally, not the handled one.
          expect((event.session as Session)._handled).toBe(0)
          expect((event.session as Session)._unhandled).toBe(1)
          done()
        },
        sendSession: () => {}
      }))
      client.notify(new Error('oh em gee'), event => {
        event.unhandled = true
      })
    })

    // eslint-disable-next-line jest/expect-expect
    it('supports preventing send by returning false in onError callback', done => {
      const client = new Client({ apiKey: 'API_KEY_YEAH', onError: () => false })
      client._setDelivery(client => ({
        sendEvent: (payload) => {
          done('sendEvent() should not be called')
        },
        sendSession: () => {}
      }))
      client.notify(new Error('oh em gee'))
      // give the event loop a tick to see if the event gets sent
      process.nextTick(() => done())
    })

    // eslint-disable-next-line jest/expect-expect
    it('supports preventing send by returning a Promise that resolves to false in onError callback', done => {
      const client = new Client({ apiKey: 'API_KEY_YEAH', onError: () => Promise.resolve(false) })
      client._setDelivery(client => ({
        sendEvent: (payload) => {
          done('sendEvent() should not be called')
        },
        sendSession: () => {}
      }))
      client.notify(new Error('oh em gee'), () => {}, () => {
        // give the event loop a tick to see if the event gets sent
        process.nextTick(() => done())
      })
    })

    // eslint-disable-next-line jest/expect-expect
    it('supports preventing send by returning false in notify callback', done => {
      const client = new Client({ apiKey: 'API_KEY_YEAH' })
      client._setDelivery(client => ({
        sendEvent: (payload) => {
          done('sendEvent() should not be called')
        },
        sendSession: () => {}
      }))
      client.notify(new Error('oh em gee'), () => false)
      // give the event loop a tick to see if the event gets sent
      process.nextTick(() => done())
    })

    it('tolerates errors in callbacks', done => {
      // A throwing onError must not prevent later callbacks or delivery.
      expect.assertions(2)
      const onErrorSpy = jest.fn()
      const client = new Client({
        apiKey: 'API_KEY_YEAH',
        onError: [
          event => { throw new Error('Ooops') },
          onErrorSpy
        ]
      })
      client._setDelivery(client => ({
        sendEvent: (payload) => {
          expect(payload.events[0].errors[0].errorMessage).toBe('oh no!')
          expect(onErrorSpy).toHaveBeenCalledTimes(1)
          done()
        },
        sendSession: () => {}
      }))
      client.notify(new Error('oh no!'))
    })

    // eslint-disable-next-line jest/expect-expect
    it('supports preventing send with enabledReleaseStages', done => {
      const client = new Client({ apiKey: 'API_KEY_YEAH', enabledReleaseStages: ['qa'] })
      client._setDelivery(client => ({
        sendEvent: (payload) => {
          done('sendEvent() should not be called')
        },
        sendSession: () => {}
      }))
      client.notify(new Error('oh em eff gee'))
      // give the event loop a tick to see if the event gets sent
      process.nextTick(() => done())
    })

    // eslint-disable-next-line jest/expect-expect
    it('supports setting releaseStage via config.releaseStage', done => {
      const client = new Client({ apiKey: 'API_KEY_YEAH', releaseStage: 'staging', enabledReleaseStages: ['production'] })
      client._setDelivery(client => ({
        sendEvent: (payload) => {
          done('sendEvent() should not be called')
        },
        sendSession: () => {}
      }))
      client.notify(new Error('oh em eff gee'))
      // give the event loop a tick to see if the event gets sent
      process.nextTick(() => done())
    })

    it('includes releaseStage in event.app', done => {
      const client = new Client({ apiKey: 'API_KEY_YEAH', enabledReleaseStages: ['staging'], releaseStage: 'staging' })
      client._setDelivery(client => ({
        sendEvent: (payload) => {
          expect(payload.events[0].app.releaseStage).toBe('staging')
          done()
        },
        sendSession: () => {}
      }))
      client.notify(new Error('oh em eff gee'))
    })

    it('populates app.version if config.appVersion is supplied', done => {
      const client = new Client({ apiKey: 'API_KEY_YEAH', appVersion: '1.2.3' })
      client._setDelivery(client => ({
        sendEvent: (payload) => {
          expect(payload.events[0].app.version).toBe('1.2.3')
          done()
        },
        sendSession: () => {}
      }))
      client.notify(new Error('oh em eff gee'))
    })

    it('can handle all kinds of bad input', () => {
      // Non-error inputs should be converted to events with a diagnostic
      // message and (where applicable) a 'notify()' metadata tab.
      const payloads: any[] = []
      const client = new Client({ apiKey: 'API_KEY_YEAH' })
      client._setDelivery(client => ({ sendEvent: (payload) => payloads.push(payload), sendSession: () => {} }))

      // @ts-ignore
      client.notify(undefined)
      // @ts-ignore
      client.notify(null)
      // @ts-ignore
      client.notify(() => {})
      // @ts-ignore
      client.notify({ name: 'some message' })
      // @ts-ignore
      client.notify(1)
      client.notify('errrororor')
      // @ts-ignore
      client.notify('str1', 'str2')
      // @ts-ignore
      client.notify('str1', null)

      expect(payloads[0].events[0].toJSON().exceptions[0].message).toBe('notify() received a non-error. See "notify()" tab for more detail.')
      expect(payloads[0].events[0].toJSON().metaData).toEqual({ 'notify()': { 'non-error parameter': 'undefined' } })
      expect(payloads[1].events[0].toJSON().exceptions[0].message).toBe('notify() received a non-error. See "notify()" tab for more detail.')
      expect(payloads[1].events[0].toJSON().metaData).toEqual({ 'notify()': { 'non-error parameter': 'null' } })
      expect(payloads[2].events[0].toJSON().exceptions[0].message).toBe('notify() received a non-error. See "notify()" tab for more detail.')
      expect(payloads[3].events[0].toJSON().exceptions[0].message).toBe('notify() received a non-error. See "notify()" tab for more detail.')
      expect(payloads[4].events[0].toJSON().exceptions[0].message).toBe('1')
      expect(payloads[5].events[0].toJSON().exceptions[0].message).toBe('errrororor')
      expect(payloads[6].events[0].toJSON().exceptions[0].message).toBe('str1')
      expect(payloads[7].events[0].toJSON().exceptions[0].message).toBe('str1')
      expect(payloads[7].events[0].toJSON().metaData).toEqual({})
    })

    it('supports { name, message } usage', () => {
      const payloads: any[] = []
      const client = new Client({ apiKey: 'API_KEY_YEAH' })
      client._setDelivery(client => ({ sendEvent: (payload) => payloads.push(payload), sendSession: () => {} }))
      client.notify({ name: 'UnknownThing', message: 'found a thing that couldn’t be dealt with' })

      expect(payloads.length).toBe(1)
      expect(payloads[0].events[0].toJSON().exceptions[0].errorClass).toBe('UnknownThing')
      expect(payloads[0].events[0].toJSON().exceptions[0].message).toBe('found a thing that couldn’t be dealt with')
      // Internal Client frames should be stripped from the stacktrace.
      expect(payloads[0].events[0].toJSON().exceptions[0].stacktrace[0].method).not.toMatch(/Client/)
      expect(payloads[0].events[0].toJSON().exceptions[0].stacktrace[0].file).not.toMatch(/core\/client\.js/)
    })

    it('leaves a breadcrumb of the error', () => {
      const payloads: any[] = []
      const client = new Client({ apiKey: 'API_KEY_YEAH' })
      client._setDelivery(client => ({ sendEvent: (payload) => payloads.push(payload), sendSession: () => {} }))
      client.notify(new Error('foobar'))
      expect(client._breadcrumbs.length).toBe(1)
      expect(client._breadcrumbs[0].type).toBe('error')
      expect(client._breadcrumbs[0].message).toBe('Error')
      expect(client._breadcrumbs[0].metadata.stacktrace).toBe(undefined)
      // the error shouldn't appear as a breadcrumb for itself
      expect(payloads[0].events[0].breadcrumbs.length).toBe(0)
    })

    it('leaves a breadcrumb of the error when enabledBreadcrumbTypes=null', () => {
      const payloads: any[] = []
      const client = new Client({ apiKey: 'API_KEY_YEAH', enabledBreadcrumbTypes: null })
      client._setDelivery(client => ({ sendEvent: (payload) => payloads.push(payload), sendSession: () => {} }))
      client.notify(new Error('foobar'))
      expect(client._breadcrumbs).toHaveLength(1)
      expect(client._breadcrumbs[0].type).toBe('error')
      expect(client._breadcrumbs[0].message).toBe('Error')
      expect(client._breadcrumbs[0].metadata.stacktrace).toBe(undefined)
      // the error shouldn't appear as a breadcrumb for itself
      expect(payloads[0].events[0].breadcrumbs).toHaveLength(0)
    })

    it('doesn’t modify global client.metadata when using addMetadata() method', () => {
      const client = new Client({ apiKey: 'API_KEY_YEAH' })
      client.addMetadata('foo', 'bar', [1, 2, 3])
      client.notify(new Error('changes afoot'), (event) => {
        event.addMetadata('foo', '3', 1)
      })
      expect(client._metadata.foo['3']).toBe(undefined)
    })

    it('should call the callback (success)', done => {
      const client = new Client({ apiKey: 'API_KEY' })
      client._setDelivery(client => ({
        sendSession: () => {},
        sendEvent: (payload, cb) => cb(null)
      }))
      const session = new Session()
      // @ts-ignore
      client._session = session
      client.notify(new Error('111'), () => {}, (err, event) => {
        expect(err).toBe(null)
        expect(event).toBeTruthy()
        expect(event.errors[0].errorMessage).toBe('111')
        expect((event as Event)._session).toBe(session)
        expect(session.toJSON().events.handled).toBe(1)
        done()
      })
    })

    it('should call the callback (err)', done => {
      const client = new Client({ apiKey: 'API_KEY' })
      client._setDelivery(client => ({
        sendSession: () => {},
        sendEvent: (payload, cb) => cb(new Error('flerp'))
      }))
      const session = new Session()
      // @ts-ignore
      client._session = session
      client.notify(new Error('111'), () => {}, (err, event) => {
        expect(err).toBeTruthy()
        expect(err.message).toBe('flerp')
        expect(event).toBeTruthy()
        expect(event.errors[0].errorMessage).toBe('111')
        expect((event as Event)._session).toBe(session)
        expect(session.toJSON().events.handled).toBe(1)
        done()
      })
    })

    it('should call the callback even if the event doesn’t send (enabledReleaseStages)', done => {
      const client = new Client({
        apiKey: 'API_KEY',
        enabledReleaseStages: ['production'],
        releaseStage: 'development'
      })
      client._setDelivery(client => ({
        sendSession: () => {},
        sendEvent: () => {
          done('sendEvent() should not be called')
        }
      }))
      const session = new Session()
      // @ts-ignore
      client._session = session
      client.notify(new Error('111'), () => {}, (err, event) => {
        expect(err).toBe(null)
        expect(event).toBeTruthy()
        expect(event.errors[0].errorMessage).toBe('111')
        expect((event as Event)._session).toBe(undefined)
        done()
      })
    })

    it('should call the callback even if the event doesn’t send (onError)', done => {
      const client = new Client({ apiKey: 'API_KEY', onError: () => false })
      client._setDelivery(client => ({
        sendSession: () => {},
        sendEvent: () => {
          done('sendEvent() should not be called')
        }
      }))
      const session = new Session()
      // @ts-ignore
      client._session = session
      client.notify(new Error('111'), () => {}, (err, event) => {
        expect(err).toBe(null)
        expect(event).toBeTruthy()
        expect(event.errors[0].errorMessage).toBe('111')
        expect((event as Event)._session).toBe(undefined)
        done()
      })
    })

    it('should attach the original error to the event object', done => {
      const client = new Client({ apiKey: 'API_KEY', onError: () => false })
      client._setDelivery(client => ({
        sendSession: () => {},
        sendEvent: (payload, cb) => cb(null)
      }))
      const orig = new Error('111')
      // @ts-ignore
      client.notify(orig, {}, (err, event) => {
        expect(err).toBe(null)
        expect(event).toBeTruthy()
        expect(event.originalError).toBe(orig)
        done()
      })
    })
  })

  describe('leaveBreadcrumb()', () => {
    it('creates a manual breadcrumb when a list of arguments are supplied', () => {
      const client = new Client({ apiKey: 'API_KEY_YEAH' })
      client.leaveBreadcrumb('french stick')
      expect(client._breadcrumbs.length).toBe(1)
      expect(client._breadcrumbs[0].type).toBe('manual')
      expect(client._breadcrumbs[0].message).toBe('french stick')
      expect(client._breadcrumbs[0].metadata).toEqual({})
    })

    it('caps the length of breadcrumbs at the configured limit', () => {
      // With maxBreadcrumbs: 3, the oldest entry is dropped on overflow.
      const client = new Client({ apiKey: 'API_KEY_YEAH', maxBreadcrumbs: 3 })
      client.leaveBreadcrumb('malted rye')
      expect(client._breadcrumbs.length).toBe(1)
      client.leaveBreadcrumb('medium sliced white hovis')
      expect(client._breadcrumbs.length).toBe(2)
      client.leaveBreadcrumb('pumperninkel')
      expect(client._breadcrumbs.length).toBe(3)
      client.leaveBreadcrumb('seedy farmhouse')
      expect(client._breadcrumbs.length).toBe(3)
      expect(client._breadcrumbs.map(b => b.message)).toEqual([
        'medium sliced white hovis',
        'pumperninkel',
        'seedy farmhouse'
      ])
    })

    it('doesn’t add the breadcrumb if it didn’t contain a message', () => {
      const client = new Client({ apiKey: 'API_KEY_YEAH' })
      // @ts-ignore
      client.leaveBreadcrumb(undefined)
      // @ts-ignore
      client.leaveBreadcrumb(null, { data: 'is useful' })
      // @ts-ignore
      client.leaveBreadcrumb(null, {}, null)
      // @ts-ignore
      client.leaveBreadcrumb(null, { t: 10 }, null, 4)
      expect(client._breadcrumbs.length).toBe(0)
    })

    it('allows maxBreadcrumbs to be set to 0', () => {
      const client = new Client({ apiKey: 'API_KEY_YEAH', maxBreadcrumbs: 0 })
      client.leaveBreadcrumb('toast')
      expect(client._breadcrumbs.length).toBe(0)
      client.leaveBreadcrumb('toast')
      client.leaveBreadcrumb('toast')
      client.leaveBreadcrumb('toast')
      client.leaveBreadcrumb('toast')
      expect(client._breadcrumbs.length).toBe(0)
    })

    it('doesn’t store the breadcrumb if an onBreadcrumb callback returns false', () => {
      let calls = 0
      const client = new Client({
        apiKey: 'API_KEY_YEAH',
        onBreadcrumb: () => {
          calls++
          return false
        }
      })
      client.leaveBreadcrumb('message')
      expect(calls).toBe(1)
      expect(client._breadcrumbs.length).toBe(0)
    })

    it('tolerates errors in onBreadcrumb callbacks', () => {
      // A throwing onBreadcrumb should not discard the breadcrumb.
      let calls = 0
      const client = new Client({
        apiKey: 'API_KEY_YEAH',
        onBreadcrumb: () => {
          calls++
          throw new Error('uh oh')
        }
      })
      client.leaveBreadcrumb('message')
      expect(calls).toBe(1)
      expect(client._breadcrumbs.length).toBe(1)
    })

    it('coerces breadcrumb types that aren’t valid to "manual"', () => {
      const client = new Client({ apiKey: 'API_KEY_YEAH' })
      // @ts-ignore
      client.leaveBreadcrumb('GET /jim', {}, 'requeeest')
      expect(client._breadcrumbs.length).toBe(1)
      expect(client._breadcrumbs[0].type).toBe('manual')
    })

    it('only leaves an error breadcrumb if enabledBreadcrumbTypes contains "error"', (done) => {
      const client = new Client({
        apiKey: 'API_KEY_YEAH',
        enabledBreadcrumbTypes: []
      })
      client._setDelivery(client => ({
        sendSession: () => {},
        sendEvent: (payload, cb) => cb(null)
      }))
      client.notify(new Error('oh no'), () => {}, () => {
        expect(client._breadcrumbs.length).toBe(0)
        done()
      })
    })
  })

  describe('_isBreadcrumbTypeEnabled()', () => {
    // Parameterised over every known breadcrumb type.
    it.each(breadcrumbTypes)('returns true for "%s" when enabledBreadcrumbTypes is not configured', (type) => {
      const client = new Client({ apiKey: 'API_KEY_YEAH' })
      expect(client._isBreadcrumbTypeEnabled(type)).toBe(true)
    })

    it.each(breadcrumbTypes)('returns true for "%s" when enabledBreadcrumbTypes=null', (type) => {
      const client = new Client({ apiKey: 'API_KEY_YEAH', enabledBreadcrumbTypes: null })
      expect(client._isBreadcrumbTypeEnabled(type)).toBe(true)
    })

    it.each(breadcrumbTypes)('returns false for "%s" when enabledBreadcrumbTypes=[]', (type) => {
      const client = new Client({ apiKey: 'API_KEY_YEAH', enabledBreadcrumbTypes: [] })
      expect(client._isBreadcrumbTypeEnabled(type)).toBe(false)
    })

    it.each(breadcrumbTypes)('returns true for "%s" when enabledBreadcrumbTypes only contains it', (type) => {
      const client = new Client({ apiKey: 'API_KEY_YEAH', enabledBreadcrumbTypes: [type as BreadcrumbType] })
      expect(client._isBreadcrumbTypeEnabled(type)).toBe(true)
    })

    it.each(breadcrumbTypes)('returns false for "%s" when enabledBreadcrumbTypes does not contain it', (type) => {
      const enabledBreadcrumbTypes = breadcrumbTypes.filter(enabledType => enabledType !== type)
      const client = new Client({ apiKey: 'API_KEY_YEAH', enabledBreadcrumbTypes: enabledBreadcrumbTypes as BreadcrumbType[] })
      expect(client._isBreadcrumbTypeEnabled(type)).toBe(false)
    })
  })

  describe('startSession()', () => {
    it('calls the provided session delegate and return delegate’s return value', () => {
      const client = new Client({ apiKey: 'API_KEY' })
      client._sessionDelegate = {
        startSession: c => {
          expect(c).toBe(client)
          return c
        },
        pauseSession: noop,
        resumeSession: id
      }
      expect(client.startSession()).toBe(client)
    })

    it('tracks error counts using the session delegate and sends them in error payloads', (done) => {
      const client = new Client({ apiKey: 'API_KEY' })
      let i = 0
      client._sessionDelegate = {
        startSession: (client) => {
          client._session = new Session()
          return client
        },
        pauseSession: noop,
        resumeSession: id
      }
      client._setDelivery(client => ({
        sendSession: () => {},
        sendEvent: (payload: any, cb: any) => {
          // Only assert on the 10th (final) delivery: 6 handled notify()
          // calls + 4 unhandled _notify() calls below.
          if (++i < 10) return
          const r = JSON.parse(JSON.stringify(payload.events[0]))
          expect(r.session).toBeDefined()
          expect(r.session.events.handled).toBe(6)
          expect(r.session.events.unhandled).toBe(4)
          done()
        }
      }))
      const sessionClient = client.startSession()
      sessionClient.notify(new Error('broke'))
      sessionClient._notify(new Event('err', 'bad', [], { unhandled: true, severity: 'error', severityReason: { type: 'unhandledException' } }))
      sessionClient.notify(new Error('broke'))
      sessionClient.notify(new Error('broke'))
      sessionClient._notify(new Event('err', 'bad', [], { unhandled: true, severity: 'error', severityReason: { type: 'unhandledException' } }))
      sessionClient.notify(new Error('broke'))
      sessionClient.notify(new Error('broke'))
      sessionClient.notify(new Error('broke'))
      sessionClient._notify(new Event('err', 'bad', [], { unhandled: true, severity: 'error', severityReason: { type: 'unhandledException' } }))
      sessionClient._notify(new Event('err', 'bad', [], { unhandled: true, severity: 'error', severityReason: { type: 'unhandledException' } }))
    })

    it('does not start the session if onSession returns false', () => {
      const client = new Client({ apiKey: 'API_KEY', onSession: () => false })
      const sessionDelegate = {
        startSession: id,
        pauseSession: noop,
        resumeSession: id
      }
      client._sessionDelegate = sessionDelegate
      const startSpy = jest.spyOn(sessionDelegate, 'startSession')
      client.startSession()
      expect(startSpy).toHaveBeenCalledTimes(0)
    })

    it('tolerates errors in onSession callbacks', () => {
      const client = new Client({
        apiKey: 'API_KEY',
        onSession: () => {
          throw new Error('oh no')
        }
      })
      const sessionDelegate = {
        startSession: id,
        pauseSession: noop,
        resumeSession: id
      }
      client._sessionDelegate = sessionDelegate
      const startSpy = jest.spyOn(sessionDelegate, 'startSession')
      client.startSession()
      expect(startSpy).toHaveBeenCalledTimes(1)
    })
  })

  describe('callbacks', () => {
    it('supports adding and removing onError/onSession/onBreadcrumb callbacks', (done) => {
      const c = new Client({ apiKey: 'API_KEY' })
      c._setDelivery(client => ({
        sendEvent: (p, cb) => cb(null),
        sendSession: (s: any, cb: any) => cb(null)
      }))
      c._logger = console
      const sessionDelegate = {
        startSession: id,
        pauseSession: noop,
        resumeSession: id
      }
      c._sessionDelegate = sessionDelegate

      const eSpy = jest.fn()
      const bSpy = jest.fn()
      const sSpy = jest.fn()
      c.addOnError(eSpy)
      c.addOnSession(sSpy)
      c.addOnBreadcrumb(bSpy)

      expect(c._cbs.e.length).toBe(1)
      expect(c._cbs.s.length).toBe(1)
      expect(c._cbs.b.length).toBe(1)

      c.startSession()
      expect(sSpy).toHaveBeenCalledTimes(1)
      c.notify(new Error(), () => {}, () => {
        expect(bSpy).toHaveBeenCalledTimes(1)
        expect(eSpy).toHaveBeenCalledTimes(1)
        // After removal, none of the spies should be called again.
        c.removeOnError(eSpy)
        c.removeOnSession(sSpy)
        c.removeOnBreadcrumb(bSpy)
        c.startSession()
        expect(sSpy).toHaveBeenCalledTimes(1)
        c.notify(new Error(), () => {}, () => {
          expect(bSpy).toHaveBeenCalledTimes(1)
          expect(eSpy).toHaveBeenCalledTimes(1)
          done()
        })
      })
    })
  })

  describe('get/setContext()', () => {
    it('modifies and retreives context', () => {
      const c = new Client({ apiKey: 'API_KEY' })
      c.setContext('str')
      expect(c.getContext()).toBe('str')
    })

    it('can be set via config', () => {
      const c = new Client({ apiKey: 'API_KEY', context: 'str' })
      expect(c.getContext()).toBe('str')
    })
  })

  describe('add/get/clearMetadata()', () => {
    it('modifies and retrieves metadata', () => {
      const client = new Client({ apiKey: 'API_KEY' })
      client.addMetadata('a', 'b', 'c')
      expect(client.getMetadata('a')).toEqual({ b: 'c' })
      expect(client.getMetadata('a', 'b')).toBe('c')
      client.clearMetadata('a', 'b')
      expect(client.getMetadata('a', 'b')).toBe(undefined)
      client.clearMetadata('a')
      expect(client.getMetadata('a')).toBe(undefined)
    })

    it('can be set in config', () => {
      const client = new Client({
        apiKey: 'API_KEY',
        metadata: {
          'system metrics': {
            ms_since_last_jolt: 10032
          }
        }
      })
      expect(client.getMetadata('system metrics', 'ms_since_last_jolt')).toBe(10032)
    })
  })

  describe('pause/resumeSession()', () => {
    it('forwards on calls to the session delegate', () => {
      const client = new Client({ apiKey: 'API_KEY' })
      const sessionDelegate = {
        startSession: id,
        pauseSession: noop,
        resumeSession: id
      }
      client._sessionDelegate = sessionDelegate
      const startSpy = jest.spyOn(sessionDelegate, 'startSession')
      const pauseSpy = jest.spyOn(sessionDelegate, 'pauseSession')
      const resumeSpy = jest.spyOn(sessionDelegate, 'resumeSession')
      client._sessionDelegate = sessionDelegate
      client.startSession()
      expect(startSpy).toHaveBeenCalledTimes(1)
      client.pauseSession()
      expect(pauseSpy).toHaveBeenCalledTimes(1)
      client.resumeSession()
      expect(resumeSpy).toHaveBeenCalledTimes(1)
    })
  })

  describe('getUser() / setUser()', () => {
    it('sets and retrieves user properties', () => {
      const c = new Client({ apiKey: 'aaaa' })
      c.setUser('123')
      expect(c.getUser()).toEqual({ id: '123', email: undefined, name: undefined })
      c.setUser('123', 'bug@sn.ag')
      expect(c.getUser()).toEqual({ id: '123', email: 'bug@sn.ag', name: undefined })
      c.setUser('123', 'bug@sn.ag', 'Bug S. Nag')
      expect(c.getUser()).toEqual({ id: '123', email: 'bug@sn.ag', name: 'Bug S. Nag' })
      c.setUser()
      expect(c.getUser()).toEqual({ id: undefined, email: undefined, name: undefined })
    })

    it('can be set via config', () => {
      const c = new Client({
        apiKey: 'API_KEY',
        user: { id: '123', email: 'bug@sn.ag', name: 'Bug S. Nag' }
      })
      expect(c.getUser()).toEqual({ id: '123', email: 'bug@sn.ag', name: 'Bug S. Nag' })
    })
  })
})
the_stack
 * Autogenerated by @creditkarma/thrift-typescript v3.7.2
 * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 */
// NOTE(review): generated thrift TUnion — change the source .thrift IDL and
// regenerate rather than hand-editing this file.
import * as thrift from 'thrift';
import * as BsonType from './BsonType';
import * as DateType from './DateType';
import * as DecimalType from './DecimalType';
import * as EnumType from './EnumType';
import * as IntType from './IntType';
import * as JsonType from './JsonType';
import * as ListType from './ListType';
import * as MapType from './MapType';
import * as NullType from './NullType';
import * as StringType from './StringType';
import * as TimestampType from './TimestampType';
import * as TimeType from './TimeType';
import * as UUIDType from './UUIDType';
/**
 * Constructor arguments for the LogicalType union.
 * Exactly ONE of these optional fields may be set; the LogicalType
 * constructor throws if zero or more than one is provided.
 */
export interface ILogicalTypeArgs {
    STRING?: StringType.StringType;
    MAP?: MapType.MapType;
    LIST?: ListType.ListType;
    ENUM?: EnumType.EnumType;
    DECIMAL?: DecimalType.DecimalType;
    DATE?: DateType.DateType;
    TIME?: TimeType.TimeType;
    TIMESTAMP?: TimestampType.TimestampType;
    INTEGER?: IntType.IntType;
    UNKNOWN?: NullType.NullType;
    JSON?: JsonType.JsonType;
    BSON?: BsonType.BsonType;
    UUID?: UUIDType.UUIDType;
}
/**
 * Thrift TUnion over the possible logical type annotations. At most one
 * field is non-undefined on any valid instance. Wire field IDs run
 * 1-8 and 10-14 (ID 9 is unused — see write()/read() below).
 */
export class LogicalType {
    public STRING?: StringType.StringType;
    public MAP?: MapType.MapType;
    public LIST?: ListType.ListType;
    public ENUM?: EnumType.EnumType;
    public DECIMAL?: DecimalType.DecimalType;
    public DATE?: DateType.DateType;
    public TIME?: TimeType.TimeType;
    public TIMESTAMP?: TimestampType.TimestampType;
    public INTEGER?: IntType.IntType;
    public UNKNOWN?: NullType.NullType;
    public JSON?: JsonType.JsonType;
    public BSON?: BsonType.BsonType;
    public UUID?: UUIDType.UUIDType;
    /**
     * Copies the single provided field onto the instance and enforces the
     * union invariant: exactly one field set when args are supplied.
     */
    constructor(args?: ILogicalTypeArgs) {
        // Counts how many union fields the caller provided.
        let _fieldsSet: number = 0;
        if (args != null) {
            if (args.STRING != null) {
                _fieldsSet++;
                this.STRING = args.STRING;
            }
            if (args.MAP != null) {
                _fieldsSet++;
                this.MAP = args.MAP;
            }
            if (args.LIST != null) {
                _fieldsSet++;
                this.LIST = args.LIST;
            }
            if (args.ENUM != null) {
                _fieldsSet++;
                this.ENUM = args.ENUM;
            }
            if (args.DECIMAL != null) {
                _fieldsSet++;
                this.DECIMAL = args.DECIMAL;
            }
            if (args.DATE != null) {
                _fieldsSet++;
                this.DATE = args.DATE;
            }
            if (args.TIME != null) {
                _fieldsSet++;
                this.TIME = args.TIME;
            }
            if (args.TIMESTAMP != null) {
                _fieldsSet++;
                this.TIMESTAMP = args.TIMESTAMP;
            }
            if (args.INTEGER != null) {
                _fieldsSet++;
                this.INTEGER = args.INTEGER;
            }
            if (args.UNKNOWN != null) {
                _fieldsSet++;
                this.UNKNOWN = args.UNKNOWN;
            }
            if (args.JSON != null) {
                _fieldsSet++;
                this.JSON = args.JSON;
            }
            if (args.BSON != null) {
                _fieldsSet++;
                this.BSON = args.BSON;
            }
            if (args.UUID != null) {
                _fieldsSet++;
                this.UUID = args.UUID;
            }
            // Enforce the TUnion invariant on construction.
            if (_fieldsSet > 1) {
                throw new thrift.Thrift.TProtocolException(
                    thrift.Thrift.TProtocolExceptionType.INVALID_DATA,
                    'Cannot read a TUnion with more than one set value!'
                );
            }
            else if (_fieldsSet < 1) {
                throw new thrift.Thrift.TProtocolException(
                    thrift.Thrift.TProtocolExceptionType.INVALID_DATA,
                    'Cannot read a TUnion with no set value!'
                );
            }
        }
    }
    // Convenience factories: build a union with the named field set.
    public static fromSTRING(STRING: StringType.StringType): LogicalType {
        return new LogicalType({STRING});
    }
    public static fromMAP(MAP: MapType.MapType): LogicalType {
        return new LogicalType({MAP});
    }
    public static fromLIST(LIST: ListType.ListType): LogicalType {
        return new LogicalType({LIST});
    }
    public static fromENUM(ENUM: EnumType.EnumType): LogicalType {
        return new LogicalType({ENUM});
    }
    public static fromDECIMAL(DECIMAL: DecimalType.DecimalType): LogicalType {
        return new LogicalType({DECIMAL});
    }
    public static fromDATE(DATE: DateType.DateType): LogicalType {
        return new LogicalType({DATE});
    }
    public static fromTIME(TIME: TimeType.TimeType): LogicalType {
        return new LogicalType({TIME});
    }
    public static fromTIMESTAMP(TIMESTAMP: TimestampType.TimestampType): LogicalType {
        return new LogicalType({TIMESTAMP});
    }
    public static fromINTEGER(INTEGER: IntType.IntType): LogicalType {
        return new LogicalType({INTEGER});
    }
    public static fromUNKNOWN(UNKNOWN: NullType.NullType): LogicalType {
        return new LogicalType({UNKNOWN});
    }
    public static fromJSON(JSON: JsonType.JsonType): LogicalType {
        return new LogicalType({JSON});
    }
    public static fromBSON(BSON: BsonType.BsonType): LogicalType {
        return new LogicalType({BSON});
    }
    public static fromUUID(UUID: UUIDType.UUIDType): LogicalType {
        return new LogicalType({UUID});
    }
    /**
     * Serializes the union to the given protocol. Only the (single) set
     * field is written; note the wire IDs jump from 8 to 10.
     */
    public write(output: thrift.TProtocol): void {
        output.writeStructBegin('LogicalType');
        if (this.STRING != null) {
            output.writeFieldBegin('STRING', thrift.Thrift.Type.STRUCT, 1);
            this.STRING.write(output);
            output.writeFieldEnd();
        }
        if (this.MAP != null) {
            output.writeFieldBegin('MAP', thrift.Thrift.Type.STRUCT, 2);
            this.MAP.write(output);
            output.writeFieldEnd();
        }
        if (this.LIST != null) {
            output.writeFieldBegin('LIST', thrift.Thrift.Type.STRUCT, 3);
            this.LIST.write(output);
            output.writeFieldEnd();
        }
        if (this.ENUM != null) {
            output.writeFieldBegin('ENUM', thrift.Thrift.Type.STRUCT, 4);
            this.ENUM.write(output);
            output.writeFieldEnd();
        }
        if (this.DECIMAL != null) {
            output.writeFieldBegin('DECIMAL', thrift.Thrift.Type.STRUCT, 5);
            this.DECIMAL.write(output);
            output.writeFieldEnd();
        }
        if (this.DATE != null) {
            output.writeFieldBegin('DATE', thrift.Thrift.Type.STRUCT, 6);
            this.DATE.write(output);
            output.writeFieldEnd();
        }
        if (this.TIME != null) {
            output.writeFieldBegin('TIME', thrift.Thrift.Type.STRUCT, 7);
            this.TIME.write(output);
            output.writeFieldEnd();
        }
        if (this.TIMESTAMP != null) {
            output.writeFieldBegin('TIMESTAMP', thrift.Thrift.Type.STRUCT, 8);
            this.TIMESTAMP.write(output);
            output.writeFieldEnd();
        }
        // Field ID 9 is intentionally absent from the IDL.
        if (this.INTEGER != null) {
            output.writeFieldBegin('INTEGER', thrift.Thrift.Type.STRUCT, 10);
            this.INTEGER.write(output);
            output.writeFieldEnd();
        }
        if (this.UNKNOWN != null) {
            output.writeFieldBegin('UNKNOWN', thrift.Thrift.Type.STRUCT, 11);
            this.UNKNOWN.write(output);
            output.writeFieldEnd();
        }
        if (this.JSON != null) {
            output.writeFieldBegin('JSON', thrift.Thrift.Type.STRUCT, 12);
            this.JSON.write(output);
            output.writeFieldEnd();
        }
        if (this.BSON != null) {
            output.writeFieldBegin('BSON', thrift.Thrift.Type.STRUCT, 13);
            this.BSON.write(output);
            output.writeFieldEnd();
        }
        if (this.UUID != null) {
            output.writeFieldBegin('UUID', thrift.Thrift.Type.STRUCT, 14);
            this.UUID.write(output);
            output.writeFieldEnd();
        }
        output.writeFieldStop();
        output.writeStructEnd();
        return;
    }
    /**
     * Deserializes a LogicalType from the given protocol. Unknown or
     * mistyped fields are skipped; throws if zero or multiple union
     * fields were present on the wire.
     */
    public static read(input: thrift.TProtocol): LogicalType {
        let _fieldsSet: number = 0;
        let _returnValue: LogicalType | null = null;
        input.readStructBegin();
        while (true) {
            const ret: thrift.TField = input.readFieldBegin();
            const fieldType: thrift.Thrift.Type = ret.ftype;
            const fieldId: number = ret.fid;
            if (fieldType === thrift.Thrift.Type.STOP) {
                break;
            }
            switch (fieldId) {
                case 1:
                    if (fieldType === thrift.Thrift.Type.STRUCT) {
                        _fieldsSet++;
                        const value_1: StringType.StringType = StringType.StringType.read(input);
                        _returnValue = LogicalType.fromSTRING(value_1);
                    }
                    else {
                        input.skip(fieldType);
                    }
                    break;
                case 2:
                    if (fieldType === thrift.Thrift.Type.STRUCT) {
                        _fieldsSet++;
                        const value_2: MapType.MapType = MapType.MapType.read(input);
                        _returnValue = LogicalType.fromMAP(value_2);
                    }
                    else {
                        input.skip(fieldType);
                    }
                    break;
                case 3:
                    if (fieldType === thrift.Thrift.Type.STRUCT) {
                        _fieldsSet++;
                        const value_3: ListType.ListType = ListType.ListType.read(input);
                        _returnValue = LogicalType.fromLIST(value_3);
                    }
                    else {
                        input.skip(fieldType);
                    }
                    break;
                case 4:
                    if (fieldType === thrift.Thrift.Type.STRUCT) {
                        _fieldsSet++;
                        const value_4: EnumType.EnumType = EnumType.EnumType.read(input);
                        _returnValue = LogicalType.fromENUM(value_4);
                    }
                    else {
                        input.skip(fieldType);
                    }
                    break;
                case 5:
                    if (fieldType === thrift.Thrift.Type.STRUCT) {
                        _fieldsSet++;
                        const value_5: DecimalType.DecimalType = DecimalType.DecimalType.read(input);
                        _returnValue = LogicalType.fromDECIMAL(value_5);
                    }
                    else {
                        input.skip(fieldType);
                    }
                    break;
                case 6:
                    if (fieldType === thrift.Thrift.Type.STRUCT) {
                        _fieldsSet++;
                        const value_6: DateType.DateType = DateType.DateType.read(input);
                        _returnValue = LogicalType.fromDATE(value_6);
                    }
                    else {
                        input.skip(fieldType);
                    }
                    break;
                case 7:
                    if (fieldType === thrift.Thrift.Type.STRUCT) {
                        _fieldsSet++;
                        const value_7: TimeType.TimeType = TimeType.TimeType.read(input);
                        _returnValue = LogicalType.fromTIME(value_7);
                    }
                    else {
                        input.skip(fieldType);
                    }
                    break;
                case 8:
                    if (fieldType === thrift.Thrift.Type.STRUCT) {
                        _fieldsSet++;
                        const value_8: TimestampType.TimestampType = TimestampType.TimestampType.read(input);
                        _returnValue = LogicalType.fromTIMESTAMP(value_8);
                    }
                    else {
                        input.skip(fieldType);
                    }
                    break;
                case 10:
                    if (fieldType === thrift.Thrift.Type.STRUCT) {
                        _fieldsSet++;
                        const value_9: IntType.IntType = IntType.IntType.read(input);
                        _returnValue = LogicalType.fromINTEGER(value_9);
                    }
                    else {
                        input.skip(fieldType);
                    }
                    break;
                case 11:
                    if (fieldType === thrift.Thrift.Type.STRUCT) {
                        _fieldsSet++;
                        const value_10: NullType.NullType = NullType.NullType.read(input);
                        _returnValue = LogicalType.fromUNKNOWN(value_10);
                    }
                    else {
                        input.skip(fieldType);
                    }
                    break;
                case 12:
                    if (fieldType === thrift.Thrift.Type.STRUCT) {
                        _fieldsSet++;
                        const value_11: JsonType.JsonType = JsonType.JsonType.read(input);
                        _returnValue = LogicalType.fromJSON(value_11);
                    }
                    else {
                        input.skip(fieldType);
                    }
                    break;
                case 13:
                    if (fieldType === thrift.Thrift.Type.STRUCT) {
                        _fieldsSet++;
                        const value_12: BsonType.BsonType = BsonType.BsonType.read(input);
                        _returnValue = LogicalType.fromBSON(value_12);
                    }
                    else {
                        input.skip(fieldType);
                    }
                    break;
                case 14:
                    if (fieldType === thrift.Thrift.Type.STRUCT) {
                        _fieldsSet++;
                        const value_13: UUIDType.UUIDType = UUIDType.UUIDType.read(input);
                        _returnValue = LogicalType.fromUUID(value_13);
                    }
                    else {
                        input.skip(fieldType);
                    }
                    break;
                default: {
                    // Unknown field ID: skip for forward compatibility.
                    input.skip(fieldType);
                }
            }
            input.readFieldEnd();
        }
        input.readStructEnd();
        if (_fieldsSet > 1) {
            throw new thrift.Thrift.TProtocolException(
                thrift.Thrift.TProtocolExceptionType.INVALID_DATA,
                'Cannot read a TUnion with more than one set value!'
            );
        }
        else if (_fieldsSet < 1) {
            throw new thrift.Thrift.TProtocolException(
                thrift.Thrift.TProtocolExceptionType.INVALID_DATA,
                'Cannot read a TUnion with no set value!'
            );
        }
        if (_returnValue !== null) {
            return _returnValue;
        }
        else {
            throw new thrift.Thrift.TProtocolException(
                thrift.Thrift.TProtocolExceptionType.UNKNOWN,
                'Unable to read data for TUnion'
            );
        }
    }
}
the_stack
import { Path } from './util/Path';
import { ChildrenNode } from './snap/ChildrenNode';
import { NamedNode, Node } from './snap/Node';
import { CacheNode } from './view/CacheNode';
import { Index } from './snap/indexes/Index';
/**
 * Defines a single user-initiated write operation. May be the result of a set(), transaction(), or update() call. In
 * the case of a set() or transaction, snap will be non-null. In the case of an update(), children will be non-null.
 */
export interface WriteRecord {
    writeId: number;
    path: Path;
    snap?: Node | null;
    children?: {
        [k: string]: Node;
    } | null;
    visible: boolean;
}
/**
 * WriteTree tracks all pending user-initiated writes and has methods to calculate the result of merging them
 * with underlying server data (to create "event cache" data). Pending writes are added with addOverwrite()
 * and addMerge(), and removed with removeWrite().
 *
 * @constructor
 */
export declare class WriteTree {
    /**
     * A tree tracking the result of applying all visible writes. This does not include transactions with
     * applyLocally=false or writes that are completely shadowed by other writes.
     *
     * @type {!CompoundWrite}
     * @private
     */
    private visibleWrites_;
    /**
     * A list of all pending writes, regardless of visibility and shadowed-ness. Used to calculate arbitrary
     * sets of the changed data, such as hidden writes (from transactions) or changes with certain writes excluded (also
     * used by transactions).
     *
     * @type {!Array.<!WriteRecord>}
     * @private
     */
    private allWrites_;
    // NOTE(review): undocumented in the original; presumably tracks the id of the most
    // recently added write — confirm against the implementation.
    private lastWriteId_;
    /**
     * Create a new WriteTreeRef for the given path. For use with a new sync point at the given path.
     *
     * @param {!Path} path
     * @return {!WriteTreeRef}
     */
    childWrites(path: Path): WriteTreeRef;
    /**
     * Record a new overwrite from user code.
     *
     * @param {!Path} path
     * @param {!Node} snap
     * @param {!number} writeId
     * @param {boolean=} visible This is set to false by some transactions. It should be excluded from event caches
     */
    addOverwrite(path: Path, snap: Node, writeId: number, visible?: boolean): void;
    /**
     * Record a new merge from user code.
     *
     * @param {!Path} path
     * @param {!Object.<string, !Node>} changedChildren
     * @param {!number} writeId
     */
    addMerge(path: Path, changedChildren: {
        [k: string]: Node;
    }, writeId: number): void;
    /**
     * Look up a pending write record by its id, or null if no such write is pending.
     *
     * @param {!number} writeId
     * @return {?WriteRecord}
     */
    getWrite(writeId: number): WriteRecord | null;
    /**
     * Remove a write (either an overwrite or merge) that has been successfully acknowledged by the server. Recalculates
     * the tree if necessary. We return true if it may have been visible, meaning views need to reevaluate.
     *
     * @param {!number} writeId
     * @return {boolean} true if the write may have been visible (meaning we'll need to reevaluate / raise
     * events as a result).
     */
    removeWrite(writeId: number): boolean;
    /**
     * Return a complete snapshot for the given path if there's visible write data at that path, else null.
     * No server data is considered.
     *
     * @param {!Path} path
     * @return {?Node}
     */
    getCompleteWriteData(path: Path): Node | null;
    /**
     * Given optional, underlying server data, and an optional set of constraints (exclude some sets, include hidden
     * writes), attempt to calculate a complete snapshot for the given path
     *
     * @param {!Path} treePath
     * @param {?Node} completeServerCache
     * @param {Array.<number>=} writeIdsToExclude An optional set to be excluded
     * @param {boolean=} includeHiddenWrites Defaults to false, whether or not to layer on writes with visible set to false
     * @return {?Node}
     */
    calcCompleteEventCache(treePath: Path, completeServerCache: Node | null, writeIdsToExclude?: number[], includeHiddenWrites?: boolean): Node | null;
    /**
     * With optional, underlying server data, attempt to return a children node of children that we have complete data for.
     * Used when creating new views, to pre-fill their complete event children snapshot.
     *
     * @param {!Path} treePath
     * @param {?ChildrenNode} completeServerChildren
     * @return {!ChildrenNode}
     */
    calcCompleteEventChildren(treePath: Path, completeServerChildren: ChildrenNode | null): Node;
    /**
     * Given that the underlying server data has updated, determine what, if anything, needs to be
     * applied to the event cache.
     *
     * Possibilities:
     *
     * 1. No writes are shadowing. Events should be raised, the snap to be applied comes from the server data
     *
     * 2. Some write is completely shadowing. No events to be raised
     *
     * 3. Is partially shadowed. Events should be raised
     *
     * Either existingEventSnap or existingServerSnap must exist
     *
     * @param {!Path} treePath
     * @param {!Path} childPath
     * @param {?Node} existingEventSnap
     * @param {?Node} existingServerSnap
     * @return {?Node}
     */
    calcEventCacheAfterServerOverwrite(treePath: Path, childPath: Path, existingEventSnap: Node | null, existingServerSnap: Node | null): Node | null;
    /**
     * Returns a complete child for a given server snap after applying all user writes or null if there is no
     * complete child for this ChildKey.
     *
     * @param {!Path} treePath
     * @param {!string} childKey
     * @param {!CacheNode} existingServerSnap
     * @return {?Node}
     */
    calcCompleteChild(treePath: Path, childKey: string, existingServerSnap: CacheNode): Node | null;
    /**
     * Returns a node if there is a complete overwrite for this path. More specifically, if there is a write at
     * a higher path, this will return the child of that write relative to the write and this path.
     * Returns null if there is no write at this path.
     */
    shadowingWrite(path: Path): Node | null;
    /**
     * This method is used when processing child remove events on a query. If we can, we pull in children that were outside
     * the window, but may now be in the window.
     */
    calcIndexedSlice(treePath: Path, completeServerData: Node | null, startPost: NamedNode, count: number, reverse: boolean, index: Index): NamedNode[];
    // NOTE(review): undocumented in the original; name suggests it tests whether a
    // WriteRecord affects a given path — confirm in the implementation.
    private recordContainsPath_;
    /**
     * Re-layer the writes and merges into a tree so we can efficiently calculate event snapshots
     */
    private resetTree_;
    /**
     * The default filter used when constructing the tree. Keep everything that's visible.
     */
    private static DefaultFilter_;
    /**
     * Static method. Given an array of WriteRecords, a filter for which ones to include, and a path, construct the tree of
     * event data at that path.
     */
    private static layerTree_;
}
/**
 * A WriteTreeRef wraps a WriteTree and a path, for convenient access to a particular subtree. All of the methods
 * just proxy to the underlying WriteTree.
 *
 * @constructor
 */
export declare class WriteTreeRef {
    /**
     * The path to this particular write tree ref. Used for calling methods on writeTree_ while exposing a simpler
     * interface to callers.
     *
     * @type {!Path}
     * @private
     * @const
     */
    private readonly treePath_;
    /**
     *
     * A reference to the actual tree of write data. All methods are pass-through to the tree, but with the appropriate
     * path prefixed.
     *
     * This lets us make cheap references to points in the tree for sync points without having to copy and maintain all of
     * the data.
     *
     * @type {!WriteTree}
     * @private
     * @const
     */
    private readonly writeTree_;
    /**
     * @param {!Path} path
     * @param {!WriteTree} writeTree
     */
    constructor(path: Path, writeTree: WriteTree);
    /**
     * If possible, returns a complete event cache, using the underlying server data if possible. In addition, can be used
     * to get a cache that includes hidden writes, and excludes arbitrary writes. Note that customizing the returned node
     * can lead to a more expensive calculation.
     *
     * @param {?Node} completeServerCache
     * @param {Array.<number>=} writeIdsToExclude Optional writes to exclude.
     * @param {boolean=} includeHiddenWrites Defaults to false, whether or not to layer on writes with visible set to false
     * @return {?Node}
     */
    calcCompleteEventCache(completeServerCache: Node | null, writeIdsToExclude?: number[], includeHiddenWrites?: boolean): Node | null;
    /**
     * If possible, returns a children node containing all of the complete children we have data for. The returned data is a
     * mix of the given server data and write data.
     *
     * @param {?ChildrenNode} completeServerChildren
     * @return {!ChildrenNode}
     */
    calcCompleteEventChildren(completeServerChildren: ChildrenNode | null): ChildrenNode;
    /**
     * Given that either the underlying server data has updated or the outstanding writes have updated, determine what,
     * if anything, needs to be applied to the event cache.
     *
     * Possibilities:
     *
     * 1. No writes are shadowing. Events should be raised, the snap to be applied comes from the server data
     *
     * 2. Some write is completely shadowing. No events to be raised
     *
     * 3. Is partially shadowed. Events should be raised
     *
     * Either existingEventSnap or existingServerSnap must exist, this is validated via an assert
     *
     * @param {!Path} path
     * @param {?Node} existingEventSnap
     * @param {?Node} existingServerSnap
     * @return {?Node}
     */
    calcEventCacheAfterServerOverwrite(path: Path, existingEventSnap: Node | null, existingServerSnap: Node | null): Node | null;
    /**
     * Returns a node if there is a complete overwrite for this path. More specifically, if there is a write at
     * a higher path, this will return the child of that write relative to the write and this path.
     * Returns null if there is no write at this path.
     *
     * @param {!Path} path
     * @return {?Node}
     */
    shadowingWrite(path: Path): Node | null;
    /**
     * This method is used when processing child remove events on a query. If we can, we pull in children that were outside
     * the window, but may now be in the window
     *
     * @param {?Node} completeServerData
     * @param {!NamedNode} startPost
     * @param {!number} count
     * @param {boolean} reverse
     * @param {!Index} index
     * @return {!Array.<!NamedNode>}
     */
    calcIndexedSlice(completeServerData: Node | null, startPost: NamedNode, count: number, reverse: boolean, index: Index): NamedNode[];
    /**
     * Returns a complete child for a given server snap after applying all user writes or null if there is no
     * complete child for this ChildKey.
     *
     * @param {!string} childKey
     * @param {!CacheNode} existingServerCache
     * @return {?Node}
     */
    calcCompleteChild(childKey: string, existingServerCache: CacheNode): Node | null;
    /**
     * Return a WriteTreeRef for a child.
     *
     * @param {string} childName
     * @return {!WriteTreeRef}
     */
    child(childName: string): WriteTreeRef;
}
the_stack
import * as vscode from "vscode"; import { Packager } from "../common/packager"; import { RNPackageVersions } from "../common/projectVersionHelper"; import { ExponentHelper } from "./exponent/exponentHelper"; import { ReactDirManager } from "./reactDirManager"; import { SettingsHelper } from "./settingsHelper"; import { PackagerStatusIndicator } from "./packagerStatusIndicator"; import { CommandExecutor } from "../common/commandExecutor"; import { isNullOrUndefined } from "../common/utils"; import { OutputChannelLogger } from "./log/OutputChannelLogger"; import { GeneralPlatform, MobilePlatformDeps, TargetType } from "./generalPlatform"; import { PlatformResolver } from "./platformResolver"; import { ProjectVersionHelper } from "../common/projectVersionHelper"; import { TelemetryHelper } from "../common/telemetryHelper"; import { ErrorHelper } from "../common/error/errorHelper"; import { InternalErrorCode } from "../common/error/internalErrorCode"; import { TargetPlatformHelper } from "../common/targetPlatformHelper"; import { getNodeModulesInFolderHierarchy } from "../common/extensionHelper"; import { ProjectsStorage } from "./projectsStorage"; import { ReactNativeCDPProxy } from "../cdp-proxy/reactNativeCDPProxy"; import { generateRandomPortNumber } from "../common/extensionHelper"; import { DEBUG_TYPES } from "./debuggingConfiguration/debugConfigTypesAndConstants"; import * as nls from "vscode-nls"; import { MultipleLifetimesAppWorker } from "../debugger/appWorker"; import { PlatformType } from "./launchArgs"; import { LaunchScenariosManager } from "./launchScenariosManager"; import { createAdditionalWorkspaceFolder, onFolderAdded } from "./rn-extension"; import { GeneralMobilePlatform } from "./generalMobilePlatform"; nls.config({ messageFormat: nls.MessageFormat.bundle, bundleFormat: nls.BundleFormat.standalone, })(); const localize = nls.loadMessageBundle(); export class AppLauncher { private readonly cdpProxyPort: number; private readonly 
cdpProxyHostAddress: string; private appWorker: MultipleLifetimesAppWorker | null; private packager: Packager; private exponentHelper: ExponentHelper; private reactDirManager: ReactDirManager; private workspaceFolder: vscode.WorkspaceFolder; private reactNativeVersions?: RNPackageVersions; private rnCdpProxy: ReactNativeCDPProxy; private logger: OutputChannelLogger = OutputChannelLogger.getMainChannel(); private mobilePlatform: GeneralPlatform; private launchScenariosManager: LaunchScenariosManager; private debugConfigurationRoot: string; private nodeModulesRoot?: string; public static getAppLauncherByProjectRootPath(projectRootPath: string): AppLauncher { const appLauncher = ProjectsStorage.projectsCache[projectRootPath.toLowerCase()]; if (!appLauncher) { throw new Error( `Could not find AppLauncher by the project root path ${projectRootPath}`, ); } return appLauncher; } public static async getOrCreateAppLauncherByProjectRootPath( projectRootPath: string, ): Promise<AppLauncher> { let appLauncher = ProjectsStorage.projectsCache[projectRootPath.toLowerCase()]; if (!appLauncher) { const appLauncherFolder = createAdditionalWorkspaceFolder(projectRootPath); if (appLauncherFolder) { await onFolderAdded(appLauncherFolder); appLauncher = ProjectsStorage.projectsCache[appLauncherFolder.uri.fsPath.toLocaleLowerCase()]; } if (!appLauncher) { throw new Error( `Could not find AppLauncher by the project root path ${projectRootPath}`, ); } } return appLauncher; } public static getNodeModulesRootByProjectPath(projectRootPath: string): string { const appLauncher: AppLauncher = AppLauncher.getAppLauncherByProjectRootPath( projectRootPath, ); return appLauncher.getOrUpdateNodeModulesRoot(); } constructor(reactDirManager: ReactDirManager, workspaceFolder: vscode.WorkspaceFolder) { // constants definition this.cdpProxyPort = generateRandomPortNumber(); this.cdpProxyHostAddress = "127.0.0.1"; // localhost const rootPath = workspaceFolder.uri.fsPath; this.debugConfigurationRoot = 
rootPath; this.launchScenariosManager = new LaunchScenariosManager(this.debugConfigurationRoot); const projectRootPath = SettingsHelper.getReactNativeProjectRoot(rootPath); this.exponentHelper = new ExponentHelper(rootPath, projectRootPath); const packagerStatusIndicator: PackagerStatusIndicator = new PackagerStatusIndicator( rootPath, ); this.packager = new Packager( rootPath, projectRootPath, SettingsHelper.getPackagerPort(workspaceFolder.uri.fsPath), packagerStatusIndicator, ); this.packager.setExponentHelper(this.exponentHelper); this.reactDirManager = reactDirManager; this.workspaceFolder = workspaceFolder; this.rnCdpProxy = new ReactNativeCDPProxy(this.cdpProxyHostAddress, this.cdpProxyPort); } public updateDebugConfigurationRoot(debugConfigurationRoot: string): void { if (this.debugConfigurationRoot !== debugConfigurationRoot) { this.debugConfigurationRoot = debugConfigurationRoot; this.launchScenariosManager = new LaunchScenariosManager(this.debugConfigurationRoot); } } public getCdpProxyPort(): number { return this.cdpProxyPort; } public getRnCdpProxy(): ReactNativeCDPProxy { return this.rnCdpProxy; } public getPackager(): Packager { return this.packager; } public getWorkspaceFolderUri(): vscode.Uri { return this.workspaceFolder.uri; } public getWorkspaceFolder(): vscode.WorkspaceFolder { return this.workspaceFolder; } public getReactNativeVersions(): RNPackageVersions | undefined { return this.reactNativeVersions; } public getExponentHelper(): ExponentHelper { return this.exponentHelper; } public getReactDirManager(): ReactDirManager { return this.reactDirManager; } public setReactNativeVersions(reactNativeVersions: RNPackageVersions): void { this.reactNativeVersions = reactNativeVersions; } public setAppWorker(appWorker: MultipleLifetimesAppWorker): void { this.appWorker = appWorker; } public getAppWorker(): MultipleLifetimesAppWorker | null { return this.appWorker; } public getMobilePlatform(): GeneralPlatform { return this.mobilePlatform; } public 
getOrUpdateNodeModulesRoot(forceUpdate: boolean = false): string { if (!this.nodeModulesRoot || forceUpdate) { const nodeModulesRootPath: string | null = getNodeModulesInFolderHierarchy( this.packager.getProjectPath(), ); if (!nodeModulesRootPath) { throw ErrorHelper.getInternalError( InternalErrorCode.ReactNativePackageIsNotInstalled, ); } this.nodeModulesRoot = nodeModulesRootPath; } return <string>this.nodeModulesRoot; } public dispose(): void { this.packager.getStatusIndicator().dispose(); this.packager.stop(true); this.mobilePlatform.dispose(); } public async openFileAtLocation(filename: string, lineNumber: number): Promise<void> { const document = await vscode.workspace.openTextDocument(vscode.Uri.file(filename)); const editor = await vscode.window.showTextDocument(document); let range = editor.document.lineAt(lineNumber - 1).range; editor.selection = new vscode.Selection(range.start, range.end); editor.revealRange(range, vscode.TextEditorRevealType.InCenter); } public getPackagerPort(projectFolder: string): number { return SettingsHelper.getPackagerPort(projectFolder); } public async launch(launchArgs: any): Promise<any> { let mobilePlatformOptions = this.requestSetup(launchArgs); // We add the parameter if it's defined (adapter crashes otherwise) if (!isNullOrUndefined(launchArgs.logCatArguments)) { mobilePlatformOptions.logCatArguments = [ this.parseLogCatArguments(launchArgs.logCatArguments), ]; } if (!isNullOrUndefined(launchArgs.variant)) { mobilePlatformOptions.variant = launchArgs.variant; } if (!isNullOrUndefined(launchArgs.scheme)) { mobilePlatformOptions.scheme = launchArgs.scheme; } if (!isNullOrUndefined(launchArgs.productName)) { mobilePlatformOptions.productName = launchArgs.productName; } if (!isNullOrUndefined(launchArgs.launchActivity)) { mobilePlatformOptions.debugLaunchActivity = launchArgs.launchActivity; } if (launchArgs.type === DEBUG_TYPES.REACT_NATIVE_DIRECT) { mobilePlatformOptions.isDirect = true; } 
mobilePlatformOptions.packagerPort = SettingsHelper.getPackagerPort( launchArgs.cwd || launchArgs.program, ); const platformDeps: MobilePlatformDeps = { packager: this.packager, }; this.mobilePlatform = new PlatformResolver().resolveMobilePlatform( launchArgs.platform, mobilePlatformOptions, platformDeps, ); let extProps: any = { platform: { value: launchArgs.platform, isPii: false, }, }; if (mobilePlatformOptions.isDirect) { extProps.isDirect = { value: true, isPii: false, }; } try { const versions = await ProjectVersionHelper.getReactNativePackageVersionsFromNodeModules( mobilePlatformOptions.nodeModulesRoot, ProjectVersionHelper.generateAdditionalPackagesToCheckByPlatform(launchArgs), ); mobilePlatformOptions.reactNativeVersions = versions; extProps = TelemetryHelper.addPlatformPropertiesToTelemetryProperties( launchArgs, versions, extProps, ); await TelemetryHelper.generate("launch", extProps, async generator => { try { if (this.mobilePlatform instanceof GeneralMobilePlatform) { generator.step("resolveMobileTarget"); await this.resolveAndSaveMobileTarget(launchArgs, this.mobilePlatform); } await this.mobilePlatform.beforeStartPackager(); generator.step("checkPlatformCompatibility"); TargetPlatformHelper.checkTargetPlatformSupport(mobilePlatformOptions.platform); generator.step("startPackager"); await this.mobilePlatform.startPackager(); // We've seen that if we don't prewarm the bundle cache, the app fails on the first attempt to connect to the debugger logic // and the user needs to Reload JS manually. We prewarm it to prevent that issue generator.step("prewarmBundleCache"); this.logger.info( localize( "PrewarmingBundleCache", "Prewarming bundle cache. 
This may take a while ...", ), ); await this.mobilePlatform.prewarmBundleCache(); generator .step("mobilePlatform.runApp") .add("target", mobilePlatformOptions.target, false); this.logger.info( localize( "BuildingAndRunningApplication", "Building and running application.", ), ); await this.mobilePlatform.runApp(); if (mobilePlatformOptions.isDirect) { if (launchArgs.useHermesEngine) { generator.step("mobilePlatform.enableHermesDebuggingMode"); if (mobilePlatformOptions.enableDebug) { this.logger.info( localize( "PrepareHermesDebugging", "Prepare Hermes debugging (experimental)", ), ); } else { this.logger.info( localize( "PrepareHermesLaunch", "Prepare Hermes launch (experimental)", ), ); } } else if (launchArgs.platform === PlatformType.iOS) { generator.step("mobilePlatform.enableIosDirectDebuggingMode"); if (mobilePlatformOptions.enableDebug) { this.logger.info( localize( "PrepareDirectIosDebugging", "Prepare direct iOS debugging (experimental)", ), ); } else { this.logger.info( localize( "PrepareDirectIosLaunch", "Prepare direct iOS launch (experimental)", ), ); } } generator.step("mobilePlatform.disableJSDebuggingMode"); this.logger.info(localize("DisableJSDebugging", "Disable JS Debugging")); await this.mobilePlatform.disableJSDebuggingMode(); } else { generator.step("mobilePlatform.enableJSDebuggingMode"); this.logger.info(localize("EnableJSDebugging", "Enable JS Debugging")); await this.mobilePlatform.enableJSDebuggingMode(); } } catch (error) { if ( !mobilePlatformOptions.enableDebug && launchArgs.platform === PlatformType.iOS && launchArgs.type === DEBUG_TYPES.REACT_NATIVE ) { // If we disable debugging mode for iOS scenarios, we'll we ignore the error and run the 'run-ios' command anyway, // since the error doesn't affects an application launch process return; } generator.addError(error); this.logger.error(error); throw error; } }); } catch (error) { if (error && error.errorCode) { if (error.errorCode === 
InternalErrorCode.ReactNativePackageIsNotInstalled) { TelemetryHelper.sendErrorEvent( "ReactNativePackageIsNotInstalled", ErrorHelper.getInternalError( InternalErrorCode.ReactNativePackageIsNotInstalled, ), ); } else if (error.errorCode === InternalErrorCode.ReactNativeWindowsIsNotInstalled) { TelemetryHelper.sendErrorEvent( "ReactNativeWindowsPackageIsNotInstalled", ErrorHelper.getInternalError( InternalErrorCode.ReactNativeWindowsIsNotInstalled, ), ); } } this.logger.error(error); throw error; } } private async resolveAndSaveMobileTarget( launchArgs: any, mobilePlatform: GeneralMobilePlatform, ): Promise<void> { if (launchArgs.target && !(await mobilePlatform.getTargetFromRunArgs())) { const isAnyTarget = launchArgs.target.toLowerCase() === TargetType.Simulator || launchArgs.target.toLowerCase() === TargetType.Device; const resultTarget = await mobilePlatform.resolveMobileTarget(launchArgs.target); // Save the result to config in case there are more than one possible target with this type (simulator/device) if (resultTarget && isAnyTarget) { const targetsCount = await mobilePlatform.getTargetsCountByFilter( target => target.isVirtualTarget === resultTarget.isVirtualTarget, ); if (targetsCount > 1) { this.launchScenariosManager.updateLaunchScenario(launchArgs, { target: launchArgs.platform === PlatformType.Android ? 
resultTarget.name : resultTarget.id, }); } } } } private requestSetup(args: any): any { const workspaceFolder: vscode.WorkspaceFolder = <vscode.WorkspaceFolder>( vscode.workspace.getWorkspaceFolder(vscode.Uri.file(args.cwd || args.program)) ); const projectRootPath = this.getProjectRoot(args); let mobilePlatformOptions: any = { workspaceRoot: workspaceFolder.uri.fsPath, projectRoot: projectRootPath, platform: args.platform, env: args.env, envFile: args.envFile, target: args.target || "simulator", enableDebug: args.enableDebug, nodeModulesRoot: this.getOrUpdateNodeModulesRoot(), }; if (args.platform === PlatformType.Exponent) { mobilePlatformOptions.expoHostType = args.expoHostType || "lan"; mobilePlatformOptions.openExpoQR = typeof args.openExpoQR !== "boolean" ? true : args.openExpoQR; } CommandExecutor.ReactNativeCommand = SettingsHelper.getReactNativeGlobalCommandName( workspaceFolder.uri, ); if (!args.runArguments) { let runArgs = SettingsHelper.getRunArgs( args.platform, args.target || "simulator", workspaceFolder.uri, ); mobilePlatformOptions.runArguments = runArgs; } else { mobilePlatformOptions.runArguments = args.runArguments; } return mobilePlatformOptions; } private getProjectRoot(args: any): string { return SettingsHelper.getReactNativeProjectRoot(args.cwd || args.program); } /** * Parses log cat arguments to a string */ private parseLogCatArguments(userProvidedLogCatArguments: any): string { return Array.isArray(userProvidedLogCatArguments) ? userProvidedLogCatArguments.join(" ") // If it's an array, we join the arguments : userProvidedLogCatArguments; // If not, we leave it as-is } }
the_stack
import { DebugElement } from '@angular/core'; import { ComponentFixture, TestBed } from '@angular/core/testing'; import { By } from '@angular/platform-browser'; import { of } from 'rxjs'; import { FormService } from '../../../services/form.service'; import { ProcessContentService } from '../../../services/process-content.service'; import { FormFieldTypes } from '../core/form-field-types'; import { FormModel } from '../core/form.model'; import { FormFieldModel } from './../core/form-field.model'; import { UploadWidgetComponent } from './upload.widget'; import { setupTestBed } from '../../../../testing/setup-test-bed'; import { CoreTestingModule } from '../../../../testing/core.testing.module'; import { TranslateModule } from '@ngx-translate/core'; import { RelatedContentRepresentation } from '@alfresco/js-api'; const fakePngAnswer = new RelatedContentRepresentation({ 'id': 1155, 'name': 'a_png_file.png', 'created': '2017-07-25T17:17:37.099Z', 'createdBy': { 'id': 1001, 'firstName': 'Admin', 'lastName': 'admin', 'email': 'admin' }, 'relatedContent': false, 'contentAvailable': true, 'link': false, 'mimeType': 'image/png', 'simpleType': 'image', 'previewStatus': 'queued', 'thumbnailStatus': 'queued' }); const fakeJpgAnswer = { 'id': 1156, 'name': 'a_jpg_file.jpg', 'created': '2017-07-25T17:17:37.118Z', 'createdBy': { 'id': 1001, 'firstName': 'Admin', 'lastName': 'admin', 'email': 'admin' }, 'relatedContent': false, 'contentAvailable': true, 'link': false, 'mimeType': 'image/jpeg', 'simpleType': 'image', 'previewStatus': 'queued', 'thumbnailStatus': 'queued' }; describe('UploadWidgetComponent', () => { function fakeCreationFile (name: string, id: string | number) { return { 'id': id, 'name': name, 'created': '2017-07-25T17:17:37.118Z', 'createdBy': { 'id': 1001, 'firstName': 'Admin', 'lastName': 'admin', 'email': 'admin' }, 'relatedContent': false, 'contentAvailable': true, 'link': false, 'mimeType': 'image/jpeg', 'simpleType': 'image', 'previewStatus': 'queued', 
'thumbnailStatus': 'queued' }; } let contentService: ProcessContentService; const filePngFake = new File(['fakePng'], 'file-fake.png', { type: 'image/png' }); const filJpgFake = new File(['fakeJpg'], 'file-fake.jpg', { type: 'image/jpg' }); setupTestBed({ imports: [ TranslateModule.forRoot(), CoreTestingModule ] }); describe('when template is ready', () => { let uploadWidgetComponent: UploadWidgetComponent; let fixture: ComponentFixture<UploadWidgetComponent>; let element: HTMLInputElement; let debugElement: DebugElement; let inputElement: HTMLInputElement; let formServiceInstance: FormService; beforeEach(() => { fixture = TestBed.createComponent(UploadWidgetComponent); uploadWidgetComponent = fixture.componentInstance; element = fixture.nativeElement; debugElement = fixture.debugElement; contentService = TestBed.inject(ProcessContentService); }); it('should setup with field data', () => { const fileName = 'hello world'; const encodedFileName = encodeURI(fileName); uploadWidgetComponent.field = new FormFieldModel(null, { type: FormFieldTypes.UPLOAD, value: [ { name: encodedFileName } ] }); uploadWidgetComponent.ngOnInit(); expect(uploadWidgetComponent.hasFile).toBeTruthy(); }); it('should require form field to setup', () => { uploadWidgetComponent.field = null; uploadWidgetComponent.ngOnInit(); expect(uploadWidgetComponent.hasFile).toBeFalsy(); }); it('should reset field value', () => { uploadWidgetComponent.field = new FormFieldModel(new FormModel(), { type: FormFieldTypes.UPLOAD, value: [ { name: 'filename' } ] }); uploadWidgetComponent.removeFile(uploadWidgetComponent.field.value[0]); expect(uploadWidgetComponent.field.value).toBeNull(); expect(uploadWidgetComponent.field.json.value).toBeNull(); expect(uploadWidgetComponent.hasFile).toBeFalsy(); }); beforeEach(() => { uploadWidgetComponent.field = new FormFieldModel(new FormModel({ taskId: 'fake-upload-id' }), { id: 'upload-id', name: 'upload-name', value: '', type: FormFieldTypes.UPLOAD, readOnly: false }); 
formServiceInstance = TestBed.inject(FormService); uploadWidgetComponent.field.value = []; }); it('should be not present in readonly forms', async () => { uploadWidgetComponent.field.form.readOnly = true; fixture.detectChanges(); inputElement = element.querySelector<HTMLInputElement>('#upload-id'); fixture.detectChanges(); await fixture.whenStable(); expect(inputElement).toBeNull(); }); it('should have the multiple attribute when is selected in parameters', async () => { uploadWidgetComponent.field.params.multiple = true; fixture.detectChanges(); inputElement = element.querySelector<HTMLInputElement>('#upload-id'); fixture.detectChanges(); await fixture.whenStable(); expect(inputElement).toBeDefined(); expect(inputElement).not.toBeNull(); expect(inputElement.getAttributeNode('multiple')).toBeTruthy(); }); it('should not have the multiple attribute if multiple is false', async () => { uploadWidgetComponent.field.params.multiple = false; fixture.detectChanges(); inputElement = element.querySelector<HTMLInputElement>('#upload-id'); fixture.detectChanges(); await fixture.whenStable(); expect(inputElement).toBeDefined(); expect(inputElement).not.toBeNull(); expect(inputElement.getAttributeNode('multiple')).toBeFalsy(); }); it('should show the list file after upload a new content', async () => { spyOn(contentService, 'createTemporaryRawRelatedContent').and.returnValue(of(fakePngAnswer)); uploadWidgetComponent.field.params.multiple = false; fixture.detectChanges(); await fixture.whenStable(); const inputDebugElement = fixture.debugElement.query(By.css('#upload-id')); inputDebugElement.triggerEventHandler('change', { target: { files: [filJpgFake] } }); const filesList = fixture.debugElement.query(By.css('#file-1156')); expect(filesList).toBeDefined(); }); it('should update the form after deleted a file', async () => { spyOn(contentService, 'createTemporaryRawRelatedContent').and.callFake((file: any) => { if (file.name === 'file-fake.png') { return of(fakePngAnswer); } if 
(file.name === 'file-fake.jpg') { return of(fakeJpgAnswer); } return of(null); }); uploadWidgetComponent.field.params.multiple = true; spyOn(uploadWidgetComponent.field, 'updateForm'); fixture.detectChanges(); await fixture.whenStable(); const inputDebugElement = fixture.debugElement.query(By.css('#upload-id')); inputDebugElement.triggerEventHandler('change', { target: { files: [filePngFake, filJpgFake] } }); fixture.detectChanges(); await fixture.whenStable(); const deleteButton = <HTMLInputElement> element.querySelector('#file-1155-remove'); deleteButton.click(); expect(uploadWidgetComponent.field.updateForm).toHaveBeenCalled(); }); it('should set has field value all the files uploaded', async () => { spyOn(contentService, 'createTemporaryRawRelatedContent').and.callFake((file: any) => { if (file.name === 'file-fake.png') { return of(fakePngAnswer); } if (file.name === 'file-fake.jpg') { return of(fakeJpgAnswer); } return of(null); }); uploadWidgetComponent.field.params.multiple = true; fixture.detectChanges(); await fixture.whenStable(); const inputDebugElement = fixture.debugElement.query(By.css('#upload-id')); inputDebugElement.triggerEventHandler('change', { target: { files: [filePngFake, filJpgFake] } }); fixture.detectChanges(); await fixture.whenStable(); inputElement = <HTMLInputElement> element.querySelector('#upload-id'); expect(inputElement).toBeDefined(); expect(inputElement).not.toBeNull(); expect(uploadWidgetComponent.field.value).not.toBeNull(); expect(uploadWidgetComponent.field.value.length).toBe(2); expect(uploadWidgetComponent.field.value[0].id).toBe(1155); expect(uploadWidgetComponent.field.value[1].id).toBe(1156); expect(uploadWidgetComponent.field.json.value.length).toBe(2); }); it('should show all the file uploaded on multiple field', async () => { uploadWidgetComponent.field.params.multiple = true; uploadWidgetComponent.field.value.push(fakeJpgAnswer); uploadWidgetComponent.field.value.push(fakePngAnswer); fixture.detectChanges(); await 
fixture.whenStable(); const jpegElement = element.querySelector('#file-1156'); const pngElement = element.querySelector('#file-1155'); expect(jpegElement).not.toBeNull(); expect(pngElement).not.toBeNull(); expect(jpegElement.textContent).toBe('a_jpg_file.jpg'); expect(pngElement.textContent).toBe('a_png_file.png'); }); it('should show correctly the file name when is formed with special characters', async () => { uploadWidgetComponent.field.value.push(fakeCreationFile('±!@#$%^&*()_+{}:”|<>?§™£-=[];’\\,./.jpg', 10)); fixture.detectChanges(); await fixture.whenStable(); const jpegElement = element.querySelector('#file-10'); expect(jpegElement).not.toBeNull(); expect(jpegElement.textContent).toBe(`±!@#$%^&*()_+{}:”|<>?§™£-=[];’\\,./.jpg`); }); it('should show correctly the file name when is formed with Arabic characters', async () => { const name = 'غ ظ ض ذ خ ث ت ش ر ق ص ف ع س ن م ل ك ي ط ح ز و ه د ج ب ا.jpg'; uploadWidgetComponent.field.value.push(fakeCreationFile(name, 11)); fixture.detectChanges(); await fixture.whenStable(); const jpegElement = element.querySelector('#file-11'); expect(jpegElement).not.toBeNull(); expect(jpegElement.textContent).toBe('غ ظ ض ذ خ ث ت ش ر ق ص ف ع س ن م ل ك ي ط ح ز و ه د ج ب ا.jpg'); }); it('should show correctly the file name when is formed with French characters', async () => { // cspell: disable-next uploadWidgetComponent.field.value.push(fakeCreationFile('Àâæçéèêëïîôœùûüÿ.jpg', 12)); fixture.detectChanges(); await fixture.whenStable(); const jpegElement = element.querySelector('#file-12'); expect(jpegElement).not.toBeNull(); // cspell: disable-next expect(jpegElement.textContent).toBe('Àâæçéèêëïîôœùûüÿ.jpg'); }); it('should show correctly the file name when is formed with Greek characters', async () => { // cspell: disable-next uploadWidgetComponent.field.value.push(fakeCreationFile('άέήίϊϊΐόύϋΰώθωερτψυιοπασδφγηςκλζχξωβνμ.jpg', 13)); fixture.detectChanges(); await fixture.whenStable(); const jpegElement = 
element.querySelector('#file-13'); expect(jpegElement).not.toBeNull(); // cspell: disable-next expect(jpegElement.textContent).toBe('άέήίϊϊΐόύϋΰώθωερτψυιοπασδφγηςκλζχξωβνμ.jpg'); }); it('should show correctly the file name when is formed with Polish accented characters', async () => { uploadWidgetComponent.field.value.push(fakeCreationFile('Ą Ć Ę Ł Ń Ó Ś Ź Żą ć ę ł ń ó ś ź ż.jpg', 14)); fixture.detectChanges(); await fixture.whenStable(); const jpegElement = element.querySelector('#file-14'); expect(jpegElement).not.toBeNull(); expect(jpegElement.textContent).toBe('Ą Ć Ę Ł Ń Ó Ś Ź Żą ć ę ł ń ó ś ź ż.jpg'); }); it('should show correctly the file name when is formed with Spanish accented characters', async () => { uploadWidgetComponent.field.value.push(fakeCreationFile('á, é, í, ó, ú, ñ, Ñ, ü, Ü, ¿, ¡. Á, É, Í, Ó, Ú.jpg', 15)); fixture.detectChanges(); await fixture.whenStable(); const jpegElement = element.querySelector('#file-15'); expect(jpegElement).not.toBeNull(); expect(jpegElement.textContent).toBe('á, é, í, ó, ú, ñ, Ñ, ü, Ü, ¿, ¡. 
Á, É, Í, Ó, Ú.jpg'); }); it('should show correctly the file name when is formed with Swedish characters', async () => { // cspell: disable-next uploadWidgetComponent.field.value.push(fakeCreationFile('Äåéö.jpg', 16)); fixture.detectChanges(); await fixture.whenStable(); const jpegElement = element.querySelector('#file-16'); expect(jpegElement).not.toBeNull(); // cspell: disable-next expect(jpegElement.textContent).toBe('Äåéö.jpg'); }); it('should remove file from field value', async () => { uploadWidgetComponent.field.params.multiple = true; uploadWidgetComponent.field.value.push(fakeJpgAnswer); uploadWidgetComponent.field.value.push(fakePngAnswer); fixture.detectChanges(); await fixture.whenStable(); const buttonElement = <HTMLButtonElement> element.querySelector('#file-1156-remove'); buttonElement.click(); fixture.detectChanges(); const jpegElement = element.querySelector('#file-1156'); expect(jpegElement).toBeNull(); expect(uploadWidgetComponent.field.value.length).toBe(1); }); it('should emit form content clicked event on icon click', (done) => { spyOn(contentService, 'getContentPreview').and.returnValue(of(new Blob())); spyOn(contentService, 'getFileRawContent').and.returnValue(of(new Blob())); formServiceInstance.formContentClicked.subscribe((content: any) => { expect(content.name).toBe(fakeJpgAnswer.name); expect(content.id).toBe(fakeJpgAnswer.id); expect(content.contentBlob).not.toBeNull(); done(); }); uploadWidgetComponent.field.params.multiple = true; uploadWidgetComponent.field.value.push(fakeJpgAnswer); uploadWidgetComponent.field.value.push(fakePngAnswer); fixture.detectChanges(); fixture.whenStable().then(() => { const fileJpegIcon = debugElement.query(By.css('#file-1156-icon')); fileJpegIcon.nativeElement.dispatchEvent(new MouseEvent('click')); }); }); }); });
the_stack
// MurmurHash3 — x86 32-bit, x86 128-bit and x64 128-bit flavors, computed over
// Node.js Buffers. JavaScript bitwise operators are 32-bit only, so the x64
// flavor emulates 64-bit integers as [high32, low32] pairs of 32-bit ints.

// PRIVATE FUNCTIONS
// -----------------
function _x86Multiply(m: number, n: number) {
  //
  // Given two 32bit ints, returns the two multiplied together as a
  // 32bit int. (Splitting m into 16-bit halves keeps every intermediate
  // product exactly representable as a double.)
  //
  return (m & 0xffff) * n + ((((m >>> 16) * n) & 0xffff) << 16);
}

function _x86Rotl(m: number, n: number) {
  //
  // Given a 32bit int and an int representing a number of bit positions,
  // returns the 32bit int rotated left by that number of positions.
  //
  return (m << n) | (m >>> (32 - n));
}

function _x86Fmix(h: number) {
  //
  // Given a block, returns murmurHash3's final x86 mix of that block
  // (the standard avalanche sequence: xor-shift / multiply rounds).
  //
  h ^= h >>> 16;
  h = _x86Multiply(h, 0x85ebca6b);
  h ^= h >>> 13;
  h = _x86Multiply(h, 0xc2b2ae35);
  h ^= h >>> 16;
  return h;
}

function _x64Add(m: number[], n: number[]) {
  //
  // Given two 64bit ints (as an array of two 32bit ints) returns the two
  // added together as a 64bit int (as an array of two 32bit ints).
  //
  // Both operands are first split into four 16-bit limbs; the limbs are
  // then added least-significant first, propagating carries by hand.
  m = [m[0] >>> 16, m[0] & 0xffff, m[1] >>> 16, m[1] & 0xffff];
  n = [n[0] >>> 16, n[0] & 0xffff, n[1] >>> 16, n[1] & 0xffff];
  const o = [0, 0, 0, 0];
  o[3] += m[3] + n[3];
  o[2] += o[3] >>> 16;
  o[3] &= 0xffff;
  o[2] += m[2] + n[2];
  o[1] += o[2] >>> 16;
  o[2] &= 0xffff;
  o[1] += m[1] + n[1];
  o[0] += o[1] >>> 16;
  o[1] &= 0xffff;
  o[0] += m[0] + n[0];
  o[0] &= 0xffff;
  return [(o[0] << 16) | o[1], (o[2] << 16) | o[3]];
}

function _x64Multiply(m: number[], n: number[]) {
  //
  // Given two 64bit ints (as an array of two 32bit ints) returns the two
  // multiplied together as a 64bit int (as an array of two 32bit ints).
  //
  // Schoolbook multiplication over 16-bit limbs; partial products whose
  // weight exceeds 64 bits are simply dropped (wrap-around semantics).
  m = [m[0] >>> 16, m[0] & 0xffff, m[1] >>> 16, m[1] & 0xffff];
  n = [n[0] >>> 16, n[0] & 0xffff, n[1] >>> 16, n[1] & 0xffff];
  const o = [0, 0, 0, 0];
  o[3] += m[3] * n[3];
  o[2] += o[3] >>> 16;
  o[3] &= 0xffff;
  o[2] += m[2] * n[3];
  o[1] += o[2] >>> 16;
  o[2] &= 0xffff;
  o[2] += m[3] * n[2];
  o[1] += o[2] >>> 16;
  o[2] &= 0xffff;
  o[1] += m[1] * n[3];
  o[0] += o[1] >>> 16;
  o[1] &= 0xffff;
  o[1] += m[2] * n[2];
  o[0] += o[1] >>> 16;
  o[1] &= 0xffff;
  o[1] += m[3] * n[1];
  o[0] += o[1] >>> 16;
  o[1] &= 0xffff;
  o[0] += m[0] * n[3] + m[1] * n[2] + m[2] * n[1] + m[3] * n[0];
  o[0] &= 0xffff;
  return [(o[0] << 16) | o[1], (o[2] << 16) | o[3]];
}

function _x64Rotl(m: number[], n: number) {
  //
  // Given a 64bit int (as an array of two 32bit ints) and an int
  // representing a number of bit positions, returns the 64bit int (as an
  // array of two 32bit ints) rotated left by that number of positions.
  //
  n %= 64;
  if (n === 32) {
    // Exactly half a word: just swap high and low.
    return [m[1], m[0]];
  } else if (n < 32) {
    return [(m[0] << n) | (m[1] >>> (32 - n)), (m[1] << n) | (m[0] >>> (32 - n))];
  } else {
    n -= 32;
    return [(m[1] << n) | (m[0] >>> (32 - n)), (m[0] << n) | (m[1] >>> (32 - n))];
  }
}

function _x64LeftShift(m: number[], n: number) {
  //
  // Given a 64bit int (as an array of two 32bit ints) and an int
  // representing a number of bit positions, returns the 64bit int (as an
  // array of two 32bit ints) shifted left by that number of positions.
  //
  n %= 64;
  if (n === 0) {
    return m;
  } else if (n < 32) {
    return [(m[0] << n) | (m[1] >>> (32 - n)), m[1] << n];
  } else {
    // Shifting by 32+ moves the low word into the high word.
    return [m[1] << (n - 32), 0];
  }
}

function _x64Xor(m: number[], n: number[]) {
  //
  // Given two 64bit ints (as an array of two 32bit ints) returns the two
  // xored together as a 64bit int (as an array of two 32bit ints).
  //
  return [m[0] ^ n[0], m[1] ^ n[1]];
}

function _x64Fmix(h: number[]) {
  //
  // Given a block, returns murmurHash3's final x64 mix of that block.
  // (`[0, h[0] >>> 1]` is a 33 bit unsigned right shift. This is the
  // only place where we need to right shift 64bit ints.)
  //
  h = _x64Xor(h, [0, h[0] >>> 1]);
  h = _x64Multiply(h, [0xff51afd7, 0xed558ccd]);
  h = _x64Xor(h, [0, h[0] >>> 1]);
  h = _x64Multiply(h, [0xc4ceb9fe, 0x1a85ec53]);
  h = _x64Xor(h, [0, h[0] >>> 1]);
  return h;
}

// PUBLIC FUNCTIONS
// ----------------
function x86Hash32(bytes: Buffer, seed?: number) {
  //
  // Given a byte buffer and an optional seed as an int, returns a 32 bit
  // hash using the x86 flavor of MurmurHash3, as an unsigned int.
  //
  seed = seed || 0;
  const remainder = bytes.length % 4;
  const blocks = bytes.length - remainder;
  let h1 = seed;
  let k1 = 0;
  const c1 = 0xcc9e2d51;
  const c2 = 0x1b873593;
  let j = 0; // index of the first byte after the last full block
  // Body: mix each full 4-byte block (read little-endian) into h1.
  for (let i = 0; i < blocks; i = i + 4) {
    k1 = bytes[i] | (bytes[i + 1] << 8) | (bytes[i + 2] << 16) | (bytes[i + 3] << 24);
    k1 = _x86Multiply(k1, c1);
    k1 = _x86Rotl(k1, 15);
    k1 = _x86Multiply(k1, c2);
    h1 ^= k1;
    h1 = _x86Rotl(h1, 13);
    h1 = _x86Multiply(h1, 5) + 0xe6546b64;
    j = i + 4;
  }
  k1 = 0;
  // Tail: fold in the 1-3 trailing bytes. Fall-through is intentional —
  // each case accumulates one more byte before case 1 mixes the result.
  switch (remainder) {
    case 3:
      k1 ^= bytes[j + 2] << 16;
    // falls through
    case 2:
      k1 ^= bytes[j + 1] << 8;
    // falls through
    case 1:
      k1 ^= bytes[j];
      k1 = _x86Multiply(k1, c1);
      k1 = _x86Rotl(k1, 15);
      k1 = _x86Multiply(k1, c2);
      h1 ^= k1;
  }
  // Finalization: mix in the length, then avalanche.
  h1 ^= bytes.length;
  h1 = _x86Fmix(h1);
  return h1 >>> 0;
}

function x86Hash128(bytes: Buffer, seed?: number) {
  //
  // Given a byte buffer and an optional seed as an int, returns a 128 bit
  // hash using the x86 flavor of MurmurHash3, as an unsigned hex string.
  //
  seed = seed || 0;
  const remainder = bytes.length % 16;
  const blocks = bytes.length - remainder;
  let h1 = seed;
  let h2 = seed;
  let h3 = seed;
  let h4 = seed;
  let k1 = 0;
  let k2 = 0;
  let k3 = 0;
  let k4 = 0;
  const c1 = 0x239b961b;
  const c2 = 0xab0e9789;
  const c3 = 0x38b34ae5;
  const c4 = 0xa1e38b93;
  let j = 0; // index of the first byte after the last full block
  // Body: each full 16-byte block is read as four little-endian 32-bit
  // lanes, each mixed into its own running state (h1..h4).
  for (let i = 0; i < blocks; i = i + 16) {
    k1 = bytes[i] | (bytes[i + 1] << 8) | (bytes[i + 2] << 16) | (bytes[i + 3] << 24);
    k2 = bytes[i + 4] | (bytes[i + 5] << 8) | (bytes[i + 6] << 16) | (bytes[i + 7] << 24);
    k3 = bytes[i + 8] | (bytes[i + 9] << 8) | (bytes[i + 10] << 16) | (bytes[i + 11] << 24);
    k4 = bytes[i + 12] | (bytes[i + 13] << 8) | (bytes[i + 14] << 16) | (bytes[i + 15] << 24);
    k1 = _x86Multiply(k1, c1);
    k1 = _x86Rotl(k1, 15);
    k1 = _x86Multiply(k1, c2);
    h1 ^= k1;
    h1 = _x86Rotl(h1, 19);
    h1 += h2;
    h1 = _x86Multiply(h1, 5) + 0x561ccd1b;
    k2 = _x86Multiply(k2, c2);
    k2 = _x86Rotl(k2, 16);
    k2 = _x86Multiply(k2, c3);
    h2 ^= k2;
    h2 = _x86Rotl(h2, 17);
    h2 += h3;
    h2 = _x86Multiply(h2, 5) + 0x0bcaa747;
    k3 = _x86Multiply(k3, c3);
    k3 = _x86Rotl(k3, 17);
    k3 = _x86Multiply(k3, c4);
    h3 ^= k3;
    h3 = _x86Rotl(h3, 15);
    h3 += h4;
    h3 = _x86Multiply(h3, 5) + 0x96cd1c35;
    k4 = _x86Multiply(k4, c4);
    k4 = _x86Rotl(k4, 18);
    k4 = _x86Multiply(k4, c1);
    h4 ^= k4;
    h4 = _x86Rotl(h4, 13);
    h4 += h1;
    h4 = _x86Multiply(h4, 5) + 0x32ac3b17;
    j = i + 16;
  }
  k1 = 0;
  k2 = 0;
  k3 = 0;
  k4 = 0;
  // Tail: fold in the 1-15 trailing bytes. Fall-through is intentional —
  // cases accumulate bytes per lane, and cases 13/9/5/1 mix that lane.
  switch (remainder) {
    case 15:
      k4 ^= bytes[j + 14] << 16;
    // falls through
    case 14:
      k4 ^= bytes[j + 13] << 8;
    // falls through
    case 13:
      k4 ^= bytes[j + 12];
      k4 = _x86Multiply(k4, c4);
      k4 = _x86Rotl(k4, 18);
      k4 = _x86Multiply(k4, c1);
      h4 ^= k4;
    // falls through
    case 12:
      k3 ^= bytes[j + 11] << 24;
    // falls through
    case 11:
      k3 ^= bytes[j + 10] << 16;
    // falls through
    case 10:
      k3 ^= bytes[j + 9] << 8;
    // falls through
    case 9:
      k3 ^= bytes[j + 8];
      k3 = _x86Multiply(k3, c3);
      k3 = _x86Rotl(k3, 17);
      k3 = _x86Multiply(k3, c4);
      h3 ^= k3;
    // falls through
    case 8:
      k2 ^= bytes[j + 7] << 24;
    // falls through
    case 7:
      k2 ^= bytes[j + 6] << 16;
    // falls through
    case 6:
      k2 ^= bytes[j + 5] << 8;
    // falls through
    case 5:
      k2 ^= bytes[j + 4];
      k2 = _x86Multiply(k2, c2);
      k2 = _x86Rotl(k2, 16);
      k2 = _x86Multiply(k2, c3);
      h2 ^= k2;
    // falls through
    case 4:
      k1 ^= bytes[j + 3] << 24;
    // falls through
    case 3:
      k1 ^= bytes[j + 2] << 16;
    // falls through
    case 2:
      k1 ^= bytes[j + 1] << 8;
    // falls through
    case 1:
      k1 ^= bytes[j];
      k1 = _x86Multiply(k1, c1);
      k1 = _x86Rotl(k1, 15);
      k1 = _x86Multiply(k1, c2);
      h1 ^= k1;
  }
  // Finalization: mix in the length, cross-mix the four states, avalanche
  // each, then cross-mix again.
  h1 ^= bytes.length;
  h2 ^= bytes.length;
  h3 ^= bytes.length;
  h4 ^= bytes.length;
  h1 += h2;
  h1 += h3;
  h1 += h4;
  h2 += h1;
  h3 += h1;
  h4 += h1;
  h1 = _x86Fmix(h1);
  h2 = _x86Fmix(h2);
  h3 = _x86Fmix(h3);
  h4 = _x86Fmix(h4);
  h1 += h2;
  h1 += h3;
  h1 += h4;
  h2 += h1;
  h3 += h1;
  h4 += h1;
  // Emit each 32-bit word as 8 zero-padded lowercase hex digits.
  return (
    ("00000000" + (h1 >>> 0).toString(16)).slice(-8) +
    ("00000000" + (h2 >>> 0).toString(16)).slice(-8) +
    ("00000000" + (h3 >>> 0).toString(16)).slice(-8) +
    ("00000000" + (h4 >>> 0).toString(16)).slice(-8)
  );
}

function x64Hash128(bytes: Buffer, seed?: number) {
  //
  // Given a byte buffer and an optional seed as an int, returns a 128 bit
  // hash using the x64 flavor of MurmurHash3, as an unsigned hex string.
  //
  seed = seed || 0;
  const remainder = bytes.length % 16;
  const blocks = bytes.length - remainder;
  let h1 = [0, seed];
  let h2 = [0, seed];
  let k1 = [0, 0];
  let k2 = [0, 0];
  const c1 = [0x87c37b91, 0x114253d5];
  const c2 = [0x4cf5ad43, 0x2745937f];
  let j = 0; // index of the first byte after the last full block
  // Body: each full 16-byte block is read as two little-endian 64-bit
  // lanes ([high32, low32] pairs), each mixed into its own state.
  for (let i = 0; i < blocks; i = i + 16) {
    k1 = [
      bytes[i + 4] | (bytes[i + 5] << 8) | (bytes[i + 6] << 16) | (bytes[i + 7] << 24),
      bytes[i] | (bytes[i + 1] << 8) | (bytes[i + 2] << 16) | (bytes[i + 3] << 24)
    ];
    k2 = [
      bytes[i + 12] | (bytes[i + 13] << 8) | (bytes[i + 14] << 16) | (bytes[i + 15] << 24),
      bytes[i + 8] | (bytes[i + 9] << 8) | (bytes[i + 10] << 16) | (bytes[i + 11] << 24)
    ];
    k1 = _x64Multiply(k1, c1);
    k1 = _x64Rotl(k1, 31);
    k1 = _x64Multiply(k1, c2);
    h1 = _x64Xor(h1, k1);
    h1 = _x64Rotl(h1, 27);
    h1 = _x64Add(h1, h2);
    h1 = _x64Add(_x64Multiply(h1, [0, 5]), [0, 0x52dce729]);
    k2 = _x64Multiply(k2, c2);
    k2 = _x64Rotl(k2, 33);
    k2 = _x64Multiply(k2, c1);
    h2 = _x64Xor(h2, k2);
    h2 = _x64Rotl(h2, 31);
    h2 = _x64Add(h2, h1);
    h2 = _x64Add(_x64Multiply(h2, [0, 5]), [0, 0x38495ab5]);
    j = i + 16;
  }
  k1 = [0, 0];
  k2 = [0, 0];
  // Tail: fold in the 1-15 trailing bytes. Fall-through is intentional —
  // cases accumulate bytes per lane, and cases 9/1 mix that lane.
  switch (remainder) {
    case 15:
      k2 = _x64Xor(k2, _x64LeftShift([0, bytes[j + 14]], 48));
    // falls through
    case 14:
      k2 = _x64Xor(k2, _x64LeftShift([0, bytes[j + 13]], 40));
    // falls through
    case 13:
      k2 = _x64Xor(k2, _x64LeftShift([0, bytes[j + 12]], 32));
    // falls through
    case 12:
      k2 = _x64Xor(k2, _x64LeftShift([0, bytes[j + 11]], 24));
    // falls through
    case 11:
      k2 = _x64Xor(k2, _x64LeftShift([0, bytes[j + 10]], 16));
    // falls through
    case 10:
      k2 = _x64Xor(k2, _x64LeftShift([0, bytes[j + 9]], 8));
    // falls through
    case 9:
      k2 = _x64Xor(k2, [0, bytes[j + 8]]);
      k2 = _x64Multiply(k2, c2);
      k2 = _x64Rotl(k2, 33);
      k2 = _x64Multiply(k2, c1);
      h2 = _x64Xor(h2, k2);
    // falls through
    case 8:
      k1 = _x64Xor(k1, _x64LeftShift([0, bytes[j + 7]], 56));
    // falls through
    case 7:
      k1 = _x64Xor(k1, _x64LeftShift([0, bytes[j + 6]], 48));
    // falls through
    case 6:
      k1 = _x64Xor(k1, _x64LeftShift([0, bytes[j + 5]], 40));
    // falls through
    case 5:
      k1 = _x64Xor(k1, _x64LeftShift([0, bytes[j + 4]], 32));
    // falls through
    case 4:
      k1 = _x64Xor(k1, _x64LeftShift([0, bytes[j + 3]], 24));
    // falls through
    case 3:
      k1 = _x64Xor(k1, _x64LeftShift([0, bytes[j + 2]], 16));
    // falls through
    case 2:
      k1 = _x64Xor(k1, _x64LeftShift([0, bytes[j + 1]], 8));
    // falls through
    case 1:
      k1 = _x64Xor(k1, [0, bytes[j]]);
      k1 = _x64Multiply(k1, c1);
      k1 = _x64Rotl(k1, 31);
      k1 = _x64Multiply(k1, c2);
      h1 = _x64Xor(h1, k1);
  }
  // Finalization: mix in the length, cross-mix, avalanche, cross-mix again.
  h1 = _x64Xor(h1, [0, bytes.length]);
  h2 = _x64Xor(h2, [0, bytes.length]);
  h1 = _x64Add(h1, h2);
  h2 = _x64Add(h2, h1);
  h1 = _x64Fmix(h1);
  h2 = _x64Fmix(h2);
  h1 = _x64Add(h1, h2);
  h2 = _x64Add(h2, h1);
  // Here we reverse h1 and h2 in Cosmos
  // This is an implementation detail and not part of the public spec
  const h1Buff = Buffer.from(
    ("00000000" + (h1[0] >>> 0).toString(16)).slice(-8) + ("00000000" + (h1[1] >>> 0).toString(16)).slice(-8),
    "hex"
  );
  const h1Reversed = reverse(h1Buff).toString("hex");
  const h2Buff = Buffer.from(
    ("00000000" + (h2[0] >>> 0).toString(16)).slice(-8) + ("00000000" + (h2[1] >>> 0).toString(16)).slice(-8),
    "hex"
  );
  const h2Reversed = reverse(h2Buff).toString("hex");
  return h1Reversed + h2Reversed;
}

// Returns a new Buffer containing the bytes of `buff` in reverse order.
// Swaps from both ends toward the middle; reads only from the source
// buffer, so the input is never mutated.
export function reverse(buff: Buffer) {
  const buffer = Buffer.allocUnsafe(buff.length);
  for (let i = 0, j = buff.length - 1; i <= j; ++i, --j) {
    buffer[i] = buff[j];
    buffer[j] = buff[i];
  }
  return buffer;
}

// Public surface: mirrors the murmurhash3js-style API shape
// (version string, per-architecture hash namespaces, validation flag).
export default {
  version: "3.0.0",
  x86: { hash32: x86Hash32, hash128: x86Hash128 },
  x64: { hash128: x64Hash128 },
  inputValidation: true
};
the_stack
import { distinct } from 'vs/base/common/arrays'; import { RunOnceWorker } from 'vs/base/common/async'; import { CancellationToken } from 'vs/base/common/cancellation'; import { Emitter, Event } from 'vs/base/common/event'; import { match } from 'vs/base/common/glob'; import { Disposable } from 'vs/base/common/lifecycle'; import { equals } from 'vs/base/common/objects'; import { language, OperatingSystem, OS } from 'vs/base/common/platform'; import { isDefined } from 'vs/base/common/types'; import { IConfigurationService } from 'vs/platform/configuration/common/configuration'; import { IExtensionManagementService } from 'vs/platform/extensionManagement/common/extensionManagement'; import { ExtensionType } from 'vs/platform/extensions/common/extensions'; import { createDecorator } from 'vs/platform/instantiation/common/instantiation'; import { IProductService } from 'vs/platform/product/common/productService'; import { asJson, IRequestService } from 'vs/platform/request/common/request'; import { IStorageService, StorageScope, StorageTarget } from 'vs/platform/storage/common/storage'; import { ITelemetryService, lastSessionDateStorageKey } from 'vs/platform/telemetry/common/telemetry'; import { IWorkspaceTagsService } from 'vs/workbench/contrib/tags/common/workspaceTags'; import { IWorkbenchEnvironmentService } from 'vs/workbench/services/environment/common/environmentService'; import { IExtensionService } from 'vs/workbench/services/extensions/common/extensions'; import { ILifecycleService, LifecyclePhase } from 'vs/workbench/services/lifecycle/common/lifecycle'; import { ITextFileEditorModel, ITextFileService } from 'vs/workbench/services/textfile/common/textfiles'; export const enum ExperimentState { Evaluating, NoRun, Run, Complete } export interface IExperimentAction { type: ExperimentActionType; properties: any; } export enum ExperimentActionType { Custom = 'Custom', Prompt = 'Prompt', AddToRecommendations = 'AddToRecommendations', ExtensionSearchResults = 
'ExtensionSearchResults' } export type LocalizedPromptText = { [locale: string]: string }; export interface IExperimentActionPromptProperties { promptText: string | LocalizedPromptText; commands: IExperimentActionPromptCommand[]; } export interface IExperimentActionPromptCommand { text: string | { [key: string]: string }; externalLink?: string; curatedExtensionsKey?: string; curatedExtensionsList?: string[]; codeCommand?: { id: string; arguments: unknown[]; }; } export interface IExperiment { id: string; enabled: boolean; raw: IRawExperiment | undefined; state: ExperimentState; action?: IExperimentAction; } export interface IExperimentService { readonly _serviceBrand: undefined; getExperimentById(id: string): Promise<IExperiment>; getExperimentsByType(type: ExperimentActionType): Promise<IExperiment[]>; getCuratedExtensionsList(curatedExtensionsKey: string): Promise<string[]>; markAsCompleted(experimentId: string): void; onExperimentEnabled: Event<IExperiment>; } export const IExperimentService = createDecorator<IExperimentService>('experimentService'); interface IExperimentStorageState { enabled: boolean; state: ExperimentState; editCount?: number; lastEditedDate?: string; } /** * Current version of the experiment schema in this VS Code build. This *must* * be incremented when adding a condition, otherwise experiments might activate * on older versions of VS Code where not intended. 
*/ export const currentSchemaVersion = 5; interface IRawExperiment { id: string; schemaVersion: number; enabled?: boolean; condition?: { insidersOnly?: boolean; newUser?: boolean; displayLanguage?: string; // Evaluates to true iff all the given user settings are deeply equal userSetting?: { [key: string]: unknown }; // Start the experiment if the number of activation events have happened over the last week: activationEvent?: { event: string | string[]; uniqueDays?: number; minEvents: number; }; os: OperatingSystem[]; installedExtensions?: { excludes?: string[]; includes?: string[]; }; fileEdits?: { filePathPattern?: string; workspaceIncludes?: string[]; workspaceExcludes?: string[]; minEditCount: number; }; experimentsPreviouslyRun?: { excludes?: string[]; includes?: string[]; }; userProbability?: number; }; action?: IExperimentAction; action2?: IExperimentAction; } interface IActivationEventRecord { count: number[]; mostRecentBucket: number; } const experimentEventStorageKey = (event: string) => 'experimentEventRecord-' + event.replace(/[^0-9a-z]/ig, '-'); /** * Updates the activation record to shift off days outside the window * we're interested in. 
*/ export const getCurrentActivationRecord = (previous?: IActivationEventRecord, dayWindow = 7): IActivationEventRecord => { const oneDay = 1000 * 60 * 60 * 24; const now = Date.now(); if (!previous) { return { count: new Array(dayWindow).fill(0), mostRecentBucket: now }; } // get the number of days, up to dayWindow, that passed since the last bucket update const shift = Math.min(dayWindow, Math.floor((now - previous.mostRecentBucket) / oneDay)); if (!shift) { return previous; } return { count: new Array(shift).fill(0).concat(previous.count.slice(0, -shift)), mostRecentBucket: previous.mostRecentBucket + shift * oneDay, }; }; export class ExperimentService extends Disposable implements IExperimentService { declare readonly _serviceBrand: undefined; private _experiments: IExperiment[] = []; private _loadExperimentsPromise: Promise<void>; private _curatedMapping = Object.create(null); private readonly _onExperimentEnabled = this._register(new Emitter<IExperiment>()); onExperimentEnabled: Event<IExperiment> = this._onExperimentEnabled.event; constructor( @IStorageService private readonly storageService: IStorageService, @IExtensionManagementService private readonly extensionManagementService: IExtensionManagementService, @ITextFileService private readonly textFileService: ITextFileService, @ITelemetryService private readonly telemetryService: ITelemetryService, @ILifecycleService private readonly lifecycleService: ILifecycleService, @IRequestService private readonly requestService: IRequestService, @IConfigurationService private readonly configurationService: IConfigurationService, @IProductService private readonly productService: IProductService, @IWorkspaceTagsService private readonly workspaceTagsService: IWorkspaceTagsService, @IExtensionService private readonly extensionService: IExtensionService, @IWorkbenchEnvironmentService private readonly environmentService: IWorkbenchEnvironmentService ) { super(); this._loadExperimentsPromise = 
Promise.resolve(this.lifecycleService.when(LifecyclePhase.Eventually)).then(() => this.loadExperiments()); } public getExperimentById(id: string): Promise<IExperiment> { return this._loadExperimentsPromise.then(() => { return this._experiments.filter(x => x.id === id)[0]; }); } public getExperimentsByType(type: ExperimentActionType): Promise<IExperiment[]> { return this._loadExperimentsPromise.then(() => { if (type === ExperimentActionType.Custom) { return this._experiments.filter(x => x.enabled && (!x.action || x.action.type === type)); } return this._experiments.filter(x => x.enabled && x.action && x.action.type === type); }); } public getCuratedExtensionsList(curatedExtensionsKey: string): Promise<string[]> { return this._loadExperimentsPromise.then(() => { for (const experiment of this._experiments) { if (experiment.enabled && experiment.state === ExperimentState.Run && this._curatedMapping[experiment.id] && this._curatedMapping[experiment.id].curatedExtensionsKey === curatedExtensionsKey) { return this._curatedMapping[experiment.id].curatedExtensionsList; } } return []; }); } public markAsCompleted(experimentId: string): void { const storageKey = 'experiments.' 
+ experimentId; const experimentState: IExperimentStorageState = safeParse(this.storageService.get(storageKey, StorageScope.GLOBAL), {}); experimentState.state = ExperimentState.Complete; this.storageService.store(storageKey, JSON.stringify(experimentState), StorageScope.GLOBAL, StorageTarget.MACHINE); } protected async getExperiments(): Promise<IRawExperiment[] | null> { if (this.environmentService.enableSmokeTestDriver || this.environmentService.extensionTestsLocationURI) { return []; // TODO@sbatten add CLI argument (https://github.com/microsoft/vscode-internalbacklog/issues/2855) } const experimentsUrl = this.configurationService.getValue<string>('_workbench.experimentsUrl') || this.productService.experimentsUrl; if (!experimentsUrl || this.configurationService.getValue('workbench.enableExperiments') === false) { return []; } try { const context = await this.requestService.request({ type: 'GET', url: experimentsUrl }, CancellationToken.None); if (context.res.statusCode !== 200) { return null; } const result = await asJson<{ experiments?: IRawExperiment }>(context); return result && Array.isArray(result.experiments) ? result.experiments : []; } catch (_e) { // Bad request or invalid JSON return null; } } private loadExperiments(): Promise<any> { return this.getExperiments().then(rawExperiments => { // Offline mode if (!rawExperiments) { const allExperimentIdsFromStorage = safeParse(this.storageService.get('allExperiments', StorageScope.GLOBAL), []); if (Array.isArray(allExperimentIdsFromStorage)) { allExperimentIdsFromStorage.forEach(experimentId => { const storageKey = 'experiments.' 
+ experimentId; const experimentState: IExperimentStorageState = safeParse(this.storageService.get(storageKey, StorageScope.GLOBAL), null); if (experimentState) { this._experiments.push({ id: experimentId, raw: undefined, enabled: experimentState.enabled, state: experimentState.state }); } }); } return Promise.resolve(null); } // Don't look at experiments with newer schema versions. We can't // understand them, trying to process them might even cause errors. rawExperiments = rawExperiments.filter(e => (e.schemaVersion || 0) <= currentSchemaVersion); // Clear disbaled/deleted experiments from storage const allExperimentIdsFromStorage = safeParse(this.storageService.get('allExperiments', StorageScope.GLOBAL), []); const enabledExperiments = rawExperiments.filter(experiment => !!experiment.enabled).map(experiment => experiment.id.toLowerCase()); if (Array.isArray(allExperimentIdsFromStorage)) { allExperimentIdsFromStorage.forEach(experiment => { if (enabledExperiments.indexOf(experiment) === -1) { this.storageService.remove(`experiments.${experiment}`, StorageScope.GLOBAL); } }); } if (enabledExperiments.length) { this.storageService.store('allExperiments', JSON.stringify(enabledExperiments), StorageScope.GLOBAL, StorageTarget.MACHINE); } else { this.storageService.remove('allExperiments', StorageScope.GLOBAL); } const activationEvents = new Set(rawExperiments.map(exp => exp.condition?.activationEvent?.event) .filter(isDefined).flatMap(evt => typeof evt === 'string' ? 
[evt] : [])); if (activationEvents.size) { this._register(this.extensionService.onWillActivateByEvent(evt => { if (activationEvents.has(evt.event)) { this.recordActivatedEvent(evt.event); } })); } const promises = rawExperiments.map(experiment => this.evaluateExperiment(experiment)); return Promise.all(promises).then(() => { type ExperimentsClassification = { experiments: { classification: 'SystemMetaData'; purpose: 'FeatureInsight' }; }; this.telemetryService.publicLog2<{ experiments: string[] }, ExperimentsClassification>('experiments', { experiments: this._experiments.map(e => e.id) }); }); }); } private evaluateExperiment(experiment: IRawExperiment) { const processedExperiment: IExperiment = { id: experiment.id, raw: experiment, enabled: !!experiment.enabled, state: !!experiment.enabled ? ExperimentState.Evaluating : ExperimentState.NoRun }; const action = experiment.action2 || experiment.action; if (action) { processedExperiment.action = { type: ExperimentActionType[action.type] || ExperimentActionType.Custom, properties: action.properties }; if (processedExperiment.action.type === ExperimentActionType.Prompt) { ((<IExperimentActionPromptProperties>processedExperiment.action.properties).commands || []).forEach(x => { if (x.curatedExtensionsKey && Array.isArray(x.curatedExtensionsList)) { this._curatedMapping[experiment.id] = x; } }); } if (!processedExperiment.action.properties) { processedExperiment.action.properties = {}; } } this._experiments = this._experiments.filter(e => e.id !== processedExperiment.id); this._experiments.push(processedExperiment); if (!processedExperiment.enabled) { return Promise.resolve(null); } const storageKey = 'experiments.' 
+ experiment.id; const experimentState: IExperimentStorageState = safeParse(this.storageService.get(storageKey, StorageScope.GLOBAL), {}); if (!experimentState.hasOwnProperty('enabled')) { experimentState.enabled = processedExperiment.enabled; } if (!experimentState.hasOwnProperty('state')) { experimentState.state = processedExperiment.enabled ? ExperimentState.Evaluating : ExperimentState.NoRun; } else { processedExperiment.state = experimentState.state; } return this.shouldRunExperiment(experiment, processedExperiment).then((state: ExperimentState) => { experimentState.state = processedExperiment.state = state; this.storageService.store(storageKey, JSON.stringify(experimentState), StorageScope.GLOBAL, StorageTarget.MACHINE); if (state === ExperimentState.Run) { this.fireRunExperiment(processedExperiment); } return Promise.resolve(null); }); } private fireRunExperiment(experiment: IExperiment) { this._onExperimentEnabled.fire(experiment); const runExperimentIdsFromStorage: string[] = safeParse(this.storageService.get('currentOrPreviouslyRunExperiments', StorageScope.GLOBAL), []); if (runExperimentIdsFromStorage.indexOf(experiment.id) === -1) { runExperimentIdsFromStorage.push(experiment.id); } // Ensure we dont store duplicates const distinctExperiments = distinct(runExperimentIdsFromStorage); if (runExperimentIdsFromStorage.length !== distinctExperiments.length) { this.storageService.store('currentOrPreviouslyRunExperiments', JSON.stringify(distinctExperiments), StorageScope.GLOBAL, StorageTarget.MACHINE); } } private checkExperimentDependencies(experiment: IRawExperiment): boolean { const experimentsPreviouslyRun = experiment.condition?.experimentsPreviouslyRun; if (experimentsPreviouslyRun) { const runExperimentIdsFromStorage: string[] = safeParse(this.storageService.get('currentOrPreviouslyRunExperiments', StorageScope.GLOBAL), []); let includeCheck = true; let excludeCheck = true; const includes = experimentsPreviouslyRun.includes; if 
(Array.isArray(includes)) {
	// Experiment qualifies only if at least one of the `includes` ids has
	// already run on this machine.
	includeCheck = runExperimentIdsFromStorage.some(x => includes.indexOf(x) > -1);
}
const excludes = experimentsPreviouslyRun.excludes;
if (includeCheck && Array.isArray(excludes)) {
	// ...and none of the `excludes` ids has run.
	excludeCheck = !runExperimentIdsFromStorage.some(x => excludes.indexOf(x) > -1);
}
if (!includeCheck || !excludeCheck) {
	return false;
}
}
return true;
}

/**
 * Records one occurrence of an activation event in global storage and
 * re-evaluates every experiment that is still `Evaluating` and whose
 * condition waits on this event.
 */
private recordActivatedEvent(event: string) {
	const key = experimentEventStorageKey(event);
	const record = getCurrentActivationRecord(safeParse(this.storageService.get(key, StorageScope.GLOBAL), undefined));
	// NOTE(review): count[0] appears to be the current day's bucket — confirm
	// against getCurrentActivationRecord.
	record.count[0]++;
	this.storageService.store(key, JSON.stringify(record), StorageScope.GLOBAL, StorageTarget.MACHINE);
	this._experiments
		.filter(e => {
			const lookingFor = e.raw?.condition?.activationEvent?.event;
			if (e.state !== ExperimentState.Evaluating || !lookingFor) {
				return false;
			}
			// The condition's event may be a single name or a list of names.
			return typeof lookingFor === 'string' ? lookingFor === event : lookingFor?.includes(event);
		})
		.forEach(e => this.evaluateExperiment(e.raw!));
}

/**
 * Returns true once the activation-event counts stored for this experiment
 * satisfy its `minEvents` threshold (and, if configured, `uniqueDays`).
 * Returns true immediately when the experiment has no activationEvent
 * condition.
 */
private checkActivationEventFrequency(experiment: IRawExperiment) {
	const setting = experiment.condition?.activationEvent;
	if (!setting) {
		return true;
	}
	let total = 0;
	let uniqueDays = 0;
	// `event` may be a single event name or a list of names.
	const events = typeof setting.event === 'string' ? [setting.event] : setting.event;
	for (const event of events) {
		const { count } = getCurrentActivationRecord(safeParse(this.storageService.get(experimentEventStorageKey(event), StorageScope.GLOBAL), undefined));
		for (const entry of count) {
			if (entry > 0) {
				// NOTE(review): a day bucket is counted once per event with
				// activity, so two events active on the same day contribute two
				// "unique days" — confirm this is intended.
				uniqueDays++;
				total += entry;
			}
		}
	}
	return total >= setting.minEvents && (!setting.uniqueDays || uniqueDays >= setting.uniqueDays);
}

/**
 * Evaluates every gating condition of `experiment` in order and resolves to
 * the resulting state:
 * - NoRun when any synchronous gate (enabled flag, OS, dependencies, user
 *   settings, insiders-only, new-user, display language) fails;
 * - Evaluating while the activation-event frequency or file-edit thresholds
 *   have not been reached yet (a save listener keeps counting edits);
 * - Run/NoRun otherwise, decided by `condition.userProbability`.
 */
private shouldRunExperiment(experiment: IRawExperiment, processedExperiment: IExperiment): Promise<ExperimentState> {
	// Already decided earlier — keep the recorded state.
	if (processedExperiment.state !== ExperimentState.Evaluating) {
		return Promise.resolve(processedExperiment.state);
	}
	if (!experiment.enabled) {
		return Promise.resolve(ExperimentState.NoRun);
	}
	const condition = experiment.condition;
	// No condition at all means the experiment always runs.
	if (!condition) {
		return Promise.resolve(ExperimentState.Run);
	}
	if (experiment.condition?.os && !experiment.condition.os.includes(OS)) {
		return Promise.resolve(ExperimentState.NoRun);
	}
	if (!this.checkExperimentDependencies(experiment)) {
		return Promise.resolve(ExperimentState.NoRun);
	}
	// Every configured user setting must match the current configuration.
	for (const [key, value] of Object.entries(experiment.condition?.userSetting || {})) {
		if (!equals(this.configurationService.getValue(key), value)) {
			return Promise.resolve(ExperimentState.NoRun);
		}
	}
	// Not enough activation events yet — keep evaluating (recordActivatedEvent
	// re-triggers this experiment when the event fires again).
	if (!this.checkActivationEventFrequency(experiment)) {
		return Promise.resolve(ExperimentState.Evaluating);
	}
	if (this.productService.quality === 'stable' && condition.insidersOnly === true) {
		return Promise.resolve(ExperimentState.NoRun);
	}
	// A missing last-session date means this is the user's first session.
	const isNewUser = !this.storageService.get(lastSessionDateStorageKey, StorageScope.GLOBAL);
	if ((condition.newUser === true && !isNewUser) || (condition.newUser === false && isNewUser)) {
		return Promise.resolve(ExperimentState.NoRun);
	}
	if (typeof condition.displayLanguage === 'string') {
		let localeToCheck = condition.displayLanguage.toLowerCase();
		let displayLanguage = language!.toLowerCase();
		if (localeToCheck !== displayLanguage) {
			// Compare base languages only, e.g. "en-us" matches "en".
			const a = displayLanguage.indexOf('-');
			const b = localeToCheck.indexOf('-');
			if (a > -1) {
				displayLanguage = displayLanguage.substr(0, a);
			}
			if (b > -1) {
				localeToCheck = localeToCheck.substr(0, b);
			}
			if (displayLanguage !== localeToCheck) {
				return Promise.resolve(ExperimentState.NoRun);
			}
		}
	}
	// Default probability is 1 (always run once all other gates pass).
	if (!condition.userProbability) {
		condition.userProbability = 1;
	}
	let extensionsCheckPromise = Promise.resolve(true);
	const installedExtensions = condition.installedExtensions;
	if (installedExtensions) {
		extensionsCheckPromise = this.extensionManagementService.getInstalled(ExtensionType.User).then(locals => {
			let includesCheck = true;
			let excludesCheck = true;
			// Compare against "publisher.name" ids, lower-cased.
			const localExtensions = locals.map(local => `${local.manifest.publisher.toLowerCase()}.${local.manifest.name.toLowerCase()}`);
			if (Array.isArray(installedExtensions.includes) && installedExtensions.includes.length) {
				const extensionIncludes = installedExtensions.includes.map(e => e.toLowerCase());
				includesCheck = localExtensions.some(e => extensionIncludes.indexOf(e) > -1);
			}
			if (Array.isArray(installedExtensions.excludes) && installedExtensions.excludes.length) {
				const extensionExcludes = installedExtensions.excludes.map(e => e.toLowerCase());
				excludesCheck = !localExtensions.some(e => extensionExcludes.indexOf(e) > -1);
			}
			return includesCheck && excludesCheck;
		});
	}
	const storageKey = 'experiments.' + experiment.id;
	const experimentState: IExperimentStorageState = safeParse(this.storageService.get(storageKey, StorageScope.GLOBAL), {});
	return extensionsCheckPromise.then(success => {
		const fileEdits = condition.fileEdits;
		// No file-edit threshold configured: decide immediately by probability.
		if (!success || !fileEdits || typeof fileEdits.minEditCount !== 'number') {
			const runExperiment = success && typeof condition.userProbability === 'number' && Math.random() < condition.userProbability;
			return runExperiment ? ExperimentState.Run : ExperimentState.NoRun;
		}
		experimentState.editCount = experimentState.editCount || 0;
		if (experimentState.editCount >= fileEdits.minEditCount) {
			return ExperimentState.Run;
		}
		// Process model-save event every 250ms to reduce load
		const onModelsSavedWorker = this._register(new RunOnceWorker<ITextFileEditorModel>(models => {
			const date = new Date().toDateString();
			const latestExperimentState: IExperimentStorageState = safeParse(this.storageService.get(storageKey, StorageScope.GLOBAL), {});
			// The experiment was decided elsewhere — stop listening.
			if (latestExperimentState.state !== ExperimentState.Evaluating) {
				onSaveHandler.dispose();
				onModelsSavedWorker.dispose();
				return;
			}
			models.forEach(async model => {
				// At most one counted edit per calendar day.
				if (latestExperimentState.state !== ExperimentState.Evaluating
					|| date === latestExperimentState.lastEditedDate
					|| (typeof latestExperimentState.editCount === 'number' && latestExperimentState.editCount >= fileEdits.minEditCount)
				) {
					return;
				}
				let filePathCheck = true;
				let workspaceCheck = true;
				if (typeof fileEdits.filePathPattern === 'string') {
					filePathCheck = match(fileEdits.filePathPattern, model.resource.fsPath);
				}
				if (Array.isArray(fileEdits.workspaceIncludes) && fileEdits.workspaceIncludes.length) {
					const tags = await this.workspaceTagsService.getTags();
					workspaceCheck = !!tags && fileEdits.workspaceIncludes.some(x => !!tags[x]);
				}
				if (workspaceCheck && Array.isArray(fileEdits.workspaceExcludes) && fileEdits.workspaceExcludes.length) {
					const tags = await this.workspaceTagsService.getTags();
					workspaceCheck = !!tags && !fileEdits.workspaceExcludes.some(x => !!tags[x]);
				}
				if (filePathCheck && workspaceCheck) {
					latestExperimentState.editCount = (latestExperimentState.editCount || 0) + 1;
					latestExperimentState.lastEditedDate = date;
					this.storageService.store(storageKey, JSON.stringify(latestExperimentState), StorageScope.GLOBAL, StorageTarget.MACHINE);
				}
			});
			// NOTE(review): the forEach callbacks above are async, so this check
			// reads editCount as updated by previously completed runs — confirm
			// the eventual-consistency here is intended.
			if (typeof latestExperimentState.editCount === 'number' && latestExperimentState.editCount >= fileEdits.minEditCount) {
				processedExperiment.state = latestExperimentState.state = (typeof condition.userProbability === 'number' && Math.random() < condition.userProbability && this.checkExperimentDependencies(experiment)) ? ExperimentState.Run : ExperimentState.NoRun;
				this.storageService.store(storageKey, JSON.stringify(latestExperimentState), StorageScope.GLOBAL, StorageTarget.MACHINE);
				if (latestExperimentState.state === ExperimentState.Run && processedExperiment.action && ExperimentActionType[processedExperiment.action.type] === ExperimentActionType.Prompt) {
					this.fireRunExperiment(processedExperiment);
				}
			}
		}, 250));
		const onSaveHandler = this._register(this.textFileService.files.onDidSave(e => onModelsSavedWorker.work(e.model)));
		return ExperimentState.Evaluating;
	});
}
}

/** Parses JSON text, falling back to `defaultObject` on missing, invalid, or falsy input. */
function safeParse(text: string | undefined, defaultObject: any) {
	try {
		return text ? JSON.parse(text) || defaultObject : defaultObject;
	} catch (e) {
		return defaultObject;
	}
}
the_stack
type Pow2 = number; const STATE_FREE = 0; const STATE_USED = 1; const STATE_TOP = 2; const STATE_END = 3; const STATE_ALIGN = 4; const STATE_FLAGS = 5; const STATE_MIN_SPLIT = 6; const MASK_COMPACT = 1; const MASK_SPLIT = 2; const SIZEOF_STATE = 7 * 4; const MEM_BLOCK_SIZE = 0; const MEM_BLOCK_NEXT = 1; const SIZEOF_MEM_BLOCK = 2 * 4; // export function allocatorInit( // // options: Readonly<MemPoolOpts> // size = 0x1000, // start = 0, // align = 8, // end = 0x1000, // minSplit = 16, // compact = true, // split = true // ): Uint32Array { // invariant(align >= 8, "align must be >= 8"); // invariant(align % 8 === 0, "align must be multiplication of 8"); // const buf = new ArrayBuffer(size || 0x1000); // const state = Uint32Array.wrap(buf, start, SIZEOF_STATE / 4); // const u32 = Uint32Array.wrap(buf); // const top = initialTop(start, align as Pow2); // const resolvedEnd = // end != null ? Math.min(end, buf.byteLength) : buf.byteLength; // set_align(state, align); // set_doCompact(state, compact); // set_doSplit(state, split); // set_minSplit(state, minSplit); // set_end(state, resolvedEnd); // set_top(state, top); // set__free(state, 0); // set__used(state, 0); // if (top >= resolvedEnd) { // throw new Error( // `insufficient address range (0x${start.toString( // 16 // )} - 0x${resolvedEnd.toString(16)})` // ); // } // return u32; // } // export function calloc( // state: Uint32Array, // u32: Uint32Array, // u8: Uint32Array, // bytes: number, // fill = 0 // ): number { // const addr = malloc(state, u32, bytes); // addr && u8.fill(fill, addr, addr + bytes); // return addr; // } export function malloc( state: Uint32Array, u32: Uint32Array, bytes: number ): number { if (bytes <= 0) { return 0; } const paddedSize = align(bytes + SIZEOF_MEM_BLOCK, get_align(state)); const end = get_end(state); let top = get_top(state); let block = get__free(state); let prev = 0; while (block) { const itrBlockSize = blockSize(u32, block); const isTop = block + itrBlockSize >= top; if 
(isTop || itrBlockSize >= paddedSize) { if (isTop && block + paddedSize > end) { return 0; } if (prev) { unlinkBlock(u32, prev, block); } else { set__free(state, blockNext(u32, block)); } setBlockNext(u32, block, get__used(state)); set__used(state, block); if (isTop) { set_top(state, block + setBlockSize(u32, block, paddedSize)); // this.top = block + this.setBlockSize(block, paddedSize); } else if (get_doSplit(state)) { const excess = itrBlockSize - paddedSize; excess >= get_minSplit(state) && splitBlock(state, u32, block, paddedSize, excess); } return blockDataAddress(block); } prev = block; block = blockNext(u32, block); } block = top; top = block + paddedSize; if (top <= end) { initBlock(u32, block, paddedSize, get__used(state)); set__used(state, block); set_top(state, top); return blockDataAddress(block); } return 0; } // export function realloc( // state: Uint32Array, // u32: Uint32Array, // u8: Uint8Array, // ptr: number, // bytes: number // ): number { // if (bytes <= 0) { // return 0; // } // const oldAddr = blockSelfAddress(ptr); // let newAddr = 0; // let block = get__used(state); // let blockEnd = 0; // while (block) { // if (block === oldAddr) { // const itrBlockSize = blockSize(u32, block); // blockEnd = oldAddr + itrBlockSize; // const isTop = blockEnd >= get_top(state); // const paddedSize = align(bytes + SIZEOF_MEM_BLOCK, get_align(state)); // // shrink & possibly split existing block // if (paddedSize <= itrBlockSize) { // if (get_doSplit(state)) { // const excess = itrBlockSize - paddedSize; // if (excess >= get_minSplit(state)) { // splitBlock(state, u32, block, paddedSize, excess); // } else if (isTop) { // set_top(state, oldAddr + paddedSize); // // this.top = oldAddr + paddedSize; // } // } else if (isTop) { // set_top(state, oldAddr + paddedSize); // // this.top = oldAddr + paddedSize; // } // newAddr = oldAddr; // break; // } // // try to enlarge block if current top // if (isTop && oldAddr + paddedSize < get_end(state)) { // set_top(state, 
oldAddr + setBlockSize(u32, block, paddedSize)); // newAddr = oldAddr; // break; // } // // fallback to free & malloc // free(state, u32, oldAddr); // newAddr = blockSelfAddress(malloc(state, u32, bytes)); // break; // } // block = blockNext(u32, block); // } // // copy old block contents to new addr // if (newAddr && newAddr !== oldAddr) { // u8.copyWithin( // blockDataAddress(newAddr), // blockDataAddress(oldAddr), // blockEnd // ); // } // return blockDataAddress(newAddr); // } export function free( state: Uint32Array, u32: Uint32Array, ptr: number ): boolean { let addr: number = ptr; addr = blockSelfAddress(addr); let block = get__used(state); let prev = 0; while (block) { if (block === addr) { if (prev) { unlinkBlock(u32, prev, block); } else { set__used(state, blockNext(u32, block)); } insert(state, u32, block); get_doCompact(state) && compact(state, u32); return true; } prev = block; block = blockNext(u32, block); } return false; } export function freeAll(state: Uint32Array, start: number): void { set__free(state, 0); set__used(state, 0); set_top(state, initialTop(start, get_align(state) as Pow2)); } // export function stats( // allocatorState: Readonly<AllocatorState> // ): Readonly<MemPoolStats> { // const listStats = (block: number) => { // let count = 0; // let size = 0; // while (block) { // count++; // size += blockSize(allocatorState.u32, block); // block = blockNext(allocatorState.u32, block); // } // return { count, size }; // }; // const free = listStats(get__free(allocatorState.state)); // return { // free, // used: listStats(get__used(allocatorState.state)), // top: get_top(allocatorState.state), // available: // get_end(allocatorState.state) - get_top(allocatorState.state) + free.size, // total: allocatorState.u8.buffer.byteLength, // }; // } // export function release() { // // NOOP // // delete this.u8; // // delete this.u32; // // delete this.state; // // delete this.buf; // // return true; // } function invariant(assertionResult: boolean, 
message: string): void { if (!assertionResult) { throw new Error("Invariant: " + message); } } /** * Exported for testing proposes only * @private * @param state */ function get_align(state: Uint32Array): Pow2 { return <Pow2>state[STATE_ALIGN]; } function set_align(state: Uint32Array, x: Pow2): void { state[STATE_ALIGN] = x; } /** * Exported for testing proposes only * @private * @param state */ export function get_end(state: Uint32Array): number { return state[STATE_END]; } function set_end(state: Uint32Array, x: number): void { state[STATE_END] = x; } /** * Exported for testing proposes only * @private * @param state */ export function get_top(state: Uint32Array): number { return state[STATE_TOP]; } function set_top(state: Uint32Array, x: number): void { state[STATE_TOP] = x; } /** * Exported for testing proposes only * @private * @param state */ export function get__free(state: Uint32Array): number { return state[STATE_FREE]; } function set__free(state: Uint32Array, block: number): void { state[STATE_FREE] = block; } /** * Exported for testing proposes only * @private * @param state */ export function get__used(state: Uint32Array): number { return state[STATE_USED]; } function set__used(state: Uint32Array, block: number): void { state[STATE_USED] = block; } /** * Exported for testing proposes only * @private * @param state */ export function get_doCompact(state: Uint32Array): boolean { return !!(state[STATE_FLAGS] & MASK_COMPACT); } function set_doCompact(state: Uint32Array, flag: boolean): void { if (flag) { state[STATE_FLAGS] |= 1 << (MASK_COMPACT - 1); } else { state[STATE_FLAGS] &= ~MASK_COMPACT; } } /** * Exported for testing proposes only * @private * @param state */ export function get_doSplit(state: Uint32Array): boolean { return !!(state[STATE_FLAGS] & MASK_SPLIT); } function set_doSplit(state: Uint32Array, flag: boolean): void { if (flag) { state[STATE_FLAGS] |= 1 << (MASK_SPLIT - 1); } else { state[STATE_FLAGS] &= ~MASK_SPLIT; } } /** * Exported for 
testing proposes only * @private * @param state */ function get_minSplit(state: Uint32Array): number { return state[STATE_MIN_SPLIT]; } function set_minSplit(state: Uint32Array, x: number): void { invariant( x > SIZEOF_MEM_BLOCK, `illegal min split threshold: ${x}, require at least ${ SIZEOF_MEM_BLOCK + 1 }` ); state[STATE_MIN_SPLIT] = x; } function initialTop(start: number, _align: Pow2): number { return ( align(start + SIZEOF_STATE + SIZEOF_MEM_BLOCK, _align) - SIZEOF_MEM_BLOCK ); } /** * Exported for testing proposes only * @private * @param state * @param u32 * @param block */ export function blockSize(u32: Uint32Array, block: number): number { return u32[(block >> 2) + MEM_BLOCK_SIZE]; } /** * Sets & returns given block size. * * @param block - * @param size - */ function setBlockSize(u32: Uint32Array, block: number, size: number): number { u32[(block >> 2) + MEM_BLOCK_SIZE] = size; return size; } /** * Exported for testing proposes only * @private * @param u32 * @param block */ export function blockNext(u32: Uint32Array, block: number): number { return u32[(block >> 2) + MEM_BLOCK_NEXT]; } /** * Sets block next pointer to `next`. Use zero to indicate list end. * * @param block - */ function setBlockNext(u32: Uint32Array, block: number, next: number): void { u32[(block >> 2) + MEM_BLOCK_NEXT] = next; } /** * Initializes block header with given `size` and `next` pointer. Returns `block`. 
* * @param block - * @param size - * @param next - */ function initBlock( u32: Uint32Array, block: number, size: number, next: number ): number { const idx = block >>> 2; u32[idx + MEM_BLOCK_SIZE] = size; u32[idx + MEM_BLOCK_NEXT] = next; return block; } function unlinkBlock(u32: Uint32Array, prev: number, block: number): void { setBlockNext(u32, prev, blockNext(u32, block)); } function splitBlock( stateU32: Uint32Array, u32: Uint32Array, block: number, blockSize: number, excess: number ): void { insert( stateU32, u32, initBlock(u32, block + setBlockSize(u32, block, blockSize), excess, 0) ); get_doCompact(stateU32) && compact(stateU32, u32); } /** * Traverses free list and attempts to recursively merge blocks * occupying consecutive memory regions. Returns true if any blocks * have been merged. Only called if `compact` option is enabled. */ function compact(stateU32: Uint32Array, u32: Uint32Array): boolean { let block = get__free(stateU32); let prev = 0; let scan = 0; let scanPrev: number; let res = false; while (block) { scanPrev = block; scan = blockNext(u32, block); while (scan && scanPrev + blockSize(u32, scanPrev) === scan) { // console.log("merge:", scan.addr, scan.size); scanPrev = scan; scan = blockNext(u32, scan); } if (scanPrev !== block) { const newSize = scanPrev - block + blockSize(u32, scanPrev); // console.log("merged size:", newSize); setBlockSize(u32, block, newSize); const next = blockNext(u32, scanPrev); let tmp = blockNext(u32, block); while (tmp && tmp !== next) { // console.log("release:", tmp.addr); const tn = blockNext(u32, tmp); setBlockNext(u32, tmp, 0); tmp = tn; } setBlockNext(u32, block, next); res = true; } // re-adjust top if poss if (block + blockSize(u32, block) >= get_top(stateU32)) { set_top(stateU32, block); if (prev !== 0) { unlinkBlock(u32, prev, block); } else { set__free(stateU32, blockNext(u32, block)); } // prev // ? 
this.unlinkBlock(prev, block) // : (this._free = this.blockNext(block)); } prev = block; block = blockNext(u32, block); } return res; } /** * Inserts given block into list of free blocks, sorted by address. * * @param block - */ function insert(stateU32: Uint32Array, u32: Uint32Array, block: number): void { let ptr = get__free(stateU32); let prev = 0; while (ptr) { if (block <= ptr) break; prev = ptr; ptr = blockNext(u32, ptr); } if (prev) { setBlockNext(u32, prev, block); } else { set__free(stateU32, block); } setBlockNext(u32, block, ptr); } /** * Returns a block's data address, based on given alignment. * * @param blockAddress - */ function blockDataAddress(blockAddress: number): number { return blockAddress + SIZEOF_MEM_BLOCK; } /** * Returns block start address for given data address and alignment. * * @param dataAddress - */ function blockSelfAddress(dataAddress: number): number { return dataAddress - SIZEOF_MEM_BLOCK; } /** * Aligns `addr` to next multiple of `size`. The latter must be a power * of 2. * * @param addr - value to align * @param size - alignment value */ export function align(addr: number, size: Pow2): number { return size--, (addr + size) & ~size; }
the_stack
import { Vector2, Vector3, Vector4 } from "@oasis-engine/math"; import { Logger } from "../base/Logger"; import { Camera } from "../Camera"; import { Engine } from "../Engine"; import { Material } from "../material/Material"; import { Renderer } from "../Renderer"; import { IHardwareRenderer } from "../renderingHardwareInterface/IHardwareRenderer"; import { Texture } from "../texture"; import { ShaderDataGroup } from "./enums/ShaderDataGroup"; import { Shader } from "./Shader"; import { ShaderData } from "./ShaderData"; import { ShaderUniform } from "./ShaderUniform"; import { ShaderUniformBlock } from "./ShaderUniformBlock"; /** * Shader program, corresponding to the GPU shader program. * @internal */ export class ShaderProgram { private static _counter: number = 0; private static _addLineNum(str: string) { const lines = str.split("\n"); const limitLength = (lines.length + 1).toString().length + 6; let prefix; return lines .map((line, index) => { prefix = `0:${index + 1}`; if (prefix.length >= limitLength) return prefix.substring(0, limitLength) + line; for (let i = 0; i < limitLength - prefix.length; i++) prefix += " "; return prefix + line; }) .join("\n"); } id: number; readonly sceneUniformBlock: ShaderUniformBlock = new ShaderUniformBlock(); readonly cameraUniformBlock: ShaderUniformBlock = new ShaderUniformBlock(); readonly rendererUniformBlock: ShaderUniformBlock = new ShaderUniformBlock(); readonly materialUniformBlock: ShaderUniformBlock = new ShaderUniformBlock(); readonly otherUniformBlock: ShaderUniformBlock = new ShaderUniformBlock(); /** @internal */ _uploadRenderCount: number = -1; /** @internal */ _uploadCamera: Camera; /** @internal */ _uploadRenderer: Renderer; /** @internal */ _uploadMaterial: Material; attributeLocation: Record<string, GLint> = Object.create(null); // @todo: move to RHI. 
private _isValid: boolean; private _engine: Engine; private _gl: WebGLRenderingContext; private _vertexShader: WebGLShader; private _fragmentShader: WebGLShader; private _glProgram: WebGLProgram; private _activeTextureUint: number = 0; /** * Whether this shader program is valid. */ get isValid(): boolean { return this._isValid; } constructor(engine: Engine, vertexSource: string, fragmentSource: string) { this._engine = engine; this._gl = engine._hardwareRenderer.gl; this._glProgram = this._createProgram(vertexSource, fragmentSource); if (this._glProgram) { this._isValid = true; this._recordLocation(); } else { this._isValid = false; } this.id = ShaderProgram._counter++; } /** * Upload all shader data in shader uniform block. * @param uniformBlock - shader Uniform block * @param shaderData - shader data */ uploadAll(uniformBlock: ShaderUniformBlock, shaderData: ShaderData): void { this.uploadUniforms(uniformBlock, shaderData); this.uploadTextures(uniformBlock, shaderData); } /** * Upload constant shader data in shader uniform block. * @param uniformBlock - shader Uniform block * @param shaderData - shader data */ uploadUniforms(uniformBlock: ShaderUniformBlock, shaderData: ShaderData): void { const properties = shaderData._properties; const constUniforms = uniformBlock.constUniforms; for (let i = 0, n = constUniforms.length; i < n; i++) { const uniform = constUniforms[i]; const data = properties[uniform.propertyId]; data != null && uniform.applyFunc(uniform, data); } } /** * Upload texture shader data in shader uniform block. * @param uniformBlock - shader Uniform block * @param shaderData - shader data */ uploadTextures(uniformBlock: ShaderUniformBlock, shaderData: ShaderData): void { const properties = shaderData._properties; const textureUniforms = uniformBlock.textureUniforms; // textureUniforms property maybe null if ShaderUniformBlock not contain any texture. 
if (textureUniforms) { for (let i = 0, n = textureUniforms.length; i < n; i++) { const uniform = textureUniforms[i]; const texture = properties[uniform.propertyId]; if (texture) { uniform.applyFunc(uniform, texture); } else { uniform.applyFunc(uniform, uniform.textureDefault); } } } } /** * Upload ungroup texture shader data in shader uniform block. */ uploadUnGroupTextures(): void { const textureUniforms = this.otherUniformBlock.textureUniforms; // textureUniforms property maybe null if ShaderUniformBlock not contain any texture. if (textureUniforms) { for (let i = 0, n = textureUniforms.length; i < n; i++) { const uniform = textureUniforms[i]; uniform.applyFunc(uniform, uniform.textureDefault); } } } /** * Grouping other data. */ groupingOtherUniformBlock() { const { constUniforms, textureUniforms } = this.otherUniformBlock; constUniforms.length > 0 && this._groupingSubOtherUniforms(constUniforms, false); textureUniforms.length > 0 && this._groupingSubOtherUniforms(textureUniforms, true); } /** * Bind this shader program. * @returns Whether the shader program is switched. */ bind(): boolean { const rhi: IHardwareRenderer = this._engine._hardwareRenderer; if (rhi._currentBind !== this) { this._gl.useProgram(this._glProgram); rhi._currentBind = this; return true; } else { return false; } } /** * Destroy this shader program. 
*/ destroy(): void { const gl = this._gl; this._vertexShader && gl.deleteShader(this._vertexShader); this._fragmentShader && gl.deleteShader(this._fragmentShader); this._glProgram && gl.deleteProgram(this._glProgram); } private _groupingSubOtherUniforms(uniforms: ShaderUniform[], isTexture: boolean): void { for (let i = uniforms.length - 1; i >= 0; i--) { const uniform = uniforms[i]; const group = Shader._getShaderPropertyGroup(uniform.name); if (group !== undefined) { uniforms.splice(uniforms.indexOf(uniform), 1); this._groupingUniform(uniform, group, isTexture); } } } private _groupingUniform(uniform: ShaderUniform, group: ShaderDataGroup, isTexture: boolean): void { switch (group) { case ShaderDataGroup.Scene: if (isTexture) { this.sceneUniformBlock.textureUniforms.push(uniform); } else { this.sceneUniformBlock.constUniforms.push(uniform); } break; case ShaderDataGroup.Camera: if (isTexture) { this.cameraUniformBlock.textureUniforms.push(uniform); } else { this.cameraUniformBlock.constUniforms.push(uniform); } break; case ShaderDataGroup.Renderer: if (isTexture) { this.rendererUniformBlock.textureUniforms.push(uniform); } else { this.rendererUniformBlock.constUniforms.push(uniform); } break; case ShaderDataGroup.Material: if (isTexture) { this.materialUniformBlock.textureUniforms.push(uniform); } else { this.materialUniformBlock.constUniforms.push(uniform); } break; default: if (isTexture) { this.otherUniformBlock.textureUniforms.push(uniform); } else { this.otherUniformBlock.constUniforms.push(uniform); } } } /** * init and link program with shader. 
*/ private _createProgram(vertexSource: string, fragmentSource: string): WebGLProgram | null { const gl = this._gl; // create and compile shader const vertexShader = this._createShader(gl.VERTEX_SHADER, vertexSource); if (!vertexShader) { return null; } const fragmentShader = this._createShader(gl.FRAGMENT_SHADER, fragmentSource); if (!fragmentShader) { return null; } // link program and shader const program = gl.createProgram(); gl.attachShader(program, vertexShader); gl.attachShader(program, fragmentShader); gl.linkProgram(program); gl.validateProgram(program); if (gl.isContextLost()) { Logger.error("Context lost while linking program."); gl.deleteShader(vertexShader); gl.deleteShader(fragmentShader); return null; } if (Logger.isEnabled && !gl.getProgramParameter(program, gl.LINK_STATUS)) { Logger.error("Could not link WebGL program. \n" + gl.getProgramInfoLog(program)); gl.deleteProgram(program); return null; } this._vertexShader = vertexShader; this._fragmentShader = fragmentShader; return program; } private _createShader(shaderType: number, shaderSource: string): WebGLShader | null { const gl = this._gl; const shader = gl.createShader(shaderType); if (!shader) { Logger.error("Context lost while create shader."); return null; } gl.shaderSource(shader, shaderSource); gl.compileShader(shader); if (gl.isContextLost()) { Logger.error("Context lost while compiling shader."); gl.deleteShader(shader); return null; } if (Logger.isEnabled && !gl.getShaderParameter(shader, gl.COMPILE_STATUS)) { Logger.error( `Could not compile WebGL shader.\n${gl.getShaderInfoLog(shader)}`, ShaderProgram._addLineNum(shaderSource) ); gl.deleteShader(shader); return null; } return shader; } /** * record the location of uniform/attribute. 
*/ private _recordLocation() { const gl = this._gl; const program = this._glProgram; const uniformInfos = this._getUniformInfos(); const attributeInfos = this._getAttributeInfos(); uniformInfos.forEach(({ name, size, type }) => { const shaderUniform = new ShaderUniform(this._engine); let isArray = false; let isTexture = false; if (name.indexOf("[0]") > 0) { name = name.substr(0, name.length - 3); isArray = true; } const group = Shader._getShaderPropertyGroup(name); const location = gl.getUniformLocation(program, name); shaderUniform.name = name; shaderUniform.propertyId = Shader.getPropertyByName(name)._uniqueId; shaderUniform.location = location; switch (type) { case gl.FLOAT: if (isArray) { shaderUniform.applyFunc = shaderUniform.upload1fv; } else { shaderUniform.applyFunc = shaderUniform.upload1f; shaderUniform.cacheValue = 0; } break; case gl.FLOAT_VEC2: if (isArray) { shaderUniform.applyFunc = shaderUniform.upload2fv; } else { shaderUniform.applyFunc = shaderUniform.upload2f; shaderUniform.cacheValue = new Vector2(0, 0); } break; case gl.FLOAT_VEC3: if (isArray) { shaderUniform.applyFunc = shaderUniform.upload3fv; } else { shaderUniform.applyFunc = shaderUniform.upload3f; shaderUniform.cacheValue = new Vector3(0, 0, 0); } break; case gl.FLOAT_VEC4: if (isArray) { shaderUniform.applyFunc = shaderUniform.upload4fv; } else { shaderUniform.applyFunc = shaderUniform.upload4f; shaderUniform.cacheValue = new Vector4(0, 0, 0, 0); } break; case gl.BOOL: case gl.INT: if (isArray) { shaderUniform.applyFunc = shaderUniform.upload1iv; } else { shaderUniform.applyFunc = shaderUniform.upload1i; shaderUniform.cacheValue = 0; } break; case gl.BOOL_VEC2: case gl.INT_VEC2: if (isArray) { shaderUniform.applyFunc = shaderUniform.upload2iv; } else { shaderUniform.applyFunc = shaderUniform.upload2i; shaderUniform.cacheValue = new Vector2(0, 0); } break; case gl.BOOL_VEC3: case gl.INT_VEC3: if (isArray) { } else { } shaderUniform.applyFunc = isArray ? 
shaderUniform.upload3iv : shaderUniform.upload3i; shaderUniform.cacheValue = new Vector3(0, 0, 0); break; case gl.BOOL_VEC4: case gl.INT_VEC4: if (isArray) { shaderUniform.applyFunc = shaderUniform.upload4iv; } else { shaderUniform.applyFunc = shaderUniform.upload4i; shaderUniform.cacheValue = new Vector4(0, 0, 0); } break; case gl.FLOAT_MAT4: shaderUniform.applyFunc = isArray ? shaderUniform.uploadMat4v : shaderUniform.uploadMat4; break; case gl.SAMPLER_2D: case gl.SAMPLER_CUBE: const defaultTexture = type === gl.SAMPLER_2D ? this._engine._whiteTexture2D : this._engine._whiteTextureCube; isTexture = true; if (isArray) { const defaultTextures = new Array<Texture>(size); const textureIndices = new Int32Array(size); const glTextureIndices = new Array<number>(size); for (let i = 0; i < size; i++) { defaultTextures[i] = defaultTexture; textureIndices[i] = this._activeTextureUint; glTextureIndices[i] = gl.TEXTURE0 + this._activeTextureUint++; } shaderUniform.textureDefault = defaultTextures; shaderUniform.textureIndex = glTextureIndices; shaderUniform.applyFunc = shaderUniform.uploadTextureArray; this.bind(); gl.uniform1iv(location, textureIndices); shaderUniform.uploadTextureArray(shaderUniform, defaultTextures); } else { const textureIndex = gl.TEXTURE0 + this._activeTextureUint; shaderUniform.textureDefault = defaultTexture; shaderUniform.textureIndex = textureIndex; shaderUniform.applyFunc = shaderUniform.uploadTexture; this.bind(); gl.uniform1i(location, this._activeTextureUint++); shaderUniform.uploadTexture(shaderUniform, defaultTexture); } break; } this._groupingUniform(shaderUniform, group, isTexture); }); attributeInfos.forEach(({ name }) => { this.attributeLocation[name] = gl.getAttribLocation(program, name); }); } private _getUniformInfos(): WebGLActiveInfo[] { const gl = this._gl; const program = this._glProgram; const uniformInfos: WebGLActiveInfo[] = []; const uniformCount = gl.getProgramParameter(program, gl.ACTIVE_UNIFORMS); for (let i = 0; i < 
uniformCount; ++i) { const info = gl.getActiveUniform(program, i); uniformInfos[i] = info; } return uniformInfos; } private _getAttributeInfos(): WebGLActiveInfo[] { const gl = this._gl; const program = this._glProgram; const attributeInfos: WebGLActiveInfo[] = []; const attributeCount = gl.getProgramParameter(program, gl.ACTIVE_ATTRIBUTES); for (let i = 0; i < attributeCount; ++i) { const info = gl.getActiveAttrib(program, i); attributeInfos[i] = info; } return attributeInfos; } }
the_stack
module Kiwi.Utils { /** * Utility class used to make color management more transparent. * Color objects hold color and alpha values, and can get or set them * in a variety of ways. * * Construct this object in one of the following ways. * * - Pass 3 or 4 numbers to determine RGB or RGBA. If the numbers are in * the range 0-1, they will be parsed as normalized numbers. * If they are in the range 1-255, they will be parsed as 8-bit channels. * * - Pass 3 or 4 numbers followed by the string "hsv" or "hsl" * (lowercase) to parse HSV or HSL color space (with optional alpha). * HSV and HSL colors may be specified as normalized parameters (0-1), * or as an angle (0-360) and two percentages (0-100). * * - Pass a string containing a hexadecimal color with or without alpha * (such as "ff8040ff" or "4080ff"). You may prepend "#" or "0x", but * they are not necessary and will be stripped. * * - Pass a string containing a CSS color function, such as * "rgb(255,255,255)", "rgba( 192, 127, 64, 32 )", * "hsl(180, 100, 100)", or "hsla(360, 50, 50, 50)". * * - Pass 1 number to set a grayscale value, or 2 numbers to set grayscale * with alpha. These are interpreted as with RGB values. * * The color object stores its internal values as normalized RGBA channels. * This is the most mathematically useful format, and corresponds * with the WebGL color paradigm. When you query the color object's values, * such as with "r" or "red" properties, it will return normalized values. * You can get values in the 0-255 8-bit range by calling the * corresponding x255 value. For example, if r = 1, then r255 = 255. * * We advise that you work with normalized colors wherever possible. * While the Color object is smart enough to recognise non-normalized * ranges in most cases, it cannot tell the difference between 0.5 on a * 0-1 scale, and 0.5 on a 0-255 scale. Try to reduce ambiguity by working * in normalized color space. 
* * You can get HSV, HSL, and hexadecimal values with the functions * "getHsva", "getHsla", and "getHex". By default, these all include an * alpha term. You can omit alpha from the getHex result by calling the * function with the parameter "false". As getHsva and getHsla return objects * rather than strings, you can freely ignore the provided alpha. * * You can modify a Color object once created using its properties, methods, * or the "set" method as you would use the constructor. * * @class Color * @constructor * @param [...args] Any number of arguments * @since 1.2.0 */ export class Color { constructor( ...args ) { this.set.apply( this, args ); return this; } /** * Set colors from parameters, as in the class description. * If you supply invalid parameters, the color will be unchanged. * @method set * @param params {object} Composite parameter object * @return {Kiwi.Utils.Color} This object with the new color set * @public */ public set( ...params ) { if ( params.length === 3 ) { // RGB this.r = params[ 0 ]; this.g = params[ 1 ]; this.b = params[ 2 ]; } else if ( params.length === 4 ) { if ( !isNaN( params[ 3 ] ) ) { // RGBA this.r = params[ 0 ]; this.g = params[ 1 ]; this.b = params[ 2 ]; this.a = params[ 3 ]; } else if ( params[ 3 ] === "hsv" ) { // HSV this.parseHsv( params[ 0 ], params[ 1 ], params[ 2 ] ); } else if ( params[ 3 ] === "hsl" ) { // HSL this.parseHsl( params[ 0 ], params[ 1 ], params[ 2 ] ); } } else if ( params.length === 5 ) { if ( params [ 4 ] === "hsv" ) { // HSVA this.parseHsv( params[ 0 ], params[ 1 ], params[ 2 ], params[ 3 ] ); } else if ( params [ 4 ] === "hsl" ) { // HSLA this.parseHsl( params[ 0 ], params[ 1 ], params[ 2 ], params[ 3 ] ); } } else if ( params.length === 1 ) { if ( typeof params[ 0 ] === "string" ) { // String format this.parseString( params[ 0 ] ); } else if ( !isNaN( params[ 0 ] ) ) { // Grayscale this.r = params[ 0 ]; this.g = params[ 0 ]; this.b = params[ 0 ]; } } else if ( params.length === 2 ) { // Grayscale and 
alpha this.r = params[ 0 ]; this.g = params[ 0 ]; this.b = params[ 0 ]; this.a = params[ 1 ]; } return this; } /** * Red channel, stored as a normalized value between 0 and 1. * This is most compatible with graphics hardware. * @property _r * @type number * @default 0.5 * @private */ public _r: number = 0.5; /** * Green channel, stored as a normalized value between 0 and 1. * This is most compatible with graphics hardware. * @property _g * @type number * @default 0.5 * @private */ public _g: number = 0.5; /** * Blue channel, stored as a normalized value between 0 and 1. * This is most compatible with graphics hardware. * @property _b * @type number * @default 0.5 * @private */ public _b: number = 0.5; /** * Alpha channel, stored as a normalized value between 0 and 1. * This is most compatible with graphics hardware. * @property _a * @type number * @default 0.5 * @private */ public _a: number = 1; /** * Red channel, stored as a normalized value between 0 and 1. * @property rNorm * @type number * @public */ public get rNorm(): number { return this._r; } public set rNorm( value: number ) { if ( !isNaN( value ) ) { this._r = value; } } /** * Green channel, stored as a normalized value between 0 and 1. * @property gNorm * @type number * @public */ public get gNorm(): number { return this._g; } public set gNorm( value: number ) { if ( !isNaN( value ) ) { this._g = value; } } /** * Blue channel, stored as a normalized value between 0 and 1. * @property bNorm * @type number * @public */ public get bNorm(): number { return this._b; } public set bNorm( value: number ) { if ( !isNaN( value ) ) { this._b = value; } } /** * Alpha channel, stored as a normalized value between 0 and 1. * @property aNorm * @type number * @public */ public get aNorm(): number { return this._a; } public set aNorm( value: number ) { if ( !isNaN( value ) ) { this._a = value; } } /** * Red channel. * If set to a number in the range 0-1, is interpreted as a * normalized color (see rNorm). 
* If set to a number above 1, is interpreted as an 8-bit channel * (see r255). * If queried, returns a normalized number in the range 0-1. * @property r * @type number * @public */ public get r(): number { return this._r; } public set r( value: number ) { if ( value > 1 ) { this.r255 = value; } else { this.rNorm = value; } } /** * Green channel. * If set to a number in the range 0-1, is interpreted as a * normalized color (see gNorm). * If set to a number above 1, is interpreted as an 8-bit channel * (see g255). * If queried, returns a normalized number in the range 0-1. * @property g * @type number * @public */ public get g(): number { return this._g; } public set g( value: number ) { if ( value > 1 ) { this.g255 = value; } else { this.gNorm = value; } } /** * Blue channel. * If set to a number in the range 0-1, is interpreted as a * normalized color (see bNorm). * If set to a number above 1, is interpreted as an 8-bit channel * (see b255). * If queried, returns a normalized number in the range 0-1. * @property b * @type number * @public */ public get b(): number { return this._b; } public set b( value: number ) { if ( value > 1 ) { this.b255 = value; } else { this.bNorm = value; } } /** * Alpha channel. * If set to a number in the range 0-1, is interpreted as a * normalized color (see aNorm). * If set to a number above 1, is interpreted as an 8-bit channel * (see a255). * If queried, returns a normalized number in the range 0-1. * @property a * @type number * @public */ public get a(): number { return this._a; } public set a( value: number ) { if ( value > 1 ) { this.a255 = value; } else { this.aNorm = value; } } /** * Red channel, specified as an 8-bit channel in the range 0-255. * @property r255 * @type number * @public */ public get r255(): number { return Math.round( this._r * 255 ); } public set r255( value: number ) { if ( !isNaN( value ) ) { this._r = value / 255; } } /** * Green channel, specified as an 8-bit channel in the range 0-255. 
* @property g255 * @type number * @public */ public get g255(): number { return Math.round( this._g * 255 ); } public set g255( value: number ) { if ( !isNaN( value ) ) { this._g = value / 255; } } /** * Blue channel, specified as an 8-bit channel in the range 0-255. * @property b255 * @type number * @public */ public get b255(): number { return Math.round( this._b * 255 ); } public set b255( value: number ) { if ( !isNaN( value ) ) { this._b = value / 255; } } /** * Alpha channel, specified as an 8-bit channel in the range 0-255. * @property a255 * @type number * @public */ public get a255(): number { return Math.round( this._a * 255 ); } public set a255( value: number ) { if ( !isNaN( value ) ) { this._a = value / 255; } } /** * Red channel, alias of r * @property red * @type number * @public */ public get red(): number { return this.r; } public set red( value: number ) { this.r = value; } /** * Green channel, alias of g * @property green * @type number * @public */ public get green(): number { return this.g; } public set green( value: number ) { this.g = value; } /** * Blue channel, alias of b * @property blue * @type number * @public */ public get blue(): number { return this.b; } public set blue( value: number ) { this.b = value; } /** * Alpha channel, alias of a * @property alpha * @type number * @public */ public get alpha(): number { return this.a; } public set alpha( value: number ) { this.a = value; } /** * Parse colors from strings * @method parseString * @param color {string} A CSS color specification * @return {Kiwi.Utils.Color} This object with the new color set * @public */ public parseString( color: string ): Kiwi.Utils.Color { var colArray; color = color.toLowerCase(); // RGBA notation if ( color.slice( 0, 4 ) === "rgba" ) { color = color.replace( "rgba", "" ); color = color.replace( "(", "" ); color = color.replace( ")", "" ); colArray = color.split( "," ); this.r = +colArray[ 0 ]; this.g = +colArray[ 1 ]; this.b = +colArray[ 2 ]; this.a = 
+colArray[ 3 ]; } else if ( color.slice( 0, 3 ) === "rgb" ) { color = color.replace( "rgb", "" ); color = color.replace( "(", "" ); color = color.replace( ")", "" ); colArray = color.split( "," ); this.r = +colArray[ 0 ]; this.g = +colArray[ 1 ]; this.b = +colArray[ 2 ]; } else if ( color.slice( 0, 4 ) === "hsla" ) { color = color.replace( "hsla", "" ); color = color.replace( "(", "" ); color = color.replace( ")", "" ); colArray = color.split( "," ); this.parseHsl( +colArray[ 0 ], +colArray[ 1 ], +colArray[ 2 ], +colArray[ 3 ] ); } else if ( color.slice( 0, 3 ) === "hsl" ) { color = color.replace( "hsl", "" ); color = color.replace( "(", "" ); color = color.replace( ")", "" ); colArray = color.split( "," ); this.parseHsl( +colArray[ 0 ], +colArray[ 1 ], +colArray[ 2 ] ); } else { this.parseHex( color ); } return this; } /** * Parse hexadecimal colors from strings * @method parseHex * @param color {string} A hexadecimal color such as "ffffff" (no alpha) * or "ffffffff" (with alpha). Also supports "fff" and "ffff" * with 4-bit channels. 
* @return {Kiwi.Utils.Color} This object with the new color set * @public */ public parseHex( color: string ): Kiwi.Utils.Color { var bigint, r = this.r255, g = this.g255, b = this.b255, a = this.a255; // Strip leading signifiers if ( color.charAt( 0 ) === "#" ) { color = color.slice( 1 ); } if ( color.slice( 0, 2 ) === "0x" ) { color = color.slice( 2 ); } bigint = parseInt( color, 16 ); if ( color.length === 3 ) { r = 17 * ( ( bigint >> 8 ) & 15 ); g = 17 * ( ( bigint >> 4 ) & 15 ); b = 17 * ( bigint & 15 ); } else if ( color.length === 4 ) { r = 17 * ( ( bigint >> 12 ) & 15 ); g = 17 * ( ( bigint >> 8 ) & 15 ); b = 17 * ( ( bigint >> 4 ) & 15 ); a = 17 * ( bigint & 15 ); } else if ( color.length === 6 ) { r = ( bigint >> 16 ) & 255; g = ( bigint >> 8 ) & 255; b = bigint & 255; a = 255; } else if ( color.length === 8 ) { r = ( bigint >> 24 ) & 255; g = ( bigint >> 16 ) & 255; b = ( bigint >> 8 ) & 255; a = bigint & 255; } this.r255 = r; this.g255 = g; this.b255 = b; this.a255 = a; return this; } /** * Returns color as a hexadecimal string * @method getHex * @param [alpha=true] {boolean} Whether to include the alpha * @return {string} A hexadecimal color such as "13579bdf" * @public */ public getHex( alpha: boolean = true ): string { var subStr, str = ""; subStr = this.r255.toString( 16 ); while( subStr.length < 2 ) { subStr = "0" + subStr; } str += subStr; subStr = this.g255.toString( 16 ); while( subStr.length < 2 ) { subStr = "0" + subStr; } str += subStr; subStr = this.b255.toString( 16 ); while( subStr.length < 2 ) { subStr = "0" + subStr; } str += subStr; if ( alpha ) { subStr = this.a255.toString( 16 ); while( subStr.length < 2 ) { subStr = "0" + subStr; } str += subStr; } return str; } /** * Parses normalized HSV values into the Color. * Interprets either normalized values, or H in degrees (0-360) * and S and V in % (0-100). 
* * Based on algorithms at * http://axonflux.com/handy-rgb-to-hsl-and-rgb-to-hsv-color-model-c * @method parseHsv * @param h {number} Hue * @param s {number} Saturation * @param v {number} Value * @param a {number} Alpha * @return {Kiwi.Utils.Color} This object with the new color set * @public */ public parseHsv( h: number, s: number, v: number, a: number = 1 ): Kiwi.Utils.Color { var r, g, b, i, f, p, q, t; if ( isNaN( h ) || isNaN( s ) || isNaN( v ) || isNaN( a ) ) { return this; } if ( h > 1 ) { h /= 360; } if ( s > 1 ) { s /= 100; } if ( v > 1 ) { v /= 100; } if ( a > 1 ) { a /= 255; } i = Math.floor( h * 6 ); f = h * 6 - i; p = v * ( 1 - s ); q = v * ( 1 - f * s ); t = v * ( 1 - (1 - f ) * s ); switch ( i % 6 ) { case 0: r = v; g = t; b = p; break; case 1: r = q; g = v; b = p; break; case 2: r = p; g = v; b = t; break; case 3: r = p; g = q; b = v; break; case 4: r = t; g = p; b = v; break; case 5: r = v; g = p; b = q; break; } this._r = r; this._g = g; this._b = b; this._a = a; return this; } /** * Returns HSV value of the Color. * Based on algorithms at * http://axonflux.com/handy-rgb-to-hsl-and-rgb-to-hsv-color-model-c * @method getHsva * @return {object} Object with normalized h, s, v, a properties. */ public getHsva(): any { var h, s, v, d, r = this._r, g = this._g, b = this._b, max = Math.max( r, g, b ), min = Math.min( r, g, b ); h = max; s = max; v = max; d = max - min; s = max === 0 ? 0 : d / max; if ( max === min ) { // Achromatic h = 0; } else { switch( max ) { case r: h = ( g - b ) / d + ( g < b ? 6 : 0 ); break; case g: h = ( b - r ) / d + 2; break; case b: h = ( r - g ) / d + 4; break; } h /= 6; } return { h: h, s: s, v: v, a: this._a }; } /** * Parses HSL value onto the Color. * Interprets either normalized values, or H in degrees (0-360) * and S and L in % (0-100). 
* * Based on algorithms at * http://axonflux.com/handy-rgb-to-hsl-and-rgb-to-hsv-color-model-c * @method parseHsl * @param h {number} Hue * @param s {number} Saturation * @param l {number} Lightness * @param a {number} Alpha * @return {Kiwi.Utils.Color} This object with the new color set * @public */ public parseHsl( h: number, s: number, l: number, a: number = 1 ): Kiwi.Utils.Color { var q, p, r = this._r, g = this._g, b = this._b; // Sanitize values if ( isNaN( h ) || isNaN( s ) || isNaN( l ) || isNaN( a ) ) { return this; } if ( h > 1 ) { h /= 360; } if ( s > 1 ) { s /= 100; } if ( l > 1 ) { l /= 100; } if ( a > 1 ) { a /= 255; } if ( s === 0 ) { // Achromatic r = l; g = l; b = l; } else { q = l < 0.5 ? l * ( 1 + s) : l + s - l * s; p = 2 * l - q; r = this._hue2rgb( p, q, h + 1 / 3 ); g = this._hue2rgb( p, q, h ); b = this._hue2rgb( p, q, h - 1 / 3 ); } this._r = r; this._g = g; this._b = b; this._a = a; return this; } /** * Returns HSL value of the Color. * Based on algorithms at * http://axonflux.com/handy-rgb-to-hsl-and-rgb-to-hsv-color-model-c * @method getHsla * @return {object} Object with normalized h, s, l, a properties. * @public */ public getHsla(): any { var d, r = this._r, g = this._g, b = this._b, max = Math.max( r, g, b ), min = Math.min( r, g, b ), h = ( max + min ) / 2, s = ( max + min ) / 2, l = ( max + min ) / 2; if ( max == min ) { // Achromatic h = 0; s = 0; } else { d = max - min; s = l > 0.5 ? d / ( 2 - max - min ) : d / ( max + min ); switch( max ) { case r: h = ( g - b ) / d + ( g < b ? 6 : 0 ); break; case g: h = ( b - r ) / d + 2; break; case b: h = ( r - g ) / d + 4; break; } h /= 6; } return { h: h, s: s, l: l, a: this._a }; } /** * Method used for computing HSL values. 
* Based on algorithms at * http://axonflux.com/handy-rgb-to-hsl-and-rgb-to-hsv-color-model-c * @method _hue2rgb * @param p {number} * @param q {number} * @param t {number} * @return number * @private */ private _hue2rgb( p: number, q: number, t: number ): number { if ( t < 0 ) { t += 1; } if ( t > 1 ) { t -= 1; } if ( t < 1 / 6 ) { return p + ( q - p ) * 6 * t; } if ( t < 1 / 2 ) { return q; } if ( t < 2 / 3 ) { return p + ( q - p ) * ( 2 / 3 - t ) * 6; } return p; } } }
the_stack
import { Shape } from '../shapes/Shape' import { Vec3 } from '../math/Vec3' import { Transform } from '../math/Transform' import { Quaternion } from '../math/Quaternion' import { Body } from '../objects/Body' import { AABB } from '../collision/AABB' import { Ray } from '../collision/Ray' import { Vec3Pool } from '../utils/Vec3Pool' import { ContactEquation } from '../equations/ContactEquation' import { FrictionEquation } from '../equations/FrictionEquation' import type { Box } from '../shapes/Box' import type { Sphere } from '../shapes/Sphere' import type { ConvexPolyhedron, ConvexPolyhedronContactPoint } from '../shapes/ConvexPolyhedron' import type { Particle } from '../shapes/Particle' import type { Plane } from '../shapes/Plane' import type { Trimesh } from '../shapes/Trimesh' import type { Heightfield } from '../shapes/Heightfield' import { Cylinder } from '../shapes/Cylinder' import type { ContactMaterial } from '../material/ContactMaterial' import type { World } from '../world/World' // Naming rule: based of the order in SHAPE_TYPES, // the first part of the method is formed by the // shape type that comes before, in the second part // there is the shape type that comes after in the SHAPE_TYPES list export const COLLISION_TYPES = { sphereSphere: Shape.types.SPHERE as 1, spherePlane: (Shape.types.SPHERE | Shape.types.PLANE) as 3, boxBox: (Shape.types.BOX | Shape.types.BOX) as 4, sphereBox: (Shape.types.SPHERE | Shape.types.BOX) as 5, planeBox: (Shape.types.PLANE | Shape.types.BOX) as 6, convexConvex: Shape.types.CONVEXPOLYHEDRON as 16, sphereConvex: (Shape.types.SPHERE | Shape.types.CONVEXPOLYHEDRON) as 17, planeConvex: (Shape.types.PLANE | Shape.types.CONVEXPOLYHEDRON) as 18, boxConvex: (Shape.types.BOX | Shape.types.CONVEXPOLYHEDRON) as 20, sphereHeightfield: (Shape.types.SPHERE | Shape.types.HEIGHTFIELD) as 33, boxHeightfield: (Shape.types.BOX | Shape.types.HEIGHTFIELD) as 36, convexHeightfield: (Shape.types.CONVEXPOLYHEDRON | Shape.types.HEIGHTFIELD) as 
48, sphereParticle: (Shape.types.PARTICLE | Shape.types.SPHERE) as 65, planeParticle: (Shape.types.PLANE | Shape.types.PARTICLE) as 66, boxParticle: (Shape.types.BOX | Shape.types.PARTICLE) as 68, convexParticle: (Shape.types.PARTICLE | Shape.types.CONVEXPOLYHEDRON) as 80, cylinderCylinder: Shape.types.CYLINDER as 128, sphereCylinder: (Shape.types.SPHERE | Shape.types.CYLINDER) as 129, planeCylinder: (Shape.types.PLANE | Shape.types.CYLINDER) as 130, boxCylinder: (Shape.types.BOX | Shape.types.CYLINDER) as 132, convexCylinder: (Shape.types.CONVEXPOLYHEDRON | Shape.types.CYLINDER) as 144, heightfieldCylinder: (Shape.types.HEIGHTFIELD | Shape.types.CYLINDER) as 160, particleCylinder: (Shape.types.PARTICLE | Shape.types.CYLINDER) as 192, sphereTrimesh: (Shape.types.SPHERE | Shape.types.TRIMESH) as 257, planeTrimesh: (Shape.types.PLANE | Shape.types.TRIMESH) as 258, } export type CollisionType = typeof COLLISION_TYPES[keyof typeof COLLISION_TYPES] /** * Helper class for the World. Generates ContactEquations. * @todo Sphere-ConvexPolyhedron contacts * @todo Contact reduction * @todo should move methods to prototype */ export class Narrowphase { /** * Internal storage of pooled contact points. */ contactPointPool: ContactEquation[] frictionEquationPool: FrictionEquation[] result: ContactEquation[] frictionResult: FrictionEquation[] /** * Pooled vectors. 
*/ v3pool: Vec3Pool world: World currentContactMaterial: ContactMaterial enableFrictionReduction: boolean get [COLLISION_TYPES.sphereSphere]() { return this.sphereSphere } get [COLLISION_TYPES.spherePlane]() { return this.spherePlane } get [COLLISION_TYPES.boxBox]() { return this.boxBox } get [COLLISION_TYPES.sphereBox]() { return this.sphereBox } get [COLLISION_TYPES.planeBox]() { return this.planeBox } get [COLLISION_TYPES.convexConvex]() { return this.convexConvex } get [COLLISION_TYPES.sphereConvex]() { return this.sphereConvex } get [COLLISION_TYPES.planeConvex]() { return this.planeConvex } get [COLLISION_TYPES.boxConvex]() { return this.boxConvex } get [COLLISION_TYPES.sphereHeightfield]() { return this.sphereHeightfield } get [COLLISION_TYPES.boxHeightfield]() { return this.boxHeightfield } get [COLLISION_TYPES.convexHeightfield]() { return this.convexHeightfield } get [COLLISION_TYPES.sphereParticle]() { return this.sphereParticle } get [COLLISION_TYPES.planeParticle]() { return this.planeParticle } get [COLLISION_TYPES.boxParticle]() { return this.boxParticle } get [COLLISION_TYPES.convexParticle]() { return this.convexParticle } get [COLLISION_TYPES.cylinderCylinder]() { return this.convexConvex } get [COLLISION_TYPES.sphereCylinder]() { return this.sphereConvex } get [COLLISION_TYPES.planeCylinder]() { return this.planeConvex } get [COLLISION_TYPES.boxCylinder]() { return this.boxConvex } get [COLLISION_TYPES.convexCylinder]() { return this.convexConvex } get [COLLISION_TYPES.heightfieldCylinder]() { return this.heightfieldCylinder } get [COLLISION_TYPES.particleCylinder]() { return this.particleCylinder } get [COLLISION_TYPES.sphereTrimesh]() { return this.sphereTrimesh } get [COLLISION_TYPES.planeTrimesh]() { return this.planeTrimesh } // get [COLLISION_TYPES.convexTrimesh]() { // return this.convexTrimesh // } constructor(world: World) { this.contactPointPool = [] this.frictionEquationPool = [] this.result = [] this.frictionResult = [] this.v3pool = 
new Vec3Pool() this.world = world this.currentContactMaterial = world.defaultContactMaterial this.enableFrictionReduction = false } /** * Make a contact object, by using the internal pool or creating a new one. */ createContactEquation( bi: Body, bj: Body, si: Shape, sj: Shape, overrideShapeA?: Shape | null, overrideShapeB?: Shape | null ): ContactEquation { let c if (this.contactPointPool.length) { c = this.contactPointPool.pop()! c.bi = bi c.bj = bj } else { c = new ContactEquation(bi, bj) } c.enabled = bi.collisionResponse && bj.collisionResponse && si.collisionResponse && sj.collisionResponse const cm = this.currentContactMaterial c.restitution = cm.restitution c.setSpookParams(cm.contactEquationStiffness, cm.contactEquationRelaxation, this.world.dt) const matA = si.material || bi.material const matB = sj.material || bj.material if (matA && matB && matA.restitution >= 0 && matB.restitution >= 0) { c.restitution = matA.restitution * matB.restitution } c.si = overrideShapeA || si c.sj = overrideShapeB || sj return c } createFrictionEquationsFromContact(contactEquation: ContactEquation, outArray: FrictionEquation[]): boolean { const bodyA = contactEquation.bi const bodyB = contactEquation.bj const shapeA = contactEquation.si! const shapeB = contactEquation.sj! const world = this.world const cm = this.currentContactMaterial // If friction or restitution were specified in the material, use them let friction = cm.friction const matA = shapeA.material || bodyA.material const matB = shapeB.material || bodyB.material if (matA && matB && matA.friction >= 0 && matB.friction >= 0) { friction = matA.friction * matB.friction } if (friction > 0) { // Create 2 tangent equations const mug = friction * world.gravity.length() let reducedMass = bodyA.invMass + bodyB.invMass if (reducedMass > 0) { reducedMass = 1 / reducedMass } const pool = this.frictionEquationPool const c1 = pool.length ? pool.pop()! 
: new FrictionEquation(bodyA, bodyB, mug * reducedMass) const c2 = pool.length ? pool.pop()! : new FrictionEquation(bodyA, bodyB, mug * reducedMass) c1.bi = c2.bi = bodyA c1.bj = c2.bj = bodyB c1.minForce = c2.minForce = -mug * reducedMass c1.maxForce = c2.maxForce = mug * reducedMass // Copy over the relative vectors c1.ri.copy(contactEquation.ri) c1.rj.copy(contactEquation.rj) c2.ri.copy(contactEquation.ri) c2.rj.copy(contactEquation.rj) // Construct tangents contactEquation.ni.tangents(c1.t, c2.t) // Set spook params c1.setSpookParams(cm.frictionEquationStiffness, cm.frictionEquationRelaxation, world.dt) c2.setSpookParams(cm.frictionEquationStiffness, cm.frictionEquationRelaxation, world.dt) c1.enabled = c2.enabled = contactEquation.enabled outArray.push(c1, c2) return true } return false } /** * Take the average N latest contact point on the plane. */ createFrictionFromAverage(numContacts: number): void { // The last contactEquation let c = this.result[this.result.length - 1] // Create the result: two "average" friction equations if (!this.createFrictionEquationsFromContact(c, this.frictionResult) || numContacts === 1) { return } const f1 = this.frictionResult[this.frictionResult.length - 2] const f2 = this.frictionResult[this.frictionResult.length - 1] averageNormal.setZero() averageContactPointA.setZero() averageContactPointB.setZero() const bodyA = c.bi const bodyB = c.bj for (let i = 0; i !== numContacts; i++) { c = this.result[this.result.length - 1 - i] if (c.bi !== bodyA) { averageNormal.vadd(c.ni, averageNormal) averageContactPointA.vadd(c.ri, averageContactPointA) averageContactPointB.vadd(c.rj, averageContactPointB) } else { averageNormal.vsub(c.ni, averageNormal) averageContactPointA.vadd(c.rj, averageContactPointA) averageContactPointB.vadd(c.ri, averageContactPointB) } } const invNumContacts = 1 / numContacts averageContactPointA.scale(invNumContacts, f1.ri) averageContactPointB.scale(invNumContacts, f1.rj) f2.ri.copy(f1.ri) // Should be the same 
f2.rj.copy(f1.rj) averageNormal.normalize() averageNormal.tangents(f1.t, f2.t) // return eq; } /** * Generate all contacts between a list of body pairs * @param p1 Array of body indices * @param p2 Array of body indices * @param result Array to store generated contacts * @param oldcontacts Optional. Array of reusable contact objects */ getContacts( p1: Body[], p2: Body[], world: World, result: ContactEquation[], oldcontacts: ContactEquation[], frictionResult: FrictionEquation[], frictionPool: FrictionEquation[] ): void { // Save old contact objects this.contactPointPool = oldcontacts this.frictionEquationPool = frictionPool this.result = result this.frictionResult = frictionResult const qi = tmpQuat1 const qj = tmpQuat2 const xi = tmpVec1 const xj = tmpVec2 for (let k = 0, N = p1.length; k !== N; k++) { // Get current collision bodies const bi = p1[k] const bj = p2[k] // Get contact material let bodyContactMaterial = null if (bi.material && bj.material) { bodyContactMaterial = world.getContactMaterial(bi.material, bj.material) || null } const justTest = (bi.type & Body.KINEMATIC && bj.type & Body.STATIC) || (bi.type & Body.STATIC && bj.type & Body.KINEMATIC) || (bi.type & Body.KINEMATIC && bj.type & Body.KINEMATIC) for (let i = 0; i < bi.shapes.length; i++) { bi.quaternion.mult(bi.shapeOrientations[i], qi) bi.quaternion.vmult(bi.shapeOffsets[i], xi) xi.vadd(bi.position, xi) const si = bi.shapes[i] for (let j = 0; j < bj.shapes.length; j++) { // Compute world transform of shapes bj.quaternion.mult(bj.shapeOrientations[j], qj) bj.quaternion.vmult(bj.shapeOffsets[j], xj) xj.vadd(bj.position, xj) const sj = bj.shapes[j] if (!(si.collisionFilterMask & sj.collisionFilterGroup && sj.collisionFilterMask & si.collisionFilterGroup)) { continue } if (xi.distanceTo(xj) > si.boundingSphereRadius + sj.boundingSphereRadius) { continue } // Get collision material let shapeContactMaterial = null if (si.material && sj.material) { shapeContactMaterial = 
world.getContactMaterial(si.material, sj.material) || null } this.currentContactMaterial = shapeContactMaterial || bodyContactMaterial || world.defaultContactMaterial // Get contacts const resolverIndex = (si.type | sj.type) as CollisionType const resolver = this[resolverIndex] if (resolver) { let retval = false // TO DO: investigate why sphereParticle and convexParticle // resolvers expect si and sj shapes to be in reverse order // (i.e. larger integer value type first instead of smaller first) if (si.type < sj.type) { retval = (resolver as any).call(this, si, sj, xi, xj, qi, qj, bi, bj, si, sj, justTest) } else { retval = (resolver as any).call(this, sj, si, xj, xi, qj, qi, bj, bi, si, sj, justTest) } if (retval && justTest) { // Register overlap world.shapeOverlapKeeper.set(si.id, sj.id) world.bodyOverlapKeeper.set(bi.id, bj.id) } } } } } } sphereSphere( si: Sphere, sj: Sphere, xi: Vec3, xj: Vec3, qi: Quaternion, qj: Quaternion, bi: Body, bj: Body, rsi?: Shape | null, rsj?: Shape | null, justTest?: boolean ): boolean | void { if (justTest) { return xi.distanceSquared(xj) < (si.radius + sj.radius) ** 2 } // We will have only one contact in this case const contactEq = this.createContactEquation(bi, bj, si, sj, rsi, rsj) // Contact normal xj.vsub(xi, contactEq.ni) contactEq.ni.normalize() // Contact point locations contactEq.ri.copy(contactEq.ni) contactEq.rj.copy(contactEq.ni) contactEq.ri.scale(si.radius, contactEq.ri) contactEq.rj.scale(-sj.radius, contactEq.rj) contactEq.ri.vadd(xi, contactEq.ri) contactEq.ri.vsub(bi.position, contactEq.ri) contactEq.rj.vadd(xj, contactEq.rj) contactEq.rj.vsub(bj.position, contactEq.rj) this.result.push(contactEq) this.createFrictionEquationsFromContact(contactEq, this.frictionResult) } spherePlane( si: Sphere, sj: Plane, xi: Vec3, xj: Vec3, qi: Quaternion, qj: Quaternion, bi: Body, bj: Body, rsi?: Shape | null, rsj?: Shape | null, justTest?: boolean ): true | void { // We will have one contact in this case const r = 
this.createContactEquation(bi, bj, si, sj, rsi, rsj) // Contact normal r.ni.set(0, 0, 1) qj.vmult(r.ni, r.ni) r.ni.negate(r.ni) // body i is the sphere, flip normal r.ni.normalize() // Needed? // Vector from sphere center to contact point r.ni.scale(si.radius, r.ri) // Project down sphere on plane xi.vsub(xj, point_on_plane_to_sphere) r.ni.scale(r.ni.dot(point_on_plane_to_sphere), plane_to_sphere_ortho) point_on_plane_to_sphere.vsub(plane_to_sphere_ortho, r.rj) // The sphere position projected to plane if (-point_on_plane_to_sphere.dot(r.ni) <= si.radius) { if (justTest) { return true } // Make it relative to the body const ri = r.ri const rj = r.rj ri.vadd(xi, ri) ri.vsub(bi.position, ri) rj.vadd(xj, rj) rj.vsub(bj.position, rj) this.result.push(r) this.createFrictionEquationsFromContact(r, this.frictionResult) } } boxBox( si: Box, sj: Box, xi: Vec3, xj: Vec3, qi: Quaternion, qj: Quaternion, bi: Body, bj: Body, rsi?: Shape | null, rsj?: Shape | null, justTest?: boolean ): true | void { si.convexPolyhedronRepresentation.material = si.material sj.convexPolyhedronRepresentation.material = sj.material si.convexPolyhedronRepresentation.collisionResponse = si.collisionResponse sj.convexPolyhedronRepresentation.collisionResponse = sj.collisionResponse return this.convexConvex( si.convexPolyhedronRepresentation, sj.convexPolyhedronRepresentation, xi, xj, qi, qj, bi, bj, si, sj, justTest ) } sphereBox( si: Sphere, sj: Box, xi: Vec3, xj: Vec3, qi: Quaternion, qj: Quaternion, bi: Body, bj: Body, rsi?: Shape | null, rsj?: Shape | null, justTest?: boolean ): true | void { const v3pool = this.v3pool // we refer to the box as body j const sides = sphereBox_sides xi.vsub(xj, box_to_sphere) sj.getSideNormals(sides, qj) const R = si.radius const penetrating_sides = [] // Check side (plane) intersections let found = false // Store the resulting side penetration info const side_ns = sphereBox_side_ns const side_ns1 = sphereBox_side_ns1 const side_ns2 = sphereBox_side_ns2 let side_h: 
number | null = null let side_penetrations = 0 let side_dot1 = 0 let side_dot2 = 0 let side_distance = null for (let idx = 0, nsides = sides.length; idx !== nsides && found === false; idx++) { // Get the plane side normal (ns) const ns = sphereBox_ns ns.copy(sides[idx]) const h = ns.length() ns.normalize() // The normal/distance dot product tells which side of the plane we are const dot = box_to_sphere.dot(ns) if (dot < h + R && dot > 0) { // Intersects plane. Now check the other two dimensions const ns1 = sphereBox_ns1 const ns2 = sphereBox_ns2 ns1.copy(sides[(idx + 1) % 3]) ns2.copy(sides[(idx + 2) % 3]) const h1 = ns1.length() const h2 = ns2.length() ns1.normalize() ns2.normalize() const dot1 = box_to_sphere.dot(ns1) const dot2 = box_to_sphere.dot(ns2) if (dot1 < h1 && dot1 > -h1 && dot2 < h2 && dot2 > -h2) { const dist = Math.abs(dot - h - R) if (side_distance === null || dist < side_distance) { side_distance = dist side_dot1 = dot1 side_dot2 = dot2 side_h = h side_ns.copy(ns) side_ns1.copy(ns1) side_ns2.copy(ns2) side_penetrations++ if (justTest) { return true } } } } } if (side_penetrations) { found = true const r = this.createContactEquation(bi, bj, si, sj, rsi, rsj) side_ns.scale(-R, r.ri) // Sphere r r.ni.copy(side_ns) r.ni.negate(r.ni) // Normal should be out of sphere side_ns.scale(side_h!, side_ns) side_ns1.scale(side_dot1, side_ns1) side_ns.vadd(side_ns1, side_ns) side_ns2.scale(side_dot2, side_ns2) side_ns.vadd(side_ns2, r.rj) // Make relative to bodies r.ri.vadd(xi, r.ri) r.ri.vsub(bi.position, r.ri) r.rj.vadd(xj, r.rj) r.rj.vsub(bj.position, r.rj) this.result.push(r) this.createFrictionEquationsFromContact(r, this.frictionResult) } // Check corners let rj = v3pool.get() const sphere_to_corner = sphereBox_sphere_to_corner for (let j = 0; j !== 2 && !found; j++) { for (let k = 0; k !== 2 && !found; k++) { for (let l = 0; l !== 2 && !found; l++) { rj.set(0, 0, 0) if (j) { rj.vadd(sides[0], rj) } else { rj.vsub(sides[0], rj) } if (k) { rj.vadd(sides[1], 
rj) } else { rj.vsub(sides[1], rj) } if (l) { rj.vadd(sides[2], rj) } else { rj.vsub(sides[2], rj) } // World position of corner xj.vadd(rj, sphere_to_corner) sphere_to_corner.vsub(xi, sphere_to_corner) if (sphere_to_corner.lengthSquared() < R * R) { if (justTest) { return true } found = true const r = this.createContactEquation(bi, bj, si, sj, rsi, rsj) r.ri.copy(sphere_to_corner) r.ri.normalize() r.ni.copy(r.ri) r.ri.scale(R, r.ri) r.rj.copy(rj) // Make relative to bodies r.ri.vadd(xi, r.ri) r.ri.vsub(bi.position, r.ri) r.rj.vadd(xj, r.rj) r.rj.vsub(bj.position, r.rj) this.result.push(r) this.createFrictionEquationsFromContact(r, this.frictionResult) } } } } v3pool.release(rj) rj = null // Check edges const edgeTangent = v3pool.get() const edgeCenter = v3pool.get() const r = v3pool.get() // r = edge center to sphere center const orthogonal = v3pool.get() const dist = v3pool.get() const Nsides = sides.length for (let j = 0; j !== Nsides && !found; j++) { for (let k = 0; k !== Nsides && !found; k++) { if (j % 3 !== k % 3) { // Get edge tangent sides[k].cross(sides[j], edgeTangent) edgeTangent.normalize() sides[j].vadd(sides[k], edgeCenter) r.copy(xi) r.vsub(edgeCenter, r) r.vsub(xj, r) const orthonorm = r.dot(edgeTangent) // distance from edge center to sphere center in the tangent direction edgeTangent.scale(orthonorm, orthogonal) // Vector from edge center to sphere center in the tangent direction // Find the third side orthogonal to this one let l = 0 while (l === j % 3 || l === k % 3) { l++ } // vec from edge center to sphere projected to the plane orthogonal to the edge tangent dist.copy(xi) dist.vsub(orthogonal, dist) dist.vsub(edgeCenter, dist) dist.vsub(xj, dist) // Distances in tangent direction and distance in the plane orthogonal to it const tdist = Math.abs(orthonorm) const ndist = dist.length() if (tdist < sides[l].length() && ndist < R) { if (justTest) { return true } found = true const res = this.createContactEquation(bi, bj, si, sj, rsi, rsj) 
edgeCenter.vadd(orthogonal, res.rj) // box rj res.rj.copy(res.rj) dist.negate(res.ni) res.ni.normalize() res.ri.copy(res.rj) res.ri.vadd(xj, res.ri) res.ri.vsub(xi, res.ri) res.ri.normalize() res.ri.scale(R, res.ri) // Make relative to bodies res.ri.vadd(xi, res.ri) res.ri.vsub(bi.position, res.ri) res.rj.vadd(xj, res.rj) res.rj.vsub(bj.position, res.rj) this.result.push(res) this.createFrictionEquationsFromContact(res, this.frictionResult) } } } } v3pool.release(edgeTangent, edgeCenter, r, orthogonal, dist) } planeBox( si: Plane, sj: Box, xi: Vec3, xj: Vec3, qi: Quaternion, qj: Quaternion, bi: Body, bj: Body, rsi?: Shape | null, rsj?: Shape | null, justTest?: boolean ): true | void { sj.convexPolyhedronRepresentation.material = sj.material sj.convexPolyhedronRepresentation.collisionResponse = sj.collisionResponse sj.convexPolyhedronRepresentation.id = sj.id return this.planeConvex(si, sj.convexPolyhedronRepresentation, xi, xj, qi, qj, bi, bj, si, sj, justTest) } convexConvex( si: ConvexPolyhedron, sj: ConvexPolyhedron, xi: Vec3, xj: Vec3, qi: Quaternion, qj: Quaternion, bi: Body, bj: Body, rsi?: Shape | null, rsj?: Shape | null, justTest?: boolean, faceListA?: number[] | null, faceListB?: number[] | null ): true | void { const sepAxis = convexConvex_sepAxis if (xi.distanceTo(xj) > si.boundingSphereRadius + sj.boundingSphereRadius) { return } if (si.findSeparatingAxis(sj, xi, qi, xj, qj, sepAxis, faceListA, faceListB)) { const res: ConvexPolyhedronContactPoint[] = [] const q = convexConvex_q si.clipAgainstHull(xi, qi, sj, xj, qj, sepAxis, -100, 100, res) let numContacts = 0 for (let j = 0; j !== res.length; j++) { if (justTest) { return true } const r = this.createContactEquation(bi, bj, si, sj, rsi, rsj) const ri = r.ri const rj = r.rj sepAxis.negate(r.ni) res[j].normal.negate(q) q.scale(res[j].depth, q) res[j].point.vadd(q, ri) rj.copy(res[j].point) // Contact points are in world coordinates. 
Transform back to relative ri.vsub(xi, ri) rj.vsub(xj, rj) // Make relative to bodies ri.vadd(xi, ri) ri.vsub(bi.position, ri) rj.vadd(xj, rj) rj.vsub(bj.position, rj) this.result.push(r) numContacts++ if (!this.enableFrictionReduction) { this.createFrictionEquationsFromContact(r, this.frictionResult) } } if (this.enableFrictionReduction && numContacts) { this.createFrictionFromAverage(numContacts) } } } sphereConvex( si: Sphere, sj: ConvexPolyhedron, xi: Vec3, xj: Vec3, qi: Quaternion, qj: Quaternion, bi: Body, bj: Body, rsi?: Shape | null, rsj?: Shape | null, justTest?: boolean ): true | void { const v3pool = this.v3pool xi.vsub(xj, convex_to_sphere) const normals = sj.faceNormals const faces = sj.faces const verts = sj.vertices const R = si.radius const penetrating_sides = [] // if(convex_to_sphere.lengthSquared() > si.boundingSphereRadius + sj.boundingSphereRadius){ // return; // } let found = false // Check corners for (let i = 0; i !== verts.length; i++) { const v = verts[i] // World position of corner const worldCorner = sphereConvex_worldCorner qj.vmult(v, worldCorner) xj.vadd(worldCorner, worldCorner) const sphere_to_corner = sphereConvex_sphereToCorner worldCorner.vsub(xi, sphere_to_corner) if (sphere_to_corner.lengthSquared() < R * R) { if (justTest) { return true } found = true const r = this.createContactEquation(bi, bj, si, sj, rsi, rsj) r.ri.copy(sphere_to_corner) r.ri.normalize() r.ni.copy(r.ri) r.ri.scale(R, r.ri) worldCorner.vsub(xj, r.rj) // Should be relative to the body. r.ri.vadd(xi, r.ri) r.ri.vsub(bi.position, r.ri) // Should be relative to the body. 
r.rj.vadd(xj, r.rj) r.rj.vsub(bj.position, r.rj) this.result.push(r) this.createFrictionEquationsFromContact(r, this.frictionResult) return } } // Check side (plane) intersections for (let i = 0, nfaces = faces.length; i !== nfaces && found === false; i++) { const normal = normals[i] const face = faces[i] // Get world-transformed normal of the face const worldNormal = sphereConvex_worldNormal qj.vmult(normal, worldNormal) // Get a world vertex from the face const worldPoint = sphereConvex_worldPoint qj.vmult(verts[face[0]], worldPoint) worldPoint.vadd(xj, worldPoint) // Get a point on the sphere, closest to the face normal const worldSpherePointClosestToPlane = sphereConvex_worldSpherePointClosestToPlane worldNormal.scale(-R, worldSpherePointClosestToPlane) xi.vadd(worldSpherePointClosestToPlane, worldSpherePointClosestToPlane) // Vector from a face point to the closest point on the sphere const penetrationVec = sphereConvex_penetrationVec worldSpherePointClosestToPlane.vsub(worldPoint, penetrationVec) // The penetration. Negative value means overlap. const penetration = penetrationVec.dot(worldNormal) const worldPointToSphere = sphereConvex_sphereToWorldPoint xi.vsub(worldPoint, worldPointToSphere) if (penetration < 0 && worldPointToSphere.dot(worldNormal) > 0) { // Intersects plane. Now check if the sphere is inside the face polygon const faceVerts = [] // Face vertices, in world coords for (let j = 0, Nverts = face.length; j !== Nverts; j++) { const worldVertex = v3pool.get() qj.vmult(verts[face[j]], worldVertex) xj.vadd(worldVertex, worldVertex) faceVerts.push(worldVertex) } if (pointInPolygon(faceVerts, worldNormal, xi)) { // Is the sphere center in the face polygon? 
if (justTest) { return true } found = true const r = this.createContactEquation(bi, bj, si, sj, rsi, rsj) worldNormal.scale(-R, r.ri) // Contact offset, from sphere center to contact worldNormal.negate(r.ni) // Normal pointing out of sphere const penetrationVec2 = v3pool.get() worldNormal.scale(-penetration, penetrationVec2) const penetrationSpherePoint = v3pool.get() worldNormal.scale(-R, penetrationSpherePoint) //xi.vsub(xj).vadd(penetrationSpherePoint).vadd(penetrationVec2 , r.rj); xi.vsub(xj, r.rj) r.rj.vadd(penetrationSpherePoint, r.rj) r.rj.vadd(penetrationVec2, r.rj) // Should be relative to the body. r.rj.vadd(xj, r.rj) r.rj.vsub(bj.position, r.rj) // Should be relative to the body. r.ri.vadd(xi, r.ri) r.ri.vsub(bi.position, r.ri) v3pool.release(penetrationVec2) v3pool.release(penetrationSpherePoint) this.result.push(r) this.createFrictionEquationsFromContact(r, this.frictionResult) // Release world vertices for (let j = 0, Nfaceverts = faceVerts.length; j !== Nfaceverts; j++) { v3pool.release(faceVerts[j]) } return // We only expect *one* face contact } else { // Edge? 
for (let j = 0; j !== face.length; j++) { // Get two world transformed vertices const v1 = v3pool.get() const v2 = v3pool.get() qj.vmult(verts[face[(j + 1) % face.length]], v1) qj.vmult(verts[face[(j + 2) % face.length]], v2) xj.vadd(v1, v1) xj.vadd(v2, v2) // Construct edge vector const edge = sphereConvex_edge v2.vsub(v1, edge) // Construct the same vector, but normalized const edgeUnit = sphereConvex_edgeUnit edge.unit(edgeUnit) // p is xi projected onto the edge const p = v3pool.get() const v1_to_xi = v3pool.get() xi.vsub(v1, v1_to_xi) const dot = v1_to_xi.dot(edgeUnit) edgeUnit.scale(dot, p) p.vadd(v1, p) // Compute a vector from p to the center of the sphere const xi_to_p = v3pool.get() p.vsub(xi, xi_to_p) // Collision if the edge-sphere distance is less than the radius // AND if p is in between v1 and v2 if (dot > 0 && dot * dot < edge.lengthSquared() && xi_to_p.lengthSquared() < R * R) { // Collision if the edge-sphere distance is less than the radius // Edge contact! if (justTest) { return true } const r = this.createContactEquation(bi, bj, si, sj, rsi, rsj) p.vsub(xj, r.rj) p.vsub(xi, r.ni) r.ni.normalize() r.ni.scale(R, r.ri) // Should be relative to the body. r.rj.vadd(xj, r.rj) r.rj.vsub(bj.position, r.rj) // Should be relative to the body. 
r.ri.vadd(xi, r.ri) r.ri.vsub(bi.position, r.ri) this.result.push(r) this.createFrictionEquationsFromContact(r, this.frictionResult) // Release world vertices for (let j = 0, Nfaceverts = faceVerts.length; j !== Nfaceverts; j++) { v3pool.release(faceVerts[j]) } v3pool.release(v1) v3pool.release(v2) v3pool.release(p) v3pool.release(xi_to_p) v3pool.release(v1_to_xi) return } v3pool.release(v1) v3pool.release(v2) v3pool.release(p) v3pool.release(xi_to_p) v3pool.release(v1_to_xi) } } // Release world vertices for (let j = 0, Nfaceverts = faceVerts.length; j !== Nfaceverts; j++) { v3pool.release(faceVerts[j]) } } } } planeConvex( planeShape: Plane, convexShape: ConvexPolyhedron, planePosition: Vec3, convexPosition: Vec3, planeQuat: Quaternion, convexQuat: Quaternion, planeBody: Body, convexBody: Body, si?: Shape, sj?: Shape, justTest?: boolean ): true | void { // Simply return the points behind the plane. const worldVertex = planeConvex_v const worldNormal = planeConvex_normal worldNormal.set(0, 0, 1) planeQuat.vmult(worldNormal, worldNormal) // Turn normal according to plane orientation let numContacts = 0 const relpos = planeConvex_relpos for (let i = 0; i !== convexShape.vertices.length; i++) { // Get world convex vertex worldVertex.copy(convexShape.vertices[i]) convexQuat.vmult(worldVertex, worldVertex) convexPosition.vadd(worldVertex, worldVertex) worldVertex.vsub(planePosition, relpos) const dot = worldNormal.dot(relpos) if (dot <= 0.0) { if (justTest) { return true } const r = this.createContactEquation(planeBody, convexBody, planeShape, convexShape, si, sj) // Get vertex position projected on plane const projected = planeConvex_projected worldNormal.scale(worldNormal.dot(relpos), projected) worldVertex.vsub(projected, projected) projected.vsub(planePosition, r.ri) // From plane to vertex projected on plane r.ni.copy(worldNormal) // Contact normal is the plane normal out from plane // rj is now just the vector from the convex center to the vertex 
worldVertex.vsub(convexPosition, r.rj) // Make it relative to the body r.ri.vadd(planePosition, r.ri) r.ri.vsub(planeBody.position, r.ri) r.rj.vadd(convexPosition, r.rj) r.rj.vsub(convexBody.position, r.rj) this.result.push(r) numContacts++ if (!this.enableFrictionReduction) { this.createFrictionEquationsFromContact(r, this.frictionResult) } } } if (this.enableFrictionReduction && numContacts) { this.createFrictionFromAverage(numContacts) } } boxConvex( si: Box, sj: ConvexPolyhedron, xi: Vec3, xj: Vec3, qi: Quaternion, qj: Quaternion, bi: Body, bj: Body, rsi?: Shape | null, rsj?: Shape | null, justTest?: boolean ): true | void { si.convexPolyhedronRepresentation.material = si.material si.convexPolyhedronRepresentation.collisionResponse = si.collisionResponse return this.convexConvex(si.convexPolyhedronRepresentation, sj, xi, xj, qi, qj, bi, bj, si, sj, justTest) } sphereHeightfield( sphereShape: Sphere, hfShape: Heightfield, spherePos: Vec3, hfPos: Vec3, sphereQuat: Quaternion, hfQuat: Quaternion, sphereBody: Body, hfBody: Body, rsi?: Shape | null, rsj?: Shape | null, justTest?: boolean ): true | void { const data = hfShape.data const radius = sphereShape.radius const w = hfShape.elementSize const worldPillarOffset = sphereHeightfield_tmp2 // Get sphere position to heightfield local! 
const localSpherePos = sphereHeightfield_tmp1 Transform.pointToLocalFrame(hfPos, hfQuat, spherePos, localSpherePos) // Get the index of the data points to test against let iMinX = Math.floor((localSpherePos.x - radius) / w) - 1 let iMaxX = Math.ceil((localSpherePos.x + radius) / w) + 1 let iMinY = Math.floor((localSpherePos.y - radius) / w) - 1 let iMaxY = Math.ceil((localSpherePos.y + radius) / w) + 1 // Bail out if we are out of the terrain if (iMaxX < 0 || iMaxY < 0 || iMinX > data.length || iMinY > data[0].length) { return } // Clamp index to edges if (iMinX < 0) { iMinX = 0 } if (iMaxX < 0) { iMaxX = 0 } if (iMinY < 0) { iMinY = 0 } if (iMaxY < 0) { iMaxY = 0 } if (iMinX >= data.length) { iMinX = data.length - 1 } if (iMaxX >= data.length) { iMaxX = data.length - 1 } if (iMaxY >= data[0].length) { iMaxY = data[0].length - 1 } if (iMinY >= data[0].length) { iMinY = data[0].length - 1 } const minMax: number[] = [] hfShape.getRectMinMax(iMinX, iMinY, iMaxX, iMaxY, minMax) const min = minMax[0] const max = minMax[1] // Bail out if we can't touch the bounding height box if (localSpherePos.z - radius > max || localSpherePos.z + radius < min) { return } const result = this.result for (let i = iMinX; i < iMaxX; i++) { for (let j = iMinY; j < iMaxY; j++) { const numContactsBefore = result.length let intersecting = false // Lower triangle hfShape.getConvexTrianglePillar(i, j, false) Transform.pointToWorldFrame(hfPos, hfQuat, hfShape.pillarOffset, worldPillarOffset) if ( spherePos.distanceTo(worldPillarOffset) < hfShape.pillarConvex.boundingSphereRadius + sphereShape.boundingSphereRadius ) { intersecting = this.sphereConvex( sphereShape, hfShape.pillarConvex, spherePos, worldPillarOffset, sphereQuat, hfQuat, sphereBody, hfBody, sphereShape, hfShape, justTest ) as boolean } if (justTest && intersecting) { return true } // Upper triangle hfShape.getConvexTrianglePillar(i, j, true) Transform.pointToWorldFrame(hfPos, hfQuat, hfShape.pillarOffset, worldPillarOffset) if ( 
spherePos.distanceTo(worldPillarOffset) < hfShape.pillarConvex.boundingSphereRadius + sphereShape.boundingSphereRadius ) { intersecting = this.sphereConvex( sphereShape, hfShape.pillarConvex, spherePos, worldPillarOffset, sphereQuat, hfQuat, sphereBody, hfBody, sphereShape, hfShape, justTest ) as boolean } if (justTest && intersecting) { return true } const numContacts = result.length - numContactsBefore if (numContacts > 2) { return } /* // Skip all but 1 for (let k = 0; k < numContacts - 1; k++) { result.pop(); } */ } } } boxHeightfield( si: Box, sj: Heightfield, xi: Vec3, xj: Vec3, qi: Quaternion, qj: Quaternion, bi: Body, bj: Body, rsi?: Shape | null, rsj?: Shape | null, justTest?: boolean ): true | void { si.convexPolyhedronRepresentation.material = si.material si.convexPolyhedronRepresentation.collisionResponse = si.collisionResponse return this.convexHeightfield(si.convexPolyhedronRepresentation, sj, xi, xj, qi, qj, bi, bj, si, sj, justTest) } convexHeightfield( convexShape: ConvexPolyhedron, hfShape: Heightfield, convexPos: Vec3, hfPos: Vec3, convexQuat: Quaternion, hfQuat: Quaternion, convexBody: Body, hfBody: Body, rsi?: Shape | null, rsj?: Shape | null, justTest?: boolean ): true | void { const data = hfShape.data const w = hfShape.elementSize const radius = convexShape.boundingSphereRadius const worldPillarOffset = convexHeightfield_tmp2 const faceList = convexHeightfield_faceList // Get sphere position to heightfield local! 
const localConvexPos = convexHeightfield_tmp1 Transform.pointToLocalFrame(hfPos, hfQuat, convexPos, localConvexPos) // Get the index of the data points to test against let iMinX = Math.floor((localConvexPos.x - radius) / w) - 1 let iMaxX = Math.ceil((localConvexPos.x + radius) / w) + 1 let iMinY = Math.floor((localConvexPos.y - radius) / w) - 1 let iMaxY = Math.ceil((localConvexPos.y + radius) / w) + 1 // Bail out if we are out of the terrain if (iMaxX < 0 || iMaxY < 0 || iMinX > data.length || iMinY > data[0].length) { return } // Clamp index to edges if (iMinX < 0) { iMinX = 0 } if (iMaxX < 0) { iMaxX = 0 } if (iMinY < 0) { iMinY = 0 } if (iMaxY < 0) { iMaxY = 0 } if (iMinX >= data.length) { iMinX = data.length - 1 } if (iMaxX >= data.length) { iMaxX = data.length - 1 } if (iMaxY >= data[0].length) { iMaxY = data[0].length - 1 } if (iMinY >= data[0].length) { iMinY = data[0].length - 1 } const minMax: number[] = [] hfShape.getRectMinMax(iMinX, iMinY, iMaxX, iMaxY, minMax) const min = minMax[0] const max = minMax[1] // Bail out if we're cant touch the bounding height box if (localConvexPos.z - radius > max || localConvexPos.z + radius < min) { return } for (let i = iMinX; i < iMaxX; i++) { for (let j = iMinY; j < iMaxY; j++) { let intersecting = false // Lower triangle hfShape.getConvexTrianglePillar(i, j, false) Transform.pointToWorldFrame(hfPos, hfQuat, hfShape.pillarOffset, worldPillarOffset) if ( convexPos.distanceTo(worldPillarOffset) < hfShape.pillarConvex.boundingSphereRadius + convexShape.boundingSphereRadius ) { intersecting = this.convexConvex( convexShape, hfShape.pillarConvex, convexPos, worldPillarOffset, convexQuat, hfQuat, convexBody, hfBody, null, null, justTest, faceList, null ) as boolean } if (justTest && intersecting) { return true } // Upper triangle hfShape.getConvexTrianglePillar(i, j, true) Transform.pointToWorldFrame(hfPos, hfQuat, hfShape.pillarOffset, worldPillarOffset) if ( convexPos.distanceTo(worldPillarOffset) < 
hfShape.pillarConvex.boundingSphereRadius + convexShape.boundingSphereRadius ) { intersecting = this.convexConvex( convexShape, hfShape.pillarConvex, convexPos, worldPillarOffset, convexQuat, hfQuat, convexBody, hfBody, null, null, justTest, faceList, null ) as boolean } if (justTest && intersecting) { return true } } } } sphereParticle( sj: Sphere, si: Particle, xj: Vec3, xi: Vec3, qj: Quaternion, qi: Quaternion, bj: Body, bi: Body, rsi?: Shape | null, rsj?: Shape | null, justTest?: boolean ): true | void { // The normal is the unit vector from sphere center to particle center const normal = particleSphere_normal normal.set(0, 0, 1) xi.vsub(xj, normal) const lengthSquared = normal.lengthSquared() if (lengthSquared <= sj.radius * sj.radius) { if (justTest) { return true } const r = this.createContactEquation(bi, bj, si, sj, rsi, rsj) normal.normalize() r.rj.copy(normal) r.rj.scale(sj.radius, r.rj) r.ni.copy(normal) // Contact normal r.ni.negate(r.ni) r.ri.set(0, 0, 0) // Center of particle this.result.push(r) this.createFrictionEquationsFromContact(r, this.frictionResult) } } planeParticle( sj: Plane, si: Particle, xj: Vec3, xi: Vec3, qj: Quaternion, qi: Quaternion, bj: Body, bi: Body, rsi?: Shape | null, rsj?: Shape | null, justTest?: boolean ): true | void { const normal = particlePlane_normal normal.set(0, 0, 1) bj.quaternion.vmult(normal, normal) // Turn normal according to plane orientation const relpos = particlePlane_relpos xi.vsub(bj.position, relpos) const dot = normal.dot(relpos) if (dot <= 0.0) { if (justTest) { return true } const r = this.createContactEquation(bi, bj, si, sj, rsi, rsj) r.ni.copy(normal) // Contact normal is the plane normal r.ni.negate(r.ni) r.ri.set(0, 0, 0) // Center of particle // Get particle position projected on plane const projected = particlePlane_projected normal.scale(normal.dot(xi), projected) xi.vsub(projected, projected) //projected.vadd(bj.position,projected); // rj is now the projected world position minus plane position 
r.rj.copy(projected) this.result.push(r) this.createFrictionEquationsFromContact(r, this.frictionResult) } } boxParticle( si: Box, sj: Particle, xi: Vec3, xj: Vec3, qi: Quaternion, qj: Quaternion, bi: Body, bj: Body, rsi?: Shape | null, rsj?: Shape | null, justTest?: boolean ): true | void { si.convexPolyhedronRepresentation.material = si.material si.convexPolyhedronRepresentation.collisionResponse = si.collisionResponse return this.convexParticle(si.convexPolyhedronRepresentation, sj, xi, xj, qi, qj, bi, bj, si, sj, justTest) } convexParticle( sj: ConvexPolyhedron, si: Particle, xj: Vec3, xi: Vec3, qj: Quaternion, qi: Quaternion, bj: Body, bi: Body, rsi?: Shape | null, rsj?: Shape | null, justTest?: boolean ): true | void { let penetratedFaceIndex = -1 const penetratedFaceNormal = convexParticle_penetratedFaceNormal const worldPenetrationVec = convexParticle_worldPenetrationVec let minPenetration = null let numDetectedFaces = 0 // Convert particle position xi to local coords in the convex const local = convexParticle_local local.copy(xi) local.vsub(xj, local) // Convert position to relative the convex origin qj.conjugate(cqj) cqj.vmult(local, local) if (sj.pointIsInside(local)) { if (sj.worldVerticesNeedsUpdate) { sj.computeWorldVertices(xj, qj) } if (sj.worldFaceNormalsNeedsUpdate) { sj.computeWorldFaceNormals(qj) } // For each world polygon in the polyhedra for (let i = 0, nfaces = sj.faces.length; i !== nfaces; i++) { // Construct world face vertices const verts = [sj.worldVertices[sj.faces[i][0]]] const normal = sj.worldFaceNormals[i] // Check how much the particle penetrates the polygon plane. 
xi.vsub(verts[0], convexParticle_vertexToParticle) const penetration = -normal.dot(convexParticle_vertexToParticle) if (minPenetration === null || Math.abs(penetration) < Math.abs(minPenetration)) { if (justTest) { return true } minPenetration = penetration penetratedFaceIndex = i penetratedFaceNormal.copy(normal) numDetectedFaces++ } } if (penetratedFaceIndex !== -1) { // Setup contact const r = this.createContactEquation(bi, bj, si, sj, rsi, rsj) penetratedFaceNormal.scale(minPenetration!, worldPenetrationVec) // rj is the particle position projected to the face worldPenetrationVec.vadd(xi, worldPenetrationVec) worldPenetrationVec.vsub(xj, worldPenetrationVec) r.rj.copy(worldPenetrationVec) //const projectedToFace = xi.vsub(xj).vadd(worldPenetrationVec); //projectedToFace.copy(r.rj); //qj.vmult(r.rj,r.rj); penetratedFaceNormal.negate(r.ni) // Contact normal r.ri.set(0, 0, 0) // Center of particle const ri = r.ri const rj = r.rj // Make relative to bodies ri.vadd(xi, ri) ri.vsub(bi.position, ri) rj.vadd(xj, rj) rj.vsub(bj.position, rj) this.result.push(r) this.createFrictionEquationsFromContact(r, this.frictionResult) } else { console.warn('Point found inside convex, but did not find penetrating face!') } } } heightfieldCylinder( hfShape: Heightfield, convexShape: Cylinder, hfPos: Vec3, convexPos: Vec3, hfQuat: Quaternion, convexQuat: Quaternion, hfBody: Body, convexBody: Body, rsi?: Shape | null, rsj?: Shape | null, justTest?: boolean ): true | void { return this.convexHeightfield( convexShape as ConvexPolyhedron, hfShape, convexPos, hfPos, convexQuat, hfQuat, convexBody, hfBody, rsi, rsj, justTest ) } particleCylinder( si: Particle, sj: Cylinder, xi: Vec3, xj: Vec3, qi: Quaternion, qj: Quaternion, bi: Body, bj: Body, rsi?: Shape | null, rsj?: Shape | null, justTest?: boolean ): true | void { return this.convexParticle(sj as ConvexPolyhedron, si, xj, xi, qj, qi, bj, bi, rsi, rsj, justTest) } sphereTrimesh( sphereShape: Sphere, trimeshShape: Trimesh, spherePos: 
Vec3, trimeshPos: Vec3, sphereQuat: Quaternion, trimeshQuat: Quaternion, sphereBody: Body, trimeshBody: Body, rsi?: Shape | null, rsj?: Shape | null, justTest?: boolean ): true | void { const edgeVertexA = sphereTrimesh_edgeVertexA const edgeVertexB = sphereTrimesh_edgeVertexB const edgeVector = sphereTrimesh_edgeVector const edgeVectorUnit = sphereTrimesh_edgeVectorUnit const localSpherePos = sphereTrimesh_localSpherePos const tmp = sphereTrimesh_tmp const localSphereAABB = sphereTrimesh_localSphereAABB const v2 = sphereTrimesh_v2 const relpos = sphereTrimesh_relpos const triangles = sphereTrimesh_triangles // Convert sphere position to local in the trimesh Transform.pointToLocalFrame(trimeshPos, trimeshQuat, spherePos, localSpherePos) // Get the aabb of the sphere locally in the trimesh const sphereRadius = sphereShape.radius localSphereAABB.lowerBound.set( localSpherePos.x - sphereRadius, localSpherePos.y - sphereRadius, localSpherePos.z - sphereRadius ) localSphereAABB.upperBound.set( localSpherePos.x + sphereRadius, localSpherePos.y + sphereRadius, localSpherePos.z + sphereRadius ) trimeshShape.getTrianglesInAABB(localSphereAABB, triangles) //for (let i = 0; i < trimeshShape.indices.length / 3; i++) triangles.push(i); // All // Vertices const v = sphereTrimesh_v const radiusSquared = sphereShape.radius * sphereShape.radius for (let i = 0; i < triangles.length; i++) { for (let j = 0; j < 3; j++) { trimeshShape.getVertex(trimeshShape.indices[triangles[i] * 3 + j], v) // Check vertex overlap in sphere v.vsub(localSpherePos, relpos) if (relpos.lengthSquared() <= radiusSquared) { // Safe up v2.copy(v) Transform.pointToWorldFrame(trimeshPos, trimeshQuat, v2, v) v.vsub(spherePos, relpos) if (justTest) { return true } let r = this.createContactEquation(sphereBody, trimeshBody, sphereShape, trimeshShape, rsi, rsj) r.ni.copy(relpos) r.ni.normalize() // ri is the vector from sphere center to the sphere surface r.ri.copy(r.ni) r.ri.scale(sphereShape.radius, r.ri) 
r.ri.vadd(spherePos, r.ri) r.ri.vsub(sphereBody.position, r.ri) r.rj.copy(v) r.rj.vsub(trimeshBody.position, r.rj) // Store result this.result.push(r) this.createFrictionEquationsFromContact(r, this.frictionResult) } } } // Check all edges for (let i = 0; i < triangles.length; i++) { for (let j = 0; j < 3; j++) { trimeshShape.getVertex(trimeshShape.indices[triangles[i] * 3 + j], edgeVertexA) trimeshShape.getVertex(trimeshShape.indices[triangles[i] * 3 + ((j + 1) % 3)], edgeVertexB) edgeVertexB.vsub(edgeVertexA, edgeVector) // Project sphere position to the edge localSpherePos.vsub(edgeVertexB, tmp) const positionAlongEdgeB = tmp.dot(edgeVector) localSpherePos.vsub(edgeVertexA, tmp) let positionAlongEdgeA = tmp.dot(edgeVector) if (positionAlongEdgeA > 0 && positionAlongEdgeB < 0) { // Now check the orthogonal distance from edge to sphere center localSpherePos.vsub(edgeVertexA, tmp) edgeVectorUnit.copy(edgeVector) edgeVectorUnit.normalize() positionAlongEdgeA = tmp.dot(edgeVectorUnit) edgeVectorUnit.scale(positionAlongEdgeA, tmp) tmp.vadd(edgeVertexA, tmp) // tmp is now the sphere center position projected to the edge, defined locally in the trimesh frame const dist = tmp.distanceTo(localSpherePos) if (dist < sphereShape.radius) { if (justTest) { return true } const r = this.createContactEquation(sphereBody, trimeshBody, sphereShape, trimeshShape, rsi, rsj) tmp.vsub(localSpherePos, r.ni) r.ni.normalize() r.ni.scale(sphereShape.radius, r.ri) r.ri.vadd(spherePos, r.ri) r.ri.vsub(sphereBody.position, r.ri) Transform.pointToWorldFrame(trimeshPos, trimeshQuat, tmp, tmp) tmp.vsub(trimeshBody.position, r.rj) Transform.vectorToWorldFrame(trimeshQuat, r.ni, r.ni) Transform.vectorToWorldFrame(trimeshQuat, r.ri, r.ri) this.result.push(r) this.createFrictionEquationsFromContact(r, this.frictionResult) } } } } // Triangle faces const va = sphereTrimesh_va const vb = sphereTrimesh_vb const vc = sphereTrimesh_vc const normal = sphereTrimesh_normal for (let i = 0, N = 
triangles.length; i !== N; i++) { trimeshShape.getTriangleVertices(triangles[i], va, vb, vc) trimeshShape.getNormal(triangles[i], normal) localSpherePos.vsub(va, tmp) let dist = tmp.dot(normal) normal.scale(dist, tmp) localSpherePos.vsub(tmp, tmp) // tmp is now the sphere position projected to the triangle plane dist = tmp.distanceTo(localSpherePos) if (Ray.pointInTriangle(tmp, va, vb, vc) && dist < sphereShape.radius) { if (justTest) { return true } let r = this.createContactEquation(sphereBody, trimeshBody, sphereShape, trimeshShape, rsi, rsj) tmp.vsub(localSpherePos, r.ni) r.ni.normalize() r.ni.scale(sphereShape.radius, r.ri) r.ri.vadd(spherePos, r.ri) r.ri.vsub(sphereBody.position, r.ri) Transform.pointToWorldFrame(trimeshPos, trimeshQuat, tmp, tmp) tmp.vsub(trimeshBody.position, r.rj) Transform.vectorToWorldFrame(trimeshQuat, r.ni, r.ni) Transform.vectorToWorldFrame(trimeshQuat, r.ri, r.ri) this.result.push(r) this.createFrictionEquationsFromContact(r, this.frictionResult) } } triangles.length = 0 } planeTrimesh( planeShape: Plane, trimeshShape: Trimesh, planePos: Vec3, trimeshPos: Vec3, planeQuat: Quaternion, trimeshQuat: Quaternion, planeBody: Body, trimeshBody: Body, rsi?: Shape | null, rsj?: Shape | null, justTest?: boolean ): true | void { // Make contacts! 
const v = new Vec3() const normal = planeTrimesh_normal normal.set(0, 0, 1) planeQuat.vmult(normal, normal) // Turn normal according to plane for (let i = 0; i < trimeshShape.vertices.length / 3; i++) { // Get world vertex from trimesh trimeshShape.getVertex(i, v) // Safe up const v2 = new Vec3() v2.copy(v) Transform.pointToWorldFrame(trimeshPos, trimeshQuat, v2, v) // Check plane side const relpos = planeTrimesh_relpos v.vsub(planePos, relpos) const dot = normal.dot(relpos) if (dot <= 0.0) { if (justTest) { return true } const r = this.createContactEquation(planeBody, trimeshBody, planeShape, trimeshShape, rsi, rsj) r.ni.copy(normal) // Contact normal is the plane normal // Get vertex position projected on plane const projected = planeTrimesh_projected normal.scale(relpos.dot(normal), projected) v.vsub(projected, projected) // ri is the projected world position minus plane position r.ri.copy(projected) r.ri.vsub(planeBody.position, r.ri) r.rj.copy(v) r.rj.vsub(trimeshBody.position, r.rj) // Store result this.result.push(r) this.createFrictionEquationsFromContact(r, this.frictionResult) } } } // convexTrimesh( // si: ConvexPolyhedron, sj: Trimesh, xi: Vec3, xj: Vec3, qi: Quaternion, qj: Quaternion, // bi: Body, bj: Body, rsi?: Shape | null, rsj?: Shape | null, // faceListA?: number[] | null, faceListB?: number[] | null, // ) { // const sepAxis = convexConvex_sepAxis; // if(xi.distanceTo(xj) > si.boundingSphereRadius + sj.boundingSphereRadius){ // return; // } // // Construct a temp hull for each triangle // const hullB = new ConvexPolyhedron(); // hullB.faces = [[0,1,2]]; // const va = new Vec3(); // const vb = new Vec3(); // const vc = new Vec3(); // hullB.vertices = [ // va, // vb, // vc // ]; // for (let i = 0; i < sj.indices.length / 3; i++) { // const triangleNormal = new Vec3(); // sj.getNormal(i, triangleNormal); // hullB.faceNormals = [triangleNormal]; // sj.getTriangleVertices(i, va, vb, vc); // let d = si.testSepAxis(triangleNormal, hullB, xi, qi, xj, 
qj); // if(!d){ // triangleNormal.scale(-1, triangleNormal); // d = si.testSepAxis(triangleNormal, hullB, xi, qi, xj, qj); // if(!d){ // continue; // } // } // const res: ConvexPolyhedronContactPoint[] = []; // const q = convexConvex_q; // si.clipAgainstHull(xi,qi,hullB,xj,qj,triangleNormal,-100,100,res); // for(let j = 0; j !== res.length; j++){ // const r = this.createContactEquation(bi,bj,si,sj,rsi,rsj), // ri = r.ri, // rj = r.rj; // r.ni.copy(triangleNormal); // r.ni.negate(r.ni); // res[j].normal.negate(q); // q.mult(res[j].depth, q); // res[j].point.vadd(q, ri); // rj.copy(res[j].point); // // Contact points are in world coordinates. Transform back to relative // ri.vsub(xi,ri); // rj.vsub(xj,rj); // // Make relative to bodies // ri.vadd(xi, ri); // ri.vsub(bi.position, ri); // rj.vadd(xj, rj); // rj.vsub(bj.position, rj); // result.push(r); // } // } // } } const averageNormal = new Vec3() const averageContactPointA = new Vec3() const averageContactPointB = new Vec3() const tmpVec1 = new Vec3() const tmpVec2 = new Vec3() const tmpQuat1 = new Quaternion() const tmpQuat2 = new Quaternion() let numWarnings = 0 const maxWarnings = 10 function warn(msg: string): void { if (numWarnings > maxWarnings) { return } numWarnings++ console.warn(msg) } const planeTrimesh_normal = new Vec3() const planeTrimesh_relpos = new Vec3() const planeTrimesh_projected = new Vec3() const sphereTrimesh_normal = new Vec3() const sphereTrimesh_relpos = new Vec3() const sphereTrimesh_projected = new Vec3() const sphereTrimesh_v = new Vec3() const sphereTrimesh_v2 = new Vec3() const sphereTrimesh_edgeVertexA = new Vec3() const sphereTrimesh_edgeVertexB = new Vec3() const sphereTrimesh_edgeVector = new Vec3() const sphereTrimesh_edgeVectorUnit = new Vec3() const sphereTrimesh_localSpherePos = new Vec3() const sphereTrimesh_tmp = new Vec3() const sphereTrimesh_va = new Vec3() const sphereTrimesh_vb = new Vec3() const sphereTrimesh_vc = new Vec3() const sphereTrimesh_localSphereAABB = new 
AABB() const sphereTrimesh_triangles: number[] = [] const point_on_plane_to_sphere = new Vec3() const plane_to_sphere_ortho = new Vec3() // See http://bulletphysics.com/Bullet/BulletFull/SphereTriangleDetector_8cpp_source.html const pointInPolygon_edge = new Vec3() const pointInPolygon_edge_x_normal = new Vec3() const pointInPolygon_vtp = new Vec3() function pointInPolygon(verts: Vec3[], normal: Vec3, p: Vec3): boolean { let positiveResult = null const N = verts.length for (let i = 0; i !== N; i++) { const v = verts[i] // Get edge to the next vertex const edge = pointInPolygon_edge verts[(i + 1) % N].vsub(v, edge) // Get cross product between polygon normal and the edge const edge_x_normal = pointInPolygon_edge_x_normal //const edge_x_normal = new Vec3(); edge.cross(normal, edge_x_normal) // Get vector between point and current vertex const vertex_to_p = pointInPolygon_vtp p.vsub(v, vertex_to_p) // This dot product determines which side of the edge the point is const r = edge_x_normal.dot(vertex_to_p) // If all such dot products have same sign, we are inside the polygon. if (positiveResult === null || (r > 0 && positiveResult === true) || (r <= 0 && positiveResult === false)) { if (positiveResult === null) { positiveResult = r > 0 } continue } else { return false // Encountered some other sign. Exit. } } // If we got here, all dot products were of the same sign. 
return true } const box_to_sphere = new Vec3() const sphereBox_ns = new Vec3() const sphereBox_ns1 = new Vec3() const sphereBox_ns2 = new Vec3() const sphereBox_sides = [new Vec3(), new Vec3(), new Vec3(), new Vec3(), new Vec3(), new Vec3()] const sphereBox_sphere_to_corner = new Vec3() const sphereBox_side_ns = new Vec3() const sphereBox_side_ns1 = new Vec3() const sphereBox_side_ns2 = new Vec3() const convex_to_sphere = new Vec3() const sphereConvex_edge = new Vec3() const sphereConvex_edgeUnit = new Vec3() const sphereConvex_sphereToCorner = new Vec3() const sphereConvex_worldCorner = new Vec3() const sphereConvex_worldNormal = new Vec3() const sphereConvex_worldPoint = new Vec3() const sphereConvex_worldSpherePointClosestToPlane = new Vec3() const sphereConvex_penetrationVec = new Vec3() const sphereConvex_sphereToWorldPoint = new Vec3() const planeBox_normal = new Vec3() const plane_to_corner = new Vec3() const planeConvex_v = new Vec3() const planeConvex_normal = new Vec3() const planeConvex_relpos = new Vec3() const planeConvex_projected = new Vec3() const convexConvex_sepAxis = new Vec3() const convexConvex_q = new Vec3() const particlePlane_normal = new Vec3() const particlePlane_relpos = new Vec3() const particlePlane_projected = new Vec3() const particleSphere_normal = new Vec3() // WIP const cqj = new Quaternion() const convexParticle_local = new Vec3() const convexParticle_normal = new Vec3() const convexParticle_penetratedFaceNormal = new Vec3() const convexParticle_vertexToParticle = new Vec3() const convexParticle_worldPenetrationVec = new Vec3() const convexHeightfield_tmp1 = new Vec3() const convexHeightfield_tmp2 = new Vec3() const convexHeightfield_faceList = [0] const sphereHeightfield_tmp1 = new Vec3() const sphereHeightfield_tmp2 = new Vec3()
// "the_stack" — concatenation artifact marking the boundary between two unrelated source files (cannon-es narrowphase fragment above, Turbulenz WebGL sound device below); not part of either program
"use strict"; interface WebGLSoundDeviceSoundCheckCall { (): boolean; }; interface WebGLSoundDeviceSourceMap { [id: string] : boolean; }; // // WebGLSound // class WebGLSound implements Sound { /* tslint:disable:no-unused-variable */ static version = 1; /* tslint:enable:no-unused-variable */ // Sound name : string; frequency : number; channels : number; bitrate : number; length : number; compressed : boolean; // WebGLSound buffer : any; // TODO data : any; // TODO blob : Blob; url : string; // on prototype forceUncompress: boolean; audioContext : any; // TODO constructor(params: SoundParameters) { this.name = (params.name || params.src); this.frequency = 0; this.channels = 0; this.bitrate = 0; this.length = 0; this.compressed = true; this.buffer = null; this.data = null; this.blob = null; this.url = null; } destroy() { this.buffer = null; this.data = null; if (this.blob) { URL.revokeObjectURL(this.url); this.blob = null; } this.url = null; } static audioLoaded(sound: WebGLSound, audio: HTMLAudioElement): void { sound.frequency = ((<any>audio).sampleRate || (<any>audio).mozSampleRate || 0); sound.channels = ((<any>audio).channels || (<any>audio).mozChannels || 0); sound.bitrate = (sound.frequency * sound.channels * 2 * 8); sound.length = audio.duration; if (audio.buffered && audio.buffered.length) { if (isNaN(sound.length) || sound.length === Number.POSITIVE_INFINITY) { sound.length = audio.buffered.end(0); } } } private _readUint32(data: Uint8Array, n: number): number { /* tslint:disable:no-bitwise */ return (data[n + 0] | (data[n + 1] << 8) | (data[n + 2] << 16) | (data[n + 3] << 24)); /* tslint:enable:no-bitwise */ } private _readUint64(data: Uint8Array, n: number): number { /* tslint:disable:no-bitwise */ var low = (data[n + 0] | (data[n + 1] << 8) | (data[n + 2] << 16) | (data[n + 3] << 24)); var high = (data[n + 4] | (data[n + 5] << 8) | (data[n + 6] << 16) | (data[n + 7] << 24)); /* tslint:enable:no-bitwise */ return (high ? 
((high * 4294967296) + low) : low); }

    // Scans raw Ogg Vorbis bytes for the identification header and the last
    // page to fill in channels, frequency, bitrate and length without decoding.
    private _findOggInfo(data: Uint8Array)
    {
        var end = (data.length - 15);
        var n;
        for (n = 4; n < end; n += 1)
        {
            // Look for 'vorbis' marker
            // (bytes 'v','o','r','b','i','s' preceded by packet-type byte 1,
            // i.e. the Vorbis identification header)
            if (data[n + 0] === 118 &&
                data[n + 1] === 111 &&
                data[n + 2] === 114 &&
                data[n + 3] === 98 &&
                data[n + 4] === 105 &&
                data[n + 5] === 115 &&
                data[n - 1] === 1)
            {
                // Offsets follow the Vorbis I identification-header layout:
                // channel count byte, then 32-bit sample rate, then (8 bytes
                // later) the nominal-bitrate field — verify against the spec.
                this.channels = data[n + 10];
                this.frequency = this._readUint32(data, (n + 11));
                this.bitrate = this._readUint32(data, (n + 11 + 8));
                break;
            }
        }
        if (this.frequency)
        {
            // Walk backwards from the end to find the last Ogg page; its
            // granule position (total sample count) gives the duration.
            for (n = (data.length - 28); n >= 0; n -= 1)
            {
                // Look for 'OggS' marker
                if (data[n + 0] === 79 &&
                    data[n + 1] === 103 &&
                    data[n + 2] === 103 &&
                    data[n + 3] === 83)
                {
                    var numSamples = this._readUint64(data, (n + 6));
                    if (numSamples)
                    {
                        this.length = (numSamples / this.frequency);
                        break;
                    }
                }
            }
        }
    }

    // Decodes an ID3v2 "syncsafe" integer (7 significant bits per byte).
    // The input was read little-endian by _readUint32, so the byte that came
    // first in the file (most significant syncsafe byte) sits in bits 24-31;
    // the shifts below reassemble the four 7-bit groups in the right order.
    private _syncsafe(value: number): number
    {
        var ret = 0;
        /* tslint:disable:no-bitwise */
        ret |= ((value & 0x7F000000) >> 24);
        ret |= ((value & 0x007F0000) >> 9);
        ret |= ((value & 0x00007F00) << 6);
        ret |= ((value & 0x0000007F) << 21);
        /* tslint:enable:no-bitwise */
        return ret;
    }

    // MPEG-1 Layer III bitrate table indexed by the 4-bit bitrate field
    // (index 0 = "free", index 15 = invalid).
    static MP3BitRates = [0, 32000, 40000, 48000, 56000, 64000, 80000, 96000,
                          112000, 128000, 160000, 192000, 224000, 256000, 320000, 0];
    // MPEG-1 sample-rate table indexed by the 2-bit frequency field.
    static MP3Frequencies = [44100, 48000, 32000, 0];

    // Parses the first MP3 frame header (after skipping any ID3 tags) to fill
    // in bitrate, frequency, channels, and an estimated length.
    private _findMp3Info(data: Uint8Array)
    {
        var end = data.length;
        var n;

        // Ignore ID3v1 'TAG' from the end
        n = (end - 128);
        if (data[n + 0] === 84 &&
            data[n + 1] === 65 &&
            data[n + 2] === 71)
        {
            end -= 128;
        }

        // Ignore ID3v1 'TAG+' from the end
        n = (end - 227);
        if (data[n + 0] === 84 &&
            data[n + 1] === 65 &&
            data[n + 2] === 71 &&
            data[n + 3] === 43)
        {
            end -= 227;
        }

        // Ignore ID3v2 'ID3' from the beginning
        n = 0;
        if (data[n + 0] === 73 &&
            data[n + 1] === 68 &&
            data[n + 2] === 51)
        {
            // ID3v2 header: 10 fixed bytes plus a syncsafe-encoded tag size.
            var size = this._readUint32(data, 6);
            size = this._syncsafe(size);
            n += 10 + size;
        }

        // Check that it is a MPEG 1 Layer 3
        // (frame sync 0xFFE, version bits 11 = MPEG-1, layer bits 01 = Layer III)
        /* tslint:disable:no-bitwise */
        if (data[n + 0] === 0xff &&
            (data[n + 1] >> 5) === 0x7 &&
            ((data[n + 1] >> 3) & 0x3) === 0x3 &&
((data[n + 1] >> 1) & 0x3) === 0x1) { var bitrate = WebGLSound.MP3BitRates[(data[n + 2] >> 4)]; var frequency = WebGLSound.MP3Frequencies[((data[n + 2] >> 2) & 0x3)]; this.bitrate = bitrate; this.frequency = frequency; this.length = (end - n) / (bitrate / 8); this.channels = ((data[n + 3] >> 6) === 0x3 ? 1 : 2); } /* tslint:enable:no-bitwise */ } private _initializeFromData(data: any, extension: string, onload: { (sound: Sound, status: number): void; }): void { if (typeof Blob !== "undefined" && data instanceof Blob) { debug.assert(typeof URL !== "undefined" && URL.createObjectURL); if (data.type === 'audio/x-mpg') { data = data.slice(0, data.size, 'audio/mpeg'); } this.blob = data; this.url = URL.createObjectURL(data); } else { var dataArray; if (data instanceof Uint8Array) { dataArray = data; } else { dataArray = new Uint8Array(data); } // Check extension based on data if (typeof Blob !== "undefined" && typeof URL !== "undefined" && URL.createObjectURL) { var dataBlob; if (dataArray[0] === 79 && dataArray[1] === 103 && dataArray[2] === 103 && dataArray[3] === 83) { this._findOggInfo(dataArray); extension = 'ogg'; dataBlob = new Blob([dataArray], {type: "audio/ogg"}); } else if (dataArray[0] === 82 && dataArray[1] === 73 && dataArray[2] === 70 && dataArray[3] === 70) { extension = 'wav'; dataBlob = new Blob([dataArray], {type: "audio/wav"}); } else { if (extension === 'm4a' || extension === 'mp4') { dataBlob = new Blob([dataArray], {type: "audio/mp4"}); } else if (extension === 'aac') { dataBlob = new Blob([dataArray], {type: "audio/aac"}); } else { // Assume it's an mp3? 
this._findMp3Info(dataArray); extension = 'mp3'; dataBlob = new Blob([dataArray], {type: "audio/mpeg"}); } } debug.assert(dataArray.length === dataBlob.size, "Blob constructor does not support typed arrays."); this.blob = dataBlob; this.url = URL.createObjectURL(dataBlob); } else { var url; if (dataArray[0] === 79 && dataArray[1] === 103 && dataArray[2] === 103 && dataArray[3] === 83) { this._findOggInfo(dataArray); extension = 'ogg'; url = 'data:audio/ogg;base64,'; } else if (dataArray[0] === 82 && dataArray[1] === 73 && dataArray[2] === 70 && dataArray[3] === 70) { extension = 'wav'; url = 'data:audio/wav;base64,'; } else { if (extension === 'm4a' || extension === 'mp4') { url = 'data:audio/mp4;base64,'; } else if (extension === 'aac') { url = 'data:audio/aac;base64,'; } else { // Assume it's an mp3? this._findMp3Info(dataArray); extension = 'mp3'; url = 'data:audio/mpeg;base64,'; } } // Mangle data into a data URI this.url = url + (<WebGLTurbulenzEngine>TurbulenzEngine).base64Encode( dataArray); } } if (onload) { onload(this, 200); } } static create(sd: WebGLSoundDevice, params: SoundParameters): WebGLSound { var sound = new WebGLSound(params); var soundPath = params.src; var onload = params.onload; var data = params.data; var uncompress = (sound.forceUncompress || params.uncompress || (!soundPath && data)); sound.compressed = (!uncompress); var numSamples, numChannels, samplerRate; var audioContext = sd.audioContext; var xhr: XMLHttpRequest; if (audioContext && uncompress) { var buffer; if (soundPath) { if (!sd.isResourceSupported(soundPath)) { if (onload) { onload(null, undefined); } return null; } var bufferCreated = function bufferCreatedFn(buffer) { if (buffer) { sound.buffer = buffer; sound.frequency = buffer.sampleRate; sound.channels = buffer.numberOfChannels; sound.bitrate = (sound.frequency * sound.channels * 2 * 8); sound.length = buffer.duration; if (onload) { onload(sound, 200); } } else { if (onload) { onload(null, 0); } } }; var bufferFailed = 
function bufferFailedFn() { if (onload) { onload(null, 0); } }; if (data) { if (audioContext.decodeAudioData) { audioContext.decodeAudioData(data, bufferCreated, bufferFailed); } else { buffer = audioContext.createBuffer(data, false); bufferCreated(buffer); } } else { if (window.XMLHttpRequest) { xhr = new window.XMLHttpRequest(); } else if (window.ActiveXObject) { xhr = new window.ActiveXObject("Microsoft.XMLHTTP"); } else { if (onload) { onload(null, undefined); } return null; } xhr.onreadystatechange = function () { if (xhr.readyState === 4) { if (!TurbulenzEngine || !TurbulenzEngine.isUnloading()) { var xhrStatus = xhr.status; //var xhrStatusText = (xhrStatus !== 0 && xhr.statusText || 'No connection'); var response = xhr.response; // Sometimes the browser sets status to 200 OK when the connection is closed // before the message is sent (weird!). // In order to address this we fail any completely empty responses. // Hopefully, nobody will get a valid response with no headers and no body! 
if (xhr.getAllResponseHeaders() === "" && !response) { if (onload) { onload(null, 0); } } else if (xhrStatus === 200 || xhrStatus === 0) { if (audioContext.decodeAudioData) { audioContext.decodeAudioData(response, bufferCreated, bufferFailed); } else { var buffer = audioContext.createBuffer(response, false); bufferCreated(buffer); } } else { if (onload) { onload(null, xhrStatus); } } } // break circular reference xhr.onreadystatechange = null; xhr = null; } }; xhr.open("GET", soundPath, true); xhr.responseType = "arraybuffer"; xhr.send(null); } return sound; } else { if (data) { numSamples = data.length; numChannels = (params.channels || 1); samplerRate = params.frequency; var contextSampleRate = Math.min(audioContext.sampleRate, 96000); var c, channel, i, j; if (contextSampleRate === samplerRate) { buffer = audioContext.createBuffer(numChannels, (numSamples / numChannels), samplerRate); // De-interleave data for (c = 0; c < numChannels; c += 1) { channel = buffer.getChannelData(c); for (i = c, j = 0; i < numSamples; i += numChannels, j += 1) { channel[j] = data[i]; } } } else { var ratio = (samplerRate / contextSampleRate); /* tslint:disable:no-bitwise */ var bufferLength = ((numSamples / (ratio * numChannels)) | 0); /* tslint:enable:no-bitwise */ buffer = audioContext.createBuffer(numChannels, bufferLength, contextSampleRate); // De-interleave data for (c = 0; c < numChannels; c += 1) { channel = buffer.getChannelData(c); for (j = 0; j < bufferLength; j += 1) { /* tslint:disable:no-bitwise */ channel[j] = data[c + (((j * ratio) | 0) * numChannels)]; /* tslint:enable:no-bitwise */ } } } if (buffer) { sound.buffer = buffer; sound.frequency = samplerRate; sound.channels = numChannels; sound.bitrate = (samplerRate * numChannels * 2 * 8); sound.length = (numSamples / (samplerRate * numChannels)); if (onload) { onload(sound, 200); } return sound; } } } } else { if (soundPath) { var extension = soundPath.slice(-3); if (!sd.supportedExtensions[extension]) { if (onload) { 
onload(null, undefined); } return null; } if (data) { sound._initializeFromData(data, extension, onload); } else { xhr = new XMLHttpRequest(); xhr.onreadystatechange = function () { if (xhr.readyState === 4) { if (!TurbulenzEngine || !TurbulenzEngine.isUnloading()) { var xhrStatus = xhr.status; // Fix for loading from file if (xhrStatus === 0 && (window.location.protocol === "file:" || window.location.protocol === "chrome-extension:")) { xhrStatus = 200; } // Sometimes the browser sets status to 200 OK when the connection is closed // before the message is sent (weird!). // In order to address this we fail any completely empty responses. // Hopefully, nobody will get a valid response with no headers and no body! if (xhr.getAllResponseHeaders() === "" && !xhr.response && !xhr.responseText) { if (onload) { onload(null, 0); } } else { if (xhrStatus === 200 || xhrStatus === 0) { var data; if (xhr.responseType === "blob" || xhr.responseType === "arraybuffer") { data = xhr.response; } else if ((<any>xhr).mozResponseArrayBuffer) { data = (<any>xhr).mozResponseArrayBuffer; } else { /* tslint:disable:no-bitwise */ var text = xhr.responseText; var numChars = text.length; data = []; data.length = numChars; for (var i = 0; i < numChars; i += 1) { data[i] = (text.charCodeAt(i) & 0xff); } /* tslint:enable:no-bitwise */ } debug.assert(data); sound._initializeFromData(data, extension, onload); } else if (onload) { onload(null, xhrStatus); } } xhr.onreadystatechange = null; xhr = null; } } }; xhr.open('GET', soundPath, true); if (typeof xhr.responseType === "string" || (xhr.hasOwnProperty && xhr.hasOwnProperty("responseType"))) { xhr.responseType = "arraybuffer"; } else if (xhr.overrideMimeType) { xhr.overrideMimeType("text/plain; charset=x-user-defined"); } else { xhr.setRequestHeader("Content-Type", "text/plain; charset=x-user-defined"); } xhr.send(); } return sound; } else { if (data) { numSamples = data.length; numChannels = (params.channels || 1); samplerRate = 
params.frequency; sound.data = data; sound.frequency = samplerRate; sound.channels = numChannels; sound.bitrate = (samplerRate * numChannels * 2 * 8); sound.length = (numSamples / (samplerRate * numChannels)); if (onload) { onload(sound, 200); } return sound; } } } if (onload) { onload(null, undefined); } return null; } } // // WebGLSoundGlobalSource // class WebGLSoundGlobalSource implements SoundGlobalSource { /* tslint:disable:no-unused-variable */ static version = 1; /* tslint:enable:no-unused-variable */ // SoundGlobalSource gain : number; looping : boolean; pitch : number; playing : boolean; paused : boolean; tell : number; // WebGLSoundGlobalSource _gain : number; _looping : boolean; _pitch : number; sd: WebGLSoundDevice; id: number; sound: WebGLSound; audioContext: any; // window.AudioContext || window.webkitAudioContext bufferNode: any; // window.AudioContext.createbufferSource() mediaNode: any; // window.AudioContext.createMediaElementSource() playStart: number; playPaused: number; _gainNode: any; // window.AudioContext.createGain() audio: HTMLAudioElement; updateAudioVolume: { (): void; }; loopAudio: { (): void; }; // Public API play(sound: Sound, seek?: number) { if (seek === undefined) { seek = 0; } if (this.sound === sound) { return this.seek(seek); } if (this.playing) { this._stop(); } this.sound = <WebGLSound>sound; var audioContext = this.audioContext; if ((<WebGLSound>sound).buffer) { var bufferNode = this._createBufferNode(<WebGLSound>sound); if (0 < seek) { var buffer = (<WebGLSound>sound).buffer; if (bufferNode.loop) { bufferNode.start(0, seek, buffer.duration); } else { bufferNode.start(0, seek, (buffer.duration - seek)); } this.playStart = (audioContext.currentTime - seek); } else { bufferNode.start(0); this.playStart = audioContext.currentTime; } } else { var audioItem: WebGLAudioPoolItem = this.sd._allocateAudioElement(); var audio = audioItem.audio; if ((<WebGLSound>sound).data) { audio.mozSetup(sound.channels, sound.frequency); } 
this.audio = audio; audio.loop = this._looping; audio.addEventListener('ended', this.loopAudio, false); var mediaNode = audioItem.mediaNode; if (mediaNode) { mediaNode.connect(this._gainNode); } this.mediaNode = mediaNode; if (this.updateAudioVolume) { this.updateAudioVolume(); } if ((<WebGLSound>sound).data) { (<any>audio).mozWriteAudio((<WebGLSound>sound).data); } else { audio.src = (<WebGLSound>sound).url; audio.play(); if (sound.length === 0) { var checkLoaded = function checkLoadedFn() { if (3 <= audio.readyState) { WebGLSound.audioLoaded(<WebGLSound>sound, audio); return true; } return false; }; this.sd.addLoadingSound(checkLoaded); } } if (0.05 < seek) { try { audio.currentTime = seek; } catch (e) { // It seems there is no reliable way of seeking } } } this.playing = true; this.paused = false; this.sd.addPlayingSource(this); return true; } _stop(): void { this.playing = false; this.paused = false; this.sound = null; var audio = this.audio; if (audio) { audio.removeEventListener('ended', this.loopAudio, false); this.sd._releaseAudioElement(this.audio, this.mediaNode); this.audio = null; this.mediaNode = null; } else { var bufferNode = this.bufferNode; if (bufferNode) { this.bufferNode = null; bufferNode.stop(0); bufferNode.disconnect(); } } } stop(): boolean { var playing = this.playing; if (playing) { this._stop(); this.sd.removePlayingSource(this); } return playing; } pause() { if (this.playing) { if (!this.paused) { this.paused = true; var audio = this.audio; if (audio) { audio.pause(); } else { var bufferNode = this.bufferNode; if (bufferNode) { this.bufferNode = null; this.playPaused = this.audioContext.currentTime; bufferNode.stop(0); bufferNode.disconnect(); } } this.sd.removePlayingSource(this); } return true; } return false; } resume(seek?: number) { if (this.paused) { this.paused = false; var audio = this.audio; if (audio) { if (seek !== undefined) { if (0.05 < Math.abs(audio.currentTime - seek)) { try { audio.currentTime = seek; } catch (e) { // It 
seems there is no reliable way of seeking } } } audio.play(); } else { var audioContext = this.audioContext; if (audioContext) { if (seek === undefined) { seek = (this.playPaused - this.playStart); } var bufferNode = this._createBufferNode(this.sound); if (0 < seek) { var buffer = this.sound.buffer; if (bufferNode.loop) { bufferNode.start(0, seek, buffer.duration); } else { bufferNode.start(0, seek, (buffer.duration - seek)); } this.playStart = (audioContext.currentTime - seek); } else { bufferNode.start(0); this.playStart = audioContext.currentTime; } } } this.sd.addPlayingSource(this); return true; } return false; } rewind() { if (this.playing) { var audio = this.audio; if (audio) { audio.currentTime = 0; return true; } else { var audioContext = this.audioContext; if (audioContext) { var bufferNode = this.bufferNode; if (bufferNode) { bufferNode.stop(0); bufferNode.disconnect(); } bufferNode = this._createBufferNode(this.sound); bufferNode.start(0); this.playStart = audioContext.currentTime; return true; } } } return false; } seek(seek: number): boolean { if (this.playing) { var tell = this.tell; var delta = Math.abs(tell - seek); if (this._looping) { delta = Math.min(Math.abs(tell - (this.sound.length + seek)), delta); } if (0.05 < delta) { var audio = this.audio; if (audio) { try { audio.currentTime = seek; } catch (e) { // It seems there is no reliable way of seeking } } else { var audioContext = this.audioContext; if (audioContext) { var bufferNode = this.bufferNode; if (bufferNode) { bufferNode.stop(0); bufferNode.disconnect(); } bufferNode = this._createBufferNode(this.sound); if (0 < seek) { var buffer = this.sound.buffer; if (bufferNode.loop) { bufferNode.start(0, seek, buffer.duration); } else { bufferNode.start(0, seek, (buffer.duration - seek)); } this.playStart = (audioContext.currentTime - seek); } else { bufferNode.start(0); this.playStart = audioContext.currentTime; } } } } return true; } return false; } clear() { this.stop(); } 
setAuxiliarySendFilter(index: number, effectSlot: SoundEffectSlot, filter: SoundFilter) { return false; } setDirectFilter(filter: SoundFilter) { return false; } destroy() { this.stop(); var gainNode = this._gainNode; if (gainNode) { this._gainNode = null; gainNode.disconnect(); } } _createBufferNode(sound: WebGLSound): any { var buffer = sound.buffer; var bufferNode = this.audioContext.createBufferSource(); bufferNode.buffer = buffer; bufferNode.loop = this._looping; if (bufferNode.playbackRate) { bufferNode.playbackRate.value = this._pitch; } bufferNode.connect(this._gainNode); // Backwards compatibility if (!bufferNode.start) { bufferNode.start = function audioStart(when, offset, duration) { if (arguments.length <= 1) { this.noteOn(when); } else { this.noteGrainOn(when, offset, duration); } }; } if (!bufferNode.stop) { bufferNode.stop = function audioStop(when) { this.noteOff(when); }; } this.bufferNode = bufferNode; return bufferNode; } _checkBufferNode(currentTime: number): boolean { var bufferNode = this.bufferNode; if (bufferNode) { var tell = (currentTime - this.playStart); var duration = bufferNode.buffer.duration; if (duration < tell) { if (this._looping) { this.playStart = (currentTime - (tell - duration)); } else { bufferNode.disconnect(); this.playing = false; this.sound = null; this.bufferNode = null; return false; } } } return true; } static create(sd: WebGLSoundDevice, id: number, params: SoundGlobalSourceParameters): WebGLSoundGlobalSource { var source = new WebGLSoundGlobalSource(); source.sd = sd; source.id = id; source.sound = null; source.audio = null; source.playing = false; source.paused = false; source._gain = (typeof params.gain === "number" ? 
params.gain : 1); source._looping = (params.looping || false); source._pitch = (params.pitch || 1); var audioContext = sd.audioContext; if (audioContext) { source.bufferNode = null; source.mediaNode = null; source.playStart = -1; source.playPaused = -1; var masterGainNode = sd._gainNode; var gainNode = (audioContext.createGain ? audioContext.createGain() : audioContext.createGainNode()); gainNode.gain.value = source._gain; source._gainNode = gainNode; gainNode.connect(masterGainNode); source.loopAudio = function loopAudioFn() { source.stop(); }; } else { source.updateAudioVolume = function updateAudioVolumeFn() { var audio = this.audio; if (audio) { var volume = Math.min(this._gain, 1); audio.volume = volume; if (0 >= volume) { audio.muted = true; } else { audio.muted = false; } } }; if (sd.loopingSupported) { source.loopAudio = function loopAudioFn() { source.stop(); }; } else { source.looping = source._looping; source.loopAudio = function loopAudioFn() { var audio = source.audio; if (audio) { if (this.looping) { audio.currentTime = 0; audio.play(); } else { source.stop(); } } }; } } return source; } } // // WebGLSoundSource // class WebGLSoundSource extends WebGLSoundGlobalSource implements SoundSource { /* tslint:disable:no-unused-variable */ static version = 1; /* tslint:enable:no-unused-variable */ // SoundSource position : any; // v3 velocity : any; // v3 direction : any; // v3 minDistance : number; maxDistance : number; rollOff : number; relative : boolean; // WebGLSoundSource _position: any; // v3 _velocity: any; // v3 _direction: any; // v3 _pannerNode: any; // window.AudioContext.createPanner() _gainFactor: number; updateRelativePosition: { (lp0: number, lp1: number, lp2: number): void; }; _updateRelativePositionWebAudio(listenerPosition0, listenerPosition1, listenerPosition2) { var position = this._position; this._pannerNode.setPosition(position[0] + listenerPosition0, position[1] + listenerPosition1, position[2] + listenerPosition2); } 
    // HTML5 <audio> fallback path: no PannerNode is available, so distance
    // attenuation is emulated by scaling the element volume. Computes a gain
    // factor from the source/listener distance and pushes it to the audio
    // element only when it actually changed.
    _updateRelativePositionHTML5(listenerPosition0, listenerPosition1, listenerPosition2)
    {
        // Change volume depending on distance to listener
        var minDistance = this.minDistance;
        var maxDistance = this.maxDistance;
        var position = this._position;
        var position0 = position[0];
        var position1 = position[1];
        var position2 = position[2];
        var distanceSq;
        if (this.relative)
        {
            // Relative sources are positioned in listener space already.
            distanceSq = ((position0 * position0) +
                          (position1 * position1) +
                          (position2 * position2));
        }
        else
        {
            var delta0 = (listenerPosition0 - position0);
            var delta1 = (listenerPosition1 - position1);
            var delta2 = (listenerPosition2 - position2);
            distanceSq = ((delta0 * delta0) + (delta1 * delta1) + (delta2 * delta2));
        }
        var gainFactor;
        if (distanceSq <= (minDistance * minDistance))
        {
            // Inside the reference distance: full volume.
            gainFactor = 1;
        }
        else if (distanceSq >= (maxDistance * maxDistance))
        {
            // Beyond the maximum distance: silent.
            gainFactor = 0;
        }
        else
        {
            var distance = Math.sqrt(distanceSq);
            if (this.sd.linearDistance)
            {
                // Linear falloff between minDistance and maxDistance.
                gainFactor = ((maxDistance - distance) / (maxDistance - minDistance));
            }
            else
            {
                // Inverse-distance rolloff controlled by this.rollOff.
                gainFactor = minDistance / (minDistance + (this.rollOff * (distance - minDistance)));
            }
        }
        // Apply the global listener gain on top of the distance attenuation.
        gainFactor *= this.sd.listenerGain;
        if (this._gainFactor !== gainFactor)
        {
            // updateAudioVolume reads _gainFactor; only touch the audio
            // element when the value changed.
            this._gainFactor = gainFactor;
            this.updateAudioVolume();
        }
    }

    // Public API

    // Stops playback and disconnects this source's Web Audio nodes
    // (gain and panner), clearing the references so destroy is idempotent.
    destroy()
    {
        this.stop();
        var gainNode = this._gainNode;
        if (gainNode)
        {
            this._gainNode = null;
            gainNode.disconnect();
        }
        var pannerNode = this._pannerNode;
        if (pannerNode)
        {
            this._pannerNode = null;
            pannerNode.disconnect();
        }
    }

    // Factory for positional sound sources. Packs position/velocity/direction
    // into one shared Float32Array(9) backing buffer via subarray views.
    static create(sd: WebGLSoundDevice, id: number, params: SoundSourceParameters): WebGLSoundSource
    {
        var source = new WebGLSoundSource();
        source.sd = sd;
        source.id = id;
        source.sound = null;
        source.audio = null;
        source.playing = false;
        source.paused = false;
        var buffer = new Float32Array(9);
        source._position = buffer.subarray(0, 3);
        source._velocity = buffer.subarray(3, 6);
        source._direction = buffer.subarray(6, 9);
        source._gain = (typeof params.gain === "number" ?
params.gain : 1); source._looping = (params.looping || false); source._pitch = (params.pitch || 1); var audioContext = sd.audioContext; if (audioContext) { source.bufferNode = null; source.mediaNode = null; source.playStart = -1; source.playPaused = -1; var masterGainNode = sd._gainNode; var pannerNode = audioContext.createPanner(); source._pannerNode = pannerNode; pannerNode.connect(masterGainNode); var gainNode = (audioContext.createGain ? audioContext.createGain() : audioContext.createGainNode()); gainNode.gain.value = source._gain; source._gainNode = gainNode; gainNode.connect(pannerNode); if (sd.linearDistance) { if (typeof pannerNode.distanceModel === "string") { pannerNode.distanceModel = "linear"; } else if (typeof pannerNode.LINEAR_DISTANCE === "number") { pannerNode.distanceModel = pannerNode.LINEAR_DISTANCE; } } if (typeof pannerNode.panningModel === "string") { pannerNode.panningModel = "equalpower"; } else { pannerNode.panningModel = pannerNode.EQUALPOWER; } source.updateRelativePosition = source._updateRelativePositionWebAudio; source.loopAudio = function loopAudioFn() { source.stop(); }; } else { source._gainFactor = 1; source.updateAudioVolume = function updateAudioVolumeFn() { var audio = this.audio; if (audio) { var volume = Math.min((this._gainFactor * this._gain), 1); audio.volume = volume; if (0 >= volume) { audio.muted = true; } else { audio.muted = false; } } }; source.updateRelativePosition = source._updateRelativePositionHTML5; if (sd.loopingSupported) { source.loopAudio = function loopAudioFn() { source.stop(); }; } else { source.looping = source._looping; source.loopAudio = function loopAudioFn() { var audio = source.audio; if (audio) { if (this.looping) { audio.currentTime = 0; audio.play(); } else { source.stop(); } } }; } } source.relative = (params.relative || false); source.minDistance = (params.minDistance || 1); source.maxDistance = (params.maxDistance || 3.402823466e+38); source.rollOff = (params.rollOff || 1); if 
(params.position) { source.position = params.position; } if (params.velocity) { source.velocity = params.velocity; } if (params.direction) { source.direction = params.direction; } return source; } } interface WebGLSoundDeviceExtensions { ogg: boolean; mp3: boolean; mp4: boolean; m4a: boolean; aac: boolean; wav: boolean; } interface WebGLAudioPoolItem { audio: HTMLAudioElement; mediaNode: any; // window.AudioContext.createMediaElementSource() } // // WebGLSoundDevice // class WebGLSoundDevice implements SoundDevice { /* tslint:disable:no-unused-variable */ static version = 1; /* tslint:enable:no-unused-variable */ // SoundDevice vendor : string; // prototype renderer : string; /* tslint:disable:no-duplicate-variable */ version : string; /* tslint:enable:no-duplicate-variable */ deviceSpecifier : string; extensions : string; listenerTransform : any; // m43 listenerVelocity : any; // v3 listenerGain : number; frequency : number; dopplerFactor : number; dopplerVelocity : number; speedOfSound : number; alcVersion : string; alcExtensions : string; alcEfxVersion : string; alcMaxAuxiliarySends : number; // WebGLSoundDevice _listenerTransform : any; // m43 _listenerVelocity : any; // v3 audioContext : any; // _gainNode : any; // linearDistance : boolean; loadingSounds : WebGLSoundDeviceSoundCheckCall[]; loadingInterval : number; // window.setIntervalID id numPlayingSources : number; playingSources : WebGLSoundSource[]; playingSourcesMap : WebGLSoundDeviceSourceMap; lastSourceID : number; loopingSupported : boolean; supportedExtensions : WebGLSoundDeviceExtensions; _audioPool: WebGLAudioPoolItem[]; update: { (): void; }; // Public API createSource(params: SoundSourceParameters): SoundSource { this.lastSourceID += 1; return WebGLSoundSource.create(this, this.lastSourceID, params); } createGlobalSource(params: SoundGlobalSourceParameters): SoundGlobalSource { this.lastSourceID += 1; return WebGLSoundGlobalSource.create(this, this.lastSourceID, params); } createSound(params: 
// (cont.) createSound: delegate to WebGLSound.create.
SoundParameters): Sound { return WebGLSound.create(this, params); }
// loadSoundsArchive: stream a TAR of sounds via SoundTARLoader when that loader
// script is present; otherwise report an engine error and return false.
loadSoundsArchive(params: SoundArchiveParameters): boolean { var src = params.src; if (typeof SoundTARLoader !== 'undefined') { SoundTARLoader.create({ sd: this, src : src, decodearchive: params.decodearchive, decodesound: params.decodesound, uncompress : params.uncompress, onsoundload : function tarSoundLoadedFn(sound) { params.onsoundload(sound); }, onload : function soundTarLoadedFn(success, status) { if (params.onload) { params.onload(success, status); } }, onerror : function soundTarFailedFn(status) { if (params.onload) { params.onload(false, status); } } }); return true; } else { (<WebGLTurbulenzEngine>TurbulenzEngine).callOnError( 'Missing archive loader required for ' + src); return false; } }
// Effects, effect slots and filters are not supported by this backend; the
// factories all return null.
createEffect(params: SoundEffectParameters): SoundEffect { return null; } createEffectSlot(params: SoundEffectSlotParameters): SoundEffectSlot { return null; } createFilter(params: SoundFilterParameters): SoundFilter { return null; }
// _updateHTML5: per-frame update for the HTML5 path; re-evaluates every playing
// source's distance attenuation against the listener position, which is read
// from elements 9..11 of the m43 listener transform.
_updateHTML5(): void { var listenerTransform = this._listenerTransform; var listenerPosition0 = listenerTransform[9]; var listenerPosition1 = listenerTransform[10]; var listenerPosition2 = listenerTransform[11]; var numPlayingSources = this.numPlayingSources; var playingSources = this.playingSources; var n; for (n = 0; n < numPlayingSources; n += 1) { var source = playingSources[n]; if (source.updateRelativePosition) { source.updateRelativePosition(listenerPosition0, listenerPosition1, listenerPosition2); } } }
// _updateWebAudio: per-frame update for the Web Audio path: refresh the master
// gain, drop sources whose buffer has finished (swap-with-last removal — note n
// is deliberately NOT advanced on removal), and update listener-relative sources.
// The array is shrunk only once it is less than half full.
_updateWebAudio(): void { this._gainNode.gain.value = this.listenerGain; var listenerTransform = this._listenerTransform; var listenerPosition0 = listenerTransform[9]; var listenerPosition1 = listenerTransform[10]; var listenerPosition2 = listenerTransform[11]; var numPlayingSources = this.numPlayingSources; var playingSources = this.playingSources; var playingSourcesMap = this.playingSourcesMap; var currentTime = this.audioContext.currentTime; var n = 0; while (n
< numPlayingSources) { var source = playingSources[n]; if (!source._checkBufferNode(currentTime)) { numPlayingSources -= 1; playingSources[n] = playingSources[numPlayingSources]; playingSources[numPlayingSources] = null; delete playingSourcesMap[source.id]; continue; } if (source.relative) { source.updateRelativePosition(listenerPosition0, listenerPosition1, listenerPosition2); } n += 1; } this.numPlayingSources = numPlayingSources; /* tslint:disable:no-bitwise */ if (numPlayingSources < (playingSources.length >> 1)) { playingSources.length = numPlayingSources; } /* tslint:enable:no-bitwise */ }
// isSupported: map engine FILEFORMAT_* query strings onto the capability flags
// probed at device creation (MP4 and M4A share the m4a flag).
isSupported(name): boolean { if ("FILEFORMAT_OGG" === name) { return this.supportedExtensions.ogg; } else if ("FILEFORMAT_MP3" === name) { return this.supportedExtensions.mp3; } else if ("FILEFORMAT_M4A" === name || "FILEFORMAT_MP4" === name) { return this.supportedExtensions.m4a; } else if ("FILEFORMAT_AAC" === name) { return this.supportedExtensions.aac; } else if ("FILEFORMAT_WAV" === name) { return this.supportedExtensions.wav; } return false; }
// Private API
// addLoadingSound: queue a poll callback and lazily start a single 100ms interval
// that retires completed callbacks (swap-with-last) and clears itself once the
// queue is empty.
addLoadingSound(soundCheckCall): void { var loadingSounds = this.loadingSounds; loadingSounds[loadingSounds.length] = soundCheckCall; var loadingInterval = this.loadingInterval; var that = this; if (loadingInterval === null) { this.loadingInterval = loadingInterval = window.setInterval(function checkLoadingSources() { var numLoadingSounds = loadingSounds.length; var n = 0; do { var soundCheck = loadingSounds[n]; if (soundCheck()) { numLoadingSounds -= 1; if (n < numLoadingSounds) { loadingSounds[n] = loadingSounds[numLoadingSounds]; } loadingSounds.length = numLoadingSounds; } else { n += 1; } } while (n < numLoadingSounds); if (numLoadingSounds === 0) { window.clearInterval(loadingInterval); that.loadingInterval = null; } }, 100); } }
// addPlayingSource: register a source as playing exactly once; the id-keyed map
// guards against duplicate entries in the dense playingSources array.
addPlayingSource(source) { var id = source.id; if (!this.playingSourcesMap[id]) { this.playingSourcesMap[id] = true; var numPlayingSources = this.numPlayingSources;
this.playingSources[numPlayingSources] = source; this.numPlayingSources = (numPlayingSources + 1); } }
// removePlayingSource: unregister from the map, then swap-with-last removal from
// the dense playingSources array.
removePlayingSource(source) { delete this.playingSourcesMap[source.id]; var numPlayingSources = this.numPlayingSources; var playingSources = this.playingSources; var n; for (n = 0; n < numPlayingSources; n += 1) { if (playingSources[n] === source) { numPlayingSources -= 1; playingSources[n] = playingSources[numPlayingSources]; playingSources[numPlayingSources] = null; this.numPlayingSources = numPlayingSources; break; } } }
// isResourceSupported: keys the capability table by the last three characters of
// the path, lower-cased. NOTE(review): assumes a 3-letter extension ("m4a",
// "ogg", ...); paths with longer/shorter extensions would not match — verify
// against callers.
isResourceSupported(soundPath) { var extension = soundPath.slice(-3).toLowerCase(); return this.supportedExtensions[extension]; }
// _allocateAudioElement: reuse a pooled <audio> element (plus its
// MediaElementSource, when a Web Audio context exists) or create a fresh pair.
_allocateAudioElement(): WebGLAudioPoolItem { if (this._audioPool.length) { return this._audioPool.pop(); } else { var audio = new Audio(); audio.preload = 'auto'; audio.autobuffer = true; var mediaNode = (this.audioContext ? this.audioContext.createMediaElementSource(audio) : null); return <WebGLAudioPoolItem>{ audio: audio, mediaNode: mediaNode }; } }
// _releaseAudioElement: pause, disconnect and return the pair to the pool
// (bounded to 8 entries; beyond that the elements are simply dropped).
_releaseAudioElement(audio: HTMLAudioElement, mediaNode: any): void { if (mediaNode) { mediaNode.disconnect(); } audio.pause(); //audio.src = "";
if (this._audioPool.length < 8) { this._audioPool.push(<WebGLAudioPoolItem>{ audio: audio, mediaNode: mediaNode }); } }
// destroy: cancel the loading poll, stop every playing source, and clear the
// prototype-level AudioContext references shared by the sound classes.
destroy() { var loadingInterval = this.loadingInterval; if (loadingInterval !== null) { window.clearInterval(loadingInterval); this.loadingInterval = null; } var loadingSounds = this.loadingSounds; if (loadingSounds) { loadingSounds.length = 0; this.loadingSounds = null; } var numPlayingSources = this.numPlayingSources; var playingSources = this.playingSources; var n; for (n = 0; n < numPlayingSources; n += 1) { playingSources[n]._stop(); } this.numPlayingSources = 0; this.playingSources = null; this.playingSourcesMap = null; WebGLSound.prototype.audioContext = null; WebGLSoundSource.prototype.audioContext = null; WebGLSoundGlobalSource.prototype.audioContext = null; }
// create: device factory (continues on the following lines) — probes for Web
// Audio, falls back to HTML5 Audio, and installs matching property accessors.
static create(params:
// (cont.) WebGLSoundDevice.create: initialise defaults, then branch on Web Audio
// availability. The Web Audio branch shares one AudioContext across the sound
// classes via their prototypes and defines PannerNode/GainNode-backed property
// accessors; the HTML5 branch defines plain field-backed accessors instead.
// Returns null when an AudioContext cannot be created or reports a zero sample
// rate, or when a temporary <audio> element cannot be constructed.
SoundDeviceParameters): WebGLSoundDevice { var sd = new WebGLSoundDevice(); sd.extensions = ''; sd.renderer = 'HTML5 Audio'; sd.alcVersion = "0"; sd.alcExtensions = ''; sd.alcEfxVersion = "0"; sd.alcMaxAuxiliarySends = 0; sd.deviceSpecifier = (params.deviceSpecifier || null); sd.frequency = (params.frequency || 44100); sd.dopplerFactor = (params.dopplerFactor || 1); sd.dopplerVelocity = (params.dopplerVelocity || 1); sd.speedOfSound = (params.speedOfSound || 343.29998779296875); sd.linearDistance = (params.linearDistance !== undefined ? params.linearDistance : true); sd.loadingSounds = []; sd.loadingInterval = null; sd.numPlayingSources = 0; sd.playingSources = []; sd.playingSourcesMap = <WebGLSoundDeviceSourceMap><any>{}; sd.lastSourceID = 0; var AudioContextConstructor; if (sd.deviceSpecifier !== "audioelement") { AudioContextConstructor = (window.AudioContext || window.webkitAudioContext); } var listener = null; if (AudioContextConstructor) { var audioContext; try { audioContext = new AudioContextConstructor(); } catch (error) { (<WebGLTurbulenzEngine>TurbulenzEngine).callOnError( 'Failed to create AudioContext:' + error); return null; } if (audioContext.sampleRate === 0) { return null; } WebGLSound.prototype.forceUncompress = !audioContext.createMediaElementSource; WebGLSound.prototype.audioContext = audioContext; WebGLSoundSource.prototype.audioContext = audioContext; WebGLSoundGlobalSource.prototype.audioContext = audioContext; sd.renderer = 'WebAudio'; sd.audioContext = audioContext; sd.frequency = audioContext.sampleRate; sd._gainNode = (audioContext.createGain ?
audioContext.createGain() : audioContext.createGainNode()); sd._gainNode.connect(audioContext.destination); listener = audioContext.listener; listener.dopplerFactor = sd.dopplerFactor; listener.speedOfSound = sd.speedOfSound; sd.update = sd._updateWebAudio; // Set setters and getters WebGLSoundGlobalSource
Object.defineProperty(WebGLSoundGlobalSource.prototype, "gain", { get : function getGainFn() { return this._gain; }, set : function setGainFn(newGain) { if (this._gain !== newGain) { this._gain = newGain; this._gainNode.gain.value = newGain; } }, enumerable : true, configurable : false }); Object.defineProperty(WebGLSoundGlobalSource.prototype, "pitch", { get : function getPitchFn() { return this._pitch; }, set : function setPitchFn(newPitch) { this._pitch = newPitch; var audio = this.audio; if (audio) { audio.playbackRate = newPitch; } else { var bufferNode = this.bufferNode; if (bufferNode) { if (bufferNode.playbackRate) { bufferNode.playbackRate.value = newPitch; } } } }, enumerable : true, configurable : false }); Object.defineProperty(WebGLSoundGlobalSource.prototype, "tell", { get : function tellFn() { if (this.playing) { var audio = this.audio; if (audio) { return audio.currentTime; } else { if (this.paused) { return (this.playPaused - this.playStart); } else { return (audioContext.currentTime - this.playStart); } } } else { return 0; } }, enumerable : true, configurable : false }); Object.defineProperty(WebGLSoundGlobalSource.prototype, "looping", { get : function getLoopingFn() { return this._looping; }, set : function setLoopingFn(newLooping) { this._looping = newLooping; var audio = this.audio; if (audio) { audio.loop = newLooping; } else { var bufferNode = this.bufferNode; if (bufferNode) { bufferNode.loop = newLooping; } } }, enumerable : true, configurable : false }); // Set setters and getters WebGLSoundSource
Object.defineProperty(WebGLSoundSource.prototype, "position", { get : function getPositionFn() { return this._position.slice(); }, set :
function setPositionFn(newPosition) { var oldPosition = this._position; if (oldPosition[0] !== newPosition[0] || oldPosition[1] !== newPosition[1] || oldPosition[2] !== newPosition[2]) { oldPosition[0] = newPosition[0]; oldPosition[1] = newPosition[1]; oldPosition[2] = newPosition[2]; if (!this.relative) { this._pannerNode.setPosition(newPosition[0], newPosition[1], newPosition[2]); } } }, enumerable : true, configurable : false }); Object.defineProperty(WebGLSoundSource.prototype, "direction", { get : function getDirectionFn() { return this._direction.slice(); }, set : function setDirectionFn(newDirection) { this._direction = VMath.v3Copy(newDirection, this._direction); this._pannerNode.setOrientation(newDirection[0], newDirection[1], newDirection[2]); }, enumerable : true, configurable : false }); Object.defineProperty(WebGLSoundSource.prototype, "velocity", { get : function getVelocityFn() { return this._velocity.slice(); }, set : function setVelocityFn(newVelocity) { this._velocity = VMath.v3Copy(newVelocity, this._velocity); this._pannerNode.setVelocity(newVelocity[0], newVelocity[1], newVelocity[2]); }, enumerable : true, configurable : false }); Object.defineProperty(WebGLSoundSource.prototype, "minDistance", { get : function getMinDistanceFn() { return this._pannerNode.refDistance; }, set : function setMinDistanceFn(minDistance) { if (this._pannerNode.maxDistance === minDistance) { minDistance = this._pannerNode.maxDistance * 0.999; } this._pannerNode.refDistance = minDistance; }, enumerable : true, configurable : false }); Object.defineProperty(WebGLSoundSource.prototype, "maxDistance", { get : function getMaxDistanceFn() { return this._pannerNode.maxDistance; }, set : function setMaxDistanceFn(maxDistance) { if (this._pannerNode.refDistance === maxDistance) { maxDistance = this._pannerNode.refDistance * 1.001; } this._pannerNode.maxDistance = maxDistance; }, enumerable : true, configurable : false }); Object.defineProperty(WebGLSoundSource.prototype,
"rollOff", { get : function getRolloffFactorFn() { return this._pannerNode.rolloffFactor; }, set : function setRolloffFactorFn(rollOff) { this._pannerNode.rolloffFactor = rollOff; }, enumerable : true, configurable : false }); } else { sd.update = sd._updateHTML5; WebGLSound.prototype.forceUncompress = false; // Set setters and getters WebGLSoundGlobalSource
Object.defineProperty(WebGLSoundGlobalSource.prototype, "gain", { get : function getGainFn() { return this._gain; }, set : function setGainFn(newGain) { if (this._gain !== newGain) { this._gain = newGain; this.updateAudioVolume(); } }, enumerable : true, configurable : false }); Object.defineProperty(WebGLSoundGlobalSource.prototype, "pitch", { get : function getPitchFn() { return this._pitch; }, set : function setPitchFn(newPitch) { this._pitch = newPitch; var audio = this.audio; if (audio) { audio.playbackRate = newPitch; } }, enumerable : true, configurable : false }); Object.defineProperty(WebGLSoundGlobalSource.prototype, "tell", { get : function tellFn() { if (this.playing) { var audio = this.audio; if (audio) { return audio.currentTime; } } return 0; }, enumerable : true, configurable : false }); // Set setters and getters WebGLSoundSource
Object.defineProperty(WebGLSoundSource.prototype, "position", { get : function getPositionFn() { return this._position.slice(); }, set : function setPositionFn(newPosition) { this._position = VMath.v3Copy(newPosition, this._position); }, enumerable : true, configurable : false }); Object.defineProperty(WebGLSoundSource.prototype, "direction", { get : function getDirectionFn() { return this._direction.slice(); }, set : function setDirectionFn(newDirection) { this._direction = VMath.v3Copy(newDirection, this._direction); }, enumerable : true, configurable : false }); Object.defineProperty(WebGLSoundSource.prototype, "velocity", { get : function getVelocityFn() { return this._velocity.slice(); }, set : function setVelocityFn(newVelocity) { this._velocity =
VMath.v3Copy(newVelocity, this._velocity); }, enumerable : true, configurable : false }); } sd._listenerTransform = (params.listenerTransform ? VMath.m43Copy(params.listenerTransform) : VMath.m43BuildIdentity()); sd._listenerVelocity = (params.listenerVelocity ? VMath.v3Copy(params.listenerVelocity) : VMath.v3BuildZero()); Object.defineProperty(sd, "listenerTransform", { get : function getListenerTransformFn() { return this._listenerTransform.slice(); }, set : function setListenerTransformFn(transform) { this._listenerTransform = VMath.m43Copy(transform, this._listenerTransform); if (listener) { var position0 = transform[9]; var position1 = transform[10]; var position2 = transform[11]; listener.setPosition(position0, position1, position2); listener.setOrientation(-transform[6], -transform[7], -transform[8], transform[3], transform[4], transform[5]); } }, enumerable : true, configurable : false }); Object.defineProperty(sd, "listenerVelocity", { get : function getListenerVelocityFn() { return this._listenerVelocity.slice(); }, set : function setListenerVelocityFn(velocity) { this._listenerVelocity = VMath.v3Copy(velocity, this._listenerVelocity); if (listener) { listener.setVelocity(velocity[0], velocity[1], velocity[2]); } }, enumerable : true, configurable : false }); sd.listenerGain = (typeof params.listenerGain === "number" ?
params.listenerGain : 1); // Need a temporary Audio element to test capabilities
var audio; try { audio = new Audio(); } catch (error) { (<WebGLTurbulenzEngine>TurbulenzEngine).callOnError( 'Failed to create Audio:' + error); return null; } if (sd.audioContext) { sd.loopingSupported = true; } else { if (audio.mozSetup) { try { audio.mozSetup(1, 22050); } catch (e) { return null; } } // Check for looping support
sd.loopingSupported = (typeof audio.loop === 'boolean'); if (sd.loopingSupported) { Object.defineProperty(WebGLSoundGlobalSource.prototype, "looping", { get : function getLoopingFn() { return this._looping; }, set : function setLoopingFn(newLooping) { this._looping = newLooping; var audio = this.audio; if (audio) { audio.loop = newLooping; } }, enumerable : true, configurable : false }); } } // Check for supported extensions
var supportedExtensions : WebGLSoundDeviceExtensions = { ogg: false, mp3: false, mp4: false, m4a: false, aac: false, wav: false, }; if (audio.canPlayType('application/ogg')) { supportedExtensions.ogg = true; } if (audio.canPlayType('audio/mp3')) { supportedExtensions.mp3 = true; } if (audio.canPlayType('audio/mp4')) { supportedExtensions.mp4 = true; supportedExtensions.m4a = true; } if (audio.canPlayType('audio/aac')) { supportedExtensions.aac = true; } if (audio.canPlayType('audio/wav')) { supportedExtensions.wav = true; } sd.supportedExtensions = supportedExtensions; sd._audioPool = []; // Reuse audio element
if (sd.audioContext) { sd._audioPool.push({ audio: audio, mediaNode: sd.audioContext.createMediaElementSource(audio) }); } else { sd._audioPool.push({ audio: audio, mediaNode: null }); } return sd; } }
WebGLSoundDevice.prototype.vendor = "Turbulenz";
the_stack
// Error-message catalogues. Each value is a coded message prefix (SV-*, TH-*,
// PR-*) that callers concatenate with context (a typeid, a path, a key, ...).
// NOTE(review): the PropertyError catalogue is truncated at the end of this
// chunk (it continues past the visible text); its tail is left exactly as-is.
const SchemaValidatorError = { /** * methods: PropertyFactory.register * The context ‘set’ is only valid for properties that are instances of NamedProperties. If you want to * use a context of ‘set’, make sure your template includes: * Inherits: ‘NamedProperty’ * Or * Inherits: [‘NamedProperty’, …] */ SET_ONLY_NAMED_PROPS: 'SV-001: Only properties that inherit from NamedProperty can have a context of "set". typeid: ', /** * You updated an existing template but the change to the version number was not as expected. * Make sure you updated your version number correctly. You may have violated one of the following rules: * - Adding one or more template attributes is a MINOR change. * - Removing one or more template attributes is a MAJOR change. */ CHANGE_LEVEL_TOO_LOW_1: "SV-002: Template mutation requires a higher version change level: ", /** * methods: PropertyFactory.register * The template version number is not valid. A valid version number should look like: ‘1.0.0’ */ INVALID_VERSION_1: "SV-003: Invalid template version in 'typeid' attribute: ", /** * methods: PropertyFactory.register * Your property template should include a typeid attribute. * @example *```json * { * "typeid": "my.example:point2d-1.0.0", * "properties": [ * {"id": "x", "typeid": "Float64"}, * {"id": "y", "typeid": "Float64"} * ] *} *``` */ MISSING_TYPE_ID: "SV-004: Template is missing the mandatory 'typeid' attribute. This is not a valid template: ", /** * methods: PropertyFactory.register * Typeid should contain a template version number. * @example * “typeid: my.example:point2d-1.0.0” */ MISSING_VERSION: "SV-005: Missing template version in 'typeid' attribute: ", /** * methods: PropertyFactory.register * The template you are using is different from the previous version and you did not update the version number. * If any changes were made to the template, you should update the version number to a higher number. * - Major change: removing one or more attribute (e.g.
1.0.0 -\> 2.0.0) * - Minor change: adding one or more attribute (e.g. 1.0.0 -\> 1.1.0) * - Patch: template description changes (e.g. 1.0.0 -\> 1.0.1) */ MODIFIED_TEMPLATE_1: "SV-006: Template has changed at path: ", /** * methods: PropertyFactory.register * When changing your template, you need to increase its version number. For example, if the previous version * number was 1.0.0, it should increase to 1.0.1 for a patch (if the template description has changed), * to 1.1.0 for a minor change (if you added one or more attributes) or to 2.0.0 for a major change (if you * removed one or more attributes). */ MODIFIED_TEMPLATE_SAME_VERSION_1: "SV-007: Template has changed but its version was not increased. Path: ", /** * methods: PropertyFactory.register * PropertyFactory.register requires a template as a parameter. */ NO_TEMPLATE: "SV-008: Template cannot be null or undefined.", /** * methods: PropertyFactory.register * When updating a template’s version number, the version number can only increase, never decrease. */ VERSION_REGRESSION_1: "SV-009: New template version is older than the previously registered version: ", /** * Your template contains a typeid that is not a string. */ TYPEID_MUST_BE_STRING: "SV-010: typeid must be a string. This is not valid: ", /** * INTERNAL ERROR - If you encounter this error, please contact the development team. * The TemplateValidator constructor should have in its parameters param.inheritsFrom and params.hasSchema. * Neither of them can be undefined. */ MISSING_INHERITSFROM_OR_HASSCHEMA: "SV-011: Internal error: TemplateValidator constructor missing one of inheritsFrom or hasSchema function", /** * Your template has an invalid key for the kind of map it is. */ KEY_MUST_BE_TYPEID: "SV-012: A key of a typeid key map must be a valid typeids. Key: ", /** * Your template has an invalid contextKeyType parameter.
*/ INVALID_OPTION_NONE_CONSTANTS: "SV-013: A map with typeids as keys must be constant", /** * You tried to use draft as a versoin when it is not enabled. */ DRAFT_AS_VERSION_TYPEID: "SV-014: By default, draft is not a valid version for a typeId. Set allowDraft to true to support this.", }; const TypeidHelperError = { /** * INTERNAL ERROR - If you encounter this error, please contact the development team. */ TYPEID_NOT_DEFINED: "TH-001: extractVersion requires a typeid parameter", }; const PropertyError = { TYPEID_NOT_NATIVE: "TYPEID_NOT_NATIVE", /** * methods: ArrayProperty.length.set, StringProperty.length.set * Cannot directly set the array.length or string.length. This is a read-only property. */ MODIFY_READ_ONLY: "PR-001: Trying to modify read only property value (array.length).", /** * methods: Property.getRelativePath * There is no path between the property and the ancestor you passed in to .getRelativePath. */ NO_PATH_BETWEEN: "PR-002: No path between ", /** * methods: Property.applyChangeSet * One of the paths included in your changeSet is not valid for this property. * Check that the modifications in the changeSet match the structure of the template. */ INVALID_PATH: "PR-003: Invalid path in ChangeSet: ", /** * methods: NodeProperty.insert * The property you inserted into a NodeProperty has a parent. * If your property has a parent, changing the property’s id will break the parent. * Make the change to the parent first. */ ID_CHANGE_FOR_PROPERTY_WITH_PARENT: "PR-004: Cannot change the ID of a property that has a parent. Could not change id: ", /** * methods: Property.resolvePath * Part of the path entered to Property.resolvePath was not valid. */ INVALID_PATH_TOKEN: "PR-005: ResolvePath error: accessed a child via an invalid path syntax: ", /** * methods: MapProperty.insert, NodeProperty.insert, ReferenceMapProperty.insert, Workspace.insert, * SetProperty.insert * Your map, set or nodeproperty already contains an entry under in_key.
*/ PROPERTY_ALREADY_EXISTS: "PR-006: The property already exists in this collection: ", /** * methods: MapProperty.applyChangeset, NodeProperty.applyChangeset, SetProperty.applyChangeset * The changeset you applied contains properties that have already been inserted. * Check the ‘insert’ fields in your changeSet for properties that might already exist. */ INSERTED_EXISTING_ENTRY: "PR-007: Inserted an already existing entry: ", /** * methods: MapProperty.remove, SetProperty.remove * Tried to remove an entry that does not exist. * This can be caused indirectly by deserialize / applyChangeset methods. * One of the ‘remove’ fields in your changeSet must contain a property that does not exist. */ REMOVED_NON_EXISTING_ENTRY: "PR-008: Trying to remove a non-existing entry: ", /** * methods: MapProperty.applyChangeset, NodeProperty.applyChangeset, SetProperty.applyChangeset * One of the key you are trying to modify in your changeSet does not exist. * One of the ‘modify’ fields in your changeSet refers to a property that does not exist. */ MODIFY_NON_EXISTING_ENTRY: "PR-009: Trying to modify a not existing entry: ", /** * methods: MapProperty.insert, MapProperty.set, SetProperty.insert, NodeProperty.insert * The value you are trying to insert in your map property, set property or node property has a parent. * You cannot insert a property that has a parent. */ INSERTED_ENTRY_WITH_PARENT: "PR-010: Trying to insert into a collection a property that already has " + "a parent.", /** * methods: Property.resolvePath, Workspace.resolvePath * Paths should not contain empty sections such as ‘..’, ‘//’ or ‘[], etc. * Sections in the path are delimited by ‘.’ ‘[ ]’, ‘/’ . * There should always be a path between any two delimiters. */ EMPTY_TOKEN: "PR-011: ResolvePath error: Encountered empty token in path: ", /** * methods: Property.resolvePath, Workspace.resolvePath * Paths should not contain quotes except at the beginning and end of the path.
* For example: resolvePath('my"path.nested".other') is not valid because the first quote is in front of ‘path’ * but after the ‘.nested’ which is part of the subsequent path. */ QUOTES_WITHIN_TOKEN: "PR-012: ResolvePath error: Quotes must only be at the start and the " + "end of a path. Error in path: ", /** * methods: Property.resolvePath, Workspace.resolvePath * Paths should not end with a ‘.’ */ DOT_AT_END: "PR-013: ResolvePath error: Encountered a dot at the end of path: ", /** * methods: Property.resolvePath, Workspace.resolvePath * Paths using square brackets should not contain ‘.’ within those square brackets. * If your path contains any dots, these should be escaped e.g. [my\.path] instead of [my.path]. */ DOTS_IN_SQUARE_BRACKETS: "PR-014: ResolvePath error: Encountered a dot within a square bracket. " + "These have to be escaped. Error in path: ", /** * methods: Property.resolvePath, Workspace.resolvePath * resolvePath error while parsing your string. * It encountered an end to a path segment that was not * followed by a “.” or a “[“ indicating the beginning of a new segment. */ MISSING_DOT_AT_SEGMENT_START: "PR-015: Missing . or [ at segment start in path: ", /** * methods: Property.resolvePath, Workspace.resolvePath * Closing square bracket not followed by the correct character (., [ or *). * For example, this is not valid: resolvePath(myArray[2]nested). * This is valid: resolvePath(myArray[2].nested). */ INVALID_END_OF_SQUARE_BRACKETS: "PR-016: ResolvePath error: Square brackets have to be followed either " + 'by "." or by "[" or by "*". Error in path: ', /** * methods: Property.resolvePath, Workspace.resolvePath * Every opening bracket ([) needs a matching closing bracket (]) */ UNCLOSED_BRACKETS: "PR-017: ResolvePath error: Unclosed brackets at the end of path: ", /** * methods: Property.resolvePath, Workspace.resolvePath * Any closing bracket (]) must be preceded by a matching opening bracket ([).
*/ CLOSING_BRACKET_WITHOUT_OPENING: "PR-018: ResolvePath error: Encountered closing bracket without " + "corresponding opening one in path: ", /** * methods: Property.resolvePath, Workspace.resolvePath */ INVALID_ESCAPE_SEQUENCE: "PR-019: Encountered an invalid escape sequence in path: ", /** * methods: Property.resolvePath, Workspace.resolvePath * A quotation mark at the beginning of a path must have a matching closing quotation mark * at the end of the same path. */ UNCLOSED_QUOTATION_MARKS: "PR-020: ResolvePath error: Encountered unclosed quotation marks in path: ", /** * methods: SetProperty.insert, SetProperty.set, SetProperty.setValues * The property you insert in a setProperty must be an instance of NamedProperty. * When creating the property to be inserted, make sure it inherits from NamedProperty. * @example * #Creating a property that inherits from NamedProperty * ```json * { * typeid:”my.example:myprop-1.0.0”, * inherits:’NamedProperty’ (or [‘NamedProperty’, …] * ... * } *``` */ CANT_INSERT_NON_NAMED_PROPERTIES: "PR-021: Set can only contain named properties", /** * methods: NodeProperty.insert, Workspace.insert * The property you inserted does not have an id. * Unless the property is an instance of NamedProperty, you must pass in an id as the first parameter * (and the property second) */ ADDED_CHILD_WITHOUT_ID: "PR-022: Added child without id.", /** * methods: ArrayProperty.resolvePath * Cannot use .resolvePath on a primitive array, only on a Custom type array. * For a primitive array, use .getValue instead. * For example, instead of MyValueArray.resolvePath(1), use MyValueArray.getValue(1) */ NO_PATHS_FOR_NON_PRIMITIVE_ARRAYS: "PR-023: Path resolution is not supported for primitive type arrays.", /** * methods: ArrayProperty.resolvePath * When using an array index as part of your path, it needs to have a numeric value.
*/ INVALID_NON_NUMERIC_SEGMENT_IN_PATH: "PR-024: ResolvePath error: Accessed an array via an non numeric index: ", /** * methods: ArrayProperty.getRelativePath, ArrayProperty.getAbsolutePath * INTERNAL ERROR * If you encounter this error, please contact the development team. * Part of the path you are trying to find points to a non-existing array item. */ GET_PATH_SEGMENT_CALLED_FOR_NON_ENTRY: "PR-025: Internal error: _getPathSegmentForChildNode has been called " + "for an entry that is not an entry of the collection. ", /** * methods: * The changeSet passed to .deserialize was not a valid non-normalized changeset. */ NO_NORMALIZED_CHANGESET: "PR-026: deserialize was called with a non-normalized ChangeSet.", /** * INTERNAL ERROR - If you encounter this error, please contact the development team. * One of your changesets contained a NamedProperty without a GUID. * This should not happen and should have been validated already. */ MISSING_GUID_IN_NORMALIZED_CHANGESET: "PR-027: Missing GUID in a normalized ChangeSet with named properties", /** * methods: EnumProperty.getEnumString, EnumProperty.setValue, EnumProperty.setEnumByString, * EnumProperty.getEnumString, EnumArrayProperty.getEnumStrings * This Enum Property does not have any entry with that value. * EnumProperty.getEnumByString -\> the EnumProperty you used to pass this function does not have an entry. * EnumProperty.setValue -\> no entry exists for in_value * EnumProperty.setEnumByString -\> no entry exists for in_stringId * EnumArrayProperty.getEnumString -\> the value found at in_position does not correspond to an entry. * EnumArrayProperty.getEnumStrings -\> one of the values found at one of the positions sought does * not correspond to an entry. */ UNKNOWN_ENUM: "PR-028: enum value unknown: ", /** * methods: Property.applyChangeSet * Changeset contains an operation that is unknown. * Valid operations are insert, modify and remove.
*/ UNKNOWN_OPERATION: "PR-029: Unknown ChangeSet operation: ", /** * methods: Workspace.remove, NodeProperty.remove * The property you passed to workspace.remove or nodeProperty.remove does not exist. * Check that you passed the correct property, and that it has not yet been removed. */ REMOVING_NON_EXISTING_KEY: "PR-033: Trying to remove something that does not exist: ", /** * methods: Workspace.get, Property.get * Workspace.get and Property.get take in an id (string or number) or an array of ids. * @example * ```ts *.get(‘position’).get(‘x’) or .get([‘property’, ‘x’]) * ``` */ STRING_OR_ARRAY_STRINGS: "PR-034: in_id must be a string, a number or an array of these. This is not valid: ", /** * methods: Property.serialize * Property.serialize only takes in one parameter: an options object. That parameter is optional. */ SERIALIZE_TAKES_OBJECT: "PR-035: Argument of serialize() should be an object.", /** * ArrayProperty.insert, ArrayProperty.insertRange * The in_position (for .insert) or in_offset (for .insertRange) should not be smaller than 0 * or larger than the length of the array. */ START_OFFSET_INVALID: "PR-036: ArrayProperty: insert range - Start offset is invalid: ", // PR-037 removed
/** * ArrayProperty.remove, ArrayProperty.removeRange, ArrayProperty.pop * INTERNAL ERROR - If you encounter this error, please contact the development team. * The item (or one of the items) you are trying to remove from the array has a parent that is not the array. * This should not happen because you should not have been able to insert the item in the array in the first place. */ CANNOT_REMOVE_WITH_DIFFERENT_PARENT: "PR-038: Internal error: Trying to remove from an array a property that " + "has not the array as parent.", /** * methods ArrayProperty.set, ArrayProperty.setRange * Your first parameter: in_position (for .set) and in_offset (for .setRange) cannot have a negative value.
*/ START_OFFSET_NEGATIVE: "PR-039: ArrayProperty: Modify range - Start offset cannot be negative: ", /** * methods: ArrayProperty.removeRange, ArrayProperty.setRange, ArrayProperty.insertRange, * ArrayProperty.insert, EnumArrayProperty.getEnumStrings * The parameter needs to be a number. * For .removeRange: in_offset and in_deleteCount * For .setRange: in_offset * For .insertRange: in_offset * For .getEnumStrings: in_offset, in_length * For StringProperty.insert: in_position */ NOT_NUMBER: "PR-049: This parameter must be a number: parameter: ", /** * methods: Property.traverseUp, Property.traverseDown * Property.traverseUp and Property.traverseDown take one parameter: a callback function */ CALLBACK_NOT_FCT: "PR-050: traverseUp / traverseDown parameter: in_callback must " + "be a function.", /** * methods: ArrayProperty.insertRange * Array.insertRange takes two parameters. The second one (in_array) must be an array. * To pass in only one item, either use .insert(index, item) * or put that item into an array: .insertRange(index, [item]) */ IN_ARRAY_NOT_ARRAY: "PR-051: Parameter error: in_array must be an array for method: ", /** * methods: EnumProperty.setEnumByString * EnumProperty.setEnumByString takes one parameter: a string id. It must be a string. */ STRING_ID_MUST_BE_STRING: "PR-052: EnumProperty.setEnumByString parameter: in_stringId must " + "be a string. This is not valid: ", /** * methods: Integer64Property.setValueHigh * Integer64Property.setValueHigh takes one parameter: a number. */ IN_HIGH_MUST_BE_NUMBER: "PR-053: Integer64Property.setValueHigh parameter: in_high must " + "be a number. This is not valid: ", /** * methods: Integer64Property.setValueLow * Integer64Property.setValueLow takes one parameter: a number. */ IN_LOW_MUST_BE_NUMBER: "PR-054: Integer64Property.setValueLow parameter: in_low must " + "be a number. 
This is not valid: ", /** * methods: IntegerProperty.toString * Integer64Property.toString takes one optional parameter: a number (in_radix). * If no value is passed, will default to 10. */ IN_RADIX_MUST_BE_NUMBER: "PR-055: Integer64Property.toString parameter: in_radix must be a number. This is not valid: ", /** * methods: IntegerProperty.fromString * Integer64Property.fromString takes two parameters. The first parameter (in_string) must be a string. * (the second parameter is in_radix, a number. It is optional: defaults to 10). */ IN_STRING_MUST_BE_STRING: "PR-056: Integer64Property.fromString parameter: in_string must " + "be a string. This is not valid: ", /** * methods: Integer64Property.fromString * Integer64Property.fromString takes two parameters. The second parameter is optional but if passed, * it must be a number between 2 and 36. If not passed, it defaults to 10. (the first parameter is * in_string and must be a string). */ IN_RADIX_BETWEEN_2_36: "PR-057: Integer64Property.fromString parameter: in_radix must be a " + "number between 2 and 36. This is not valid: ", /** * methods MapProperty.insert, ReferenceMapProperty.set * MapProperty.insert and ReferenceMapProperty.set both take two parameters. * The first parameter (in_key) must be a string. */ KEY_NOT_STRING: "PR-058: MapProperty.insert / ReferenceMapProperty.set parameter: " + "in_key must be a string. This is not valid: ", /** * methods: NodeProperty.insert, Workspace.insert * The second parameter (in_property) must be a valid property (it must be an instance of BaseProperty). */ NOT_A_PROPERTY: "PR-059: NodeProperty.insert parameter in_property is not a property. 
" + "The property you passed is not a valid property.", /** * methods: ReferenceProperty.set / setValue, ReferenceMapProperty.insert / set / setValue / setValues, * ReferenceArrayProperty.enqueue / push / unshift / insert / insertRange / set / setRange / setValue / setValues * The provided value (or values) must be a valid property (an instance of BaseProperty), be undefined or a string (a path). */ PROPERTY_OR_UNDEFINED: "PR-060: Parameter for setting a Reference should be a path to a property, a property or undefined. This is not valid: ", /** * ArrayProperty.insertRange, ArrayProperty.insert, ArrayProperty.push * INTERNAL ERROR - If you encounter this error, please contact the development team. * The item (or one of the items) you are trying to insert in this array has a parent that is not the array. */ NO_INSERT_WITH_PARENT: "PR-088: Internal error: Trying to insert a property into an array or " + "string that already has a parent.", /** * methods: ArrayProperty.get * ArrayProperty.get takes in one parameter: in_position, which can a single position or an array. * If it is a single position, it is the numerical position of the item in the array. * It must be a number or a string that parses into a number (e.g. '2'). * If you pass in an array, the first item of the array must be a number or a string that parses into a number. * (other items in the array are child paths within the array item at this position). * This error happens only when in_position is an array. */ FIRST_ITEM_MUST_BE_NUMBER: "PR-092: The first item in the in_position array must be an array " + "position (a number). This is not valid: ", /** * methods: ArrayProperty.get * ArrayProperty.get takes in one parameter: in_position, which can a single position or an array. * If it is a single position, it is the numerical position of the item in the array. * It must be a number or a string that parses into a number (e.g. '2'). 
* If you pass in an array, the first item of the array must be a number or a string that parses into a number * (other items in the array are child paths within the array item at this position). * This error happens only when in_position is a single position. */ IN_POSITION_MUST_BE_NUMBER: "PR-093: in_position must be a number. This is not valid: ", /** * methods: Property.getValue * Property.getValue takes one parameter: in_id. * It can either be a string or an array of strings. It cannot be an empty array. */ CANNOT_BE_EMPTY_ARRAY: "PR-095: Property.getValue parameter: in_id cannot be an empty array.", /** * methods: EnumArrayProperty.set * EnumArrayProperty.set takes two parameters: in_index and in_value. * In_value should be a number. setValue cannot be used to set enum by string. * Use .setEnumByString instead. */ VALUE_SHOULD_BE_NUMBER: "PR-096: EnumArrayProperty.set parameter: in_value should be " + "a number. To set the value of an enum string, use .setEnumByString " + "instead.", /** * methods: EnumArrayProperty.setEnumByString * EnumArrayProperty.setEnumByString cannot be use to set enum by number. Use .set instead. */ VALUE_SHOULD_BE_STRING: "PR-098: EnumArrayProperty.setEnumByString parameter: in_value should be " + "a string. To set a number, use .set instead.", /** * methods: ArrayProperty.get * In_position or in_offset is either lower than 0 or higher than the length of the array. * Make sure that the property you are trying to get from the array exists and that the position is correct. */ GET_OUT_OF_RANGE: "PR-106: Trying to access out of bounds at index: ", /** * methods: ArrayProperty.setRange, ArrayProperty.set * setRange: Either in_offset is lower than zero or in_offset + length of in_array is higher than * the length of the array. If you need to add items that were not there before, add those using * .push, .insert or .insertRange. */ SET_OUT_OF_BOUNDS: "PR-107: Trying to set out of bounds. 
", /** * methods: ArrayProperty.removeRange, ArrayProperty.remove * RemoveRange: Either in_offset is smaller than zero or in_offset + in_deleteCount is higher than * the length of the array. Make sure that the properties you are trying to remove exist in that * array and that you entered the positions correctly. * Remove: in_offset is either smaller than zero or larger than the length of the array. */ REMOVE_OUT_OF_BOUNDS: "PR-110: Trying to remove out of bounds. ", /** * methods: PropertyFactory.create * INTERNAL ERROR - If you encounter this error, please contact the development team. * Warning: Something went wrong when creating your property: it did not successfully create * the property and then tried to set its value. */ NON_EXISTING_PROPERTY_REPOSITORY_REFERENCE: "PR-111: Internal error: PropertyFactory.create failed to create " + "this property.", /** * methods: Property.setValues * One of the paths you used in .setValues leads to a property. * When passing an object to setValues, make sure that all paths lead to a primitive value. */ SET_VALUES_PATH_PROPERTY: "PR-115: trying to set value to a path leading to a property: ", /** * methods: Property.setValues * One of the paths in the object you passed to .setValues does not match the structure of this property. */ SET_VALUES_PATH_INVALID: "PR-116: trying to set value to an invalid path: ", /** * methods: Property.setValues * .setValues takes one parameter: an object containing paths to the values to be changed. * It should be an object (or in the case of ArrayProperty, an array) */ SET_VALUES_PARAM_NOT_OBJECT: "PR-117: setValues parameter: in_properties must be an object.", /** * methods: PropertyFactory.create * The array you tried to create had a typeid that was not recognized. * It was not a custom type array or one of the following: ‘String’, ‘Int64’, ‘Uint64’ or ‘Bool’. 
*/ UNKNOWN_ARRAY_TYPEID: "PR-118: Unknown typeid in array: ", /** * methods: MapProperty.insert, MapProperty.set * .insert and .set take two parameters. The first one is in_id (or in_key), which is the id under * which the property is added. It can only be a string or a number. Only in the case of named * property can it be omitted. */ ID_STRING_OR_NUMBER: "PR-119: id should be a string or a number. This is not valid: ", /** * methods: ValueProperty.deserialize * .deserialize takes one parameter: a serialized object. It cannot be undefined. */ DESERIALIZE_EMPTY_CHANGESET: "PR-121: ValueProperty.deserialize() called on an empty changeset", /** * methods: IntMapProperty.insert, UintMapProperty.insert * You tried to insert into a (u)Int64MapProperty or (u)Int64ArrayProperty properties that * were not Int64 or UInt64 properties or properties that can be cast to the correct type. */ INT_64_NON_INT64_TYPE: "PR-122: Tried to use (u)Int64MapProperty or (u)Int64ArrayProperty with an invalid type.", /** * methods: Integer64.toString, Integer64.fromString */ BASE_OUT_OF_RANGE: "PR-123: Base is out of range. Base should be in range [2,36]. This is not valid: ", /** * methods: Integer64.fromString * If your property is an instance of Uint64, you cannot set it to a negative number. * Uint64 does not support negative numbers. Use Int64 if you need to support negative numbers. */ CANNOT_UPDATE_TO_NEGATIVE: "PR-124: Cannot update value to negative: ", /** * methods: Integer64.fromString * The string you passed as a first parameter to .fromString contains non-numerical characters. */ CANNOT_PARSE_INVALID_CHARACTERS: "PR-125: Cannot parse. String contains invalid characters: ", /** * methods: Property constructor * INTERNAL ERROR - If you encounter this error, please contact the development team. * Something went wrong while the property constructor was creating a property. * The parameters it received were not objects. 
*/ PROP_CONSTRUCTOR_EXPECTS_OBJECTS: "PR-126: Internal error: Object expected as parameters to " + "BaseProperty constructor", /** * methods: Property.applyChangeSet * One of the ‘modify’ field in your changeset points to an index in the array that does not exist. * Check that the changeset you passed to applyChangeSet is valid. If you did not enter the changeSet yourself, * this is an internal error and you should contact the development team. */ INDEX_INVALID: "PR-131: modified property - index invalid: ", /** * methods: Property.isAncestorOf, Property.isDescendantOf * Property.isAncestorOf and .isDescendantOf take one parameter: a property. It cannot be undefined. */ MISSING_IN_OTHERPROP: "PR-132: isAncestorOf parameter: in_otherProperty must be specified.", /** * methods: StringProperty.insert, StringProperty.push * StringProperty.insert takes two parameters: in_position and in_value. * The second one (in_value) must be a string. * StringProperty.push takes only one parameter (in_value), which must be a string. */ IN_VALUE_MUST_BE_STRING: "PR-133: parameter error: in_value must be a string. This is not valid: ", /** * methods: ValueProperty.getValues * You cannot use the method .getValues on value properties. getValues is used to get multiple nested * values from a custom property. To get the value of a primitive property, use .getValue instead. */ NO_VALUE_PROPERTY_GETVALUES: "PR-134: Cannot use .getValues on value properties or strings. " + "Use .getValue instead.", /** * methods: Property.setValues, ArrayProperty.insertRange, ArrayProperty.removeRange, * ArrayProperty.setRange, EnumArrayProperty.setEnumByString, ArrayProperty.insert, ArrayProperty.set, * ArrayProperty.clear * If a property is created as a constant, it cannot be changed. 
*/ MODIFICATION_OF_CONSTANT_PROPERTY: "PR-140: Modifications of constants are not allowed.", /** * methods: ArrayProperty.insert, ArrayProperty.insertRange * In a non-primitive array, you can only insert instances of properties. You should use PropertyFactory.create * to create an instance of your property before inserting it into the array. */ INSERT_NOT_A_PROP: "PR-141: In an array of properties, you can only insert instances of " + "properties. This value is not valid: ", /** * If a property is a reference, it cannot be changed. */ MODIFICATION_OF_REFERENCED_PROPERTY: "PR-142: Modifications of referenced properties are not allowed.", /** * methods: Property.getValue * Property.getValue(in_ids) is a shortcut for Property.get(in_ids).getValue(). * Property.get(in_ids) must resolve to a ValueProperty. */ GET_VALUE_NOT_A_VALUE: "PR-160: in_ids does not resolve to a ValueProperty: ", /** * methods: MapProperty.insert * If your map is not a ValueMap, in_property must be an instance of BaseProperty. * Use PropertyFactory.create to create an instance of a property. */ NONVALUE_MAP_INSERT_PROP: "PR-161: In a map of properties, you can only insert properties.", /** * methods: PropertyFactory.create * INTERNAL ERROR - If you encounter this error, please contact the development team. * This error is thrown while creating a Property, when the actual context * ('array', 'map', 'set' or 'single') is different than what was expected. */ CONTEXT_NOT_AS_EXPECTED: "PR-163: Property context is different than expected: ", /** * methods: Property.deserialize * INTERNAL ERROR - If you encounter this error, please contact the development team. * This error is thrown by .deserialize but is caused by an invalid changeSet. A serialized ChangeSet for an * Integer64Property must be an array of 2 integers: the low and the high values. Since users cannot yet supply * their changesets directly, this should not happen. 
*/ INVALID_INT64_CHANGESET: "PR-164: Cannot deserialize invalid change set for Int64 property", /** * methods: StringProperty.set * StringProperty.set takes two parameters: in_index (a number, the index of the string that you wish to change) * and in_string (the string you want to insert at that index). To set the value of the whole string, use * setValue. */ STRING_SET_NEEDS_INDEX: "PR-165: String.set first parameter should be an index (number). This is not valid: ", /** * methods: Property.get * The token DEREFERENCE_TOKEN should only be used with .get when the in_ids passed to .get is an array. * the DEREFERENCE_TOKEN should follow a path to a reference. * @example <caption>valid: </caption> * myProp.get(['myReference', TOKENS.DEREFERENCE_TOKEN]) * @example <caption>not valid: </caption> * myProp.get('myReference').get(TOKENS.DEREFERENCE_TOKEN) */ NO_GET_DEREFERENCE_ONLY: "PR-166: Cannot use a dereference token only with .get", /** * methods: StringProperty.setValues * You cannot call .setValues on a StringProperty. To set the value of the string, use .setValue * instead. */ NO_VALUE_PROPERTY_SETVALUES: "PR-167: setValues is not a valid method for String Properties", /** * methods: Property.getRelativePath * In cases where you have more than one repository reference property pointing to the same repository, finding a path * between properties in different repositories can lead to more than one valid result. In that case, .getRelativePath * will return the first valid path it finds. If you want to control which path is used, you should construct the * string path by concatenating the absolute path for the prop in the nested repository and the relative path between your * repository reference and the target property. */ MORE_THAN_ONE_PATH: "PR-169: More than one paths exist between ", /** * methods: Property.getRelativePath * getRelativePath takes one parameter: the property from which the path will start. This must be an instance of * BaseProperty. 
The method will return the path from that property to the property on which it was called ('this') */ IN_FROMPROPERTY_MUST_BE_PROPERTY: "PR-170: getRelativePath parameter error: in_fromProperty must be a property", /** * methods: Property.getRelativePath * getRelativePath does not return a path between a property that is inside a child repository to one that is in * a parent repository. A path like this could not be used with .resolvePath or be used in a reference property * because neither method can go from the root of a referenced repository to a reference property. */ NO_PATH_FROM_CHILD_REPO: "PR-171: cannot get a path from a child repository to a parent repository", /** * methods: getReferencedWorkspace * This repository reference is in read-only mode. Call enableWrite() to access the workspace. */ REPOSITORY_REFERENCE_WORKSPACE_READ_ONLY: "PR-173: This repository reference is in read-only mode. Call enableWrite() to access the workspace.", /** * methods: Property.enableWrite * Repository reference is already in writable mode. */ REPOSITORY_REFERENCE_WORKSPACE_EXIST_ALREADY: "PR-174: Repository reference is already in writable mode", /** * methods: Property.enableWrite * Can't enable write on an empty repository reference without at least a repositoryGUID and branchGUID. */ WRITABLE_REPOSITORY_REFERENCE_NEED_GUIDS: "PR-176: Can't enable write on an empty repository reference without at least a repositoryGUID and branchGUID", /** * methods: Property.enableWrite * Repository reference failed to automatically commit the new commitGUID. 
*/ WRITABLE_REPOSITORY_AUTO_COMMIT_FAIL: "PR-177: Repository reference failed to automatically commit the new commitGUID", /** * methods: Property._setFollowBranch * An unexpected error occurred while trying to switch a repository reference followBranch property */ WRITABLE_REPOSITORY_SET_FOLLOW_BRANCH_FAILED: "PR-178: An unexpected error occurred while trying to switch a repository reference followBranch property to ", /** * methods: MapProperty.insert, MapProperty.set * .insert and .set take two parameters. The first one is in_id (or in_key), which is the id under * which the property is added. It can not be an empty string. */ ID_SHOULD_NOT_BE_EMPTY_STRING: "PR-179: id should not be an empty string.", /** * methods: StringProperty.set * StringProperty.set: in_character must have a length of 1. */ STRING_SET_ONE_CHAR: "PR-180: String.set, only one character can be set (in_character must have a length of 1).", /** * methods: EnumArrayProperty.set * EnumArrayProperty.set only accepts a string or number as input for in_value */ VALUE_STRING_OR_NUMBER: "PR-181: in_value should be a string or a number. This is not valid: ", /** * methods: ArrayProperty.set * The in_value input of ArrayProperty.set should not be an array. */ ARRAY_SET_ONE_ELEMENT: "PR-182: in_value should be a single element. This is not valid: ", CANT_DIRTY_MISSING_PROPERTY: "PR-183: Can't dirty missing property: ", /** * methods: MapProperty.insert, MapProperty.set, SetProperty.insert, NodeProperty.insert * The property you are trying to insert in your map property, set property or node property is a root. */ INSERTED_ROOT_ENTRY: "PR-184: Trying to insert a root property into a collection.", /** * methods: MapProperty.insert, MapProperty.set, SetProperty.insert, NodeProperty.insert * The property you are trying to insert in your map property, set property or node property is already * a parent of the map, set, or node property. 
You cannot insert this property there or you would create * a cycle in your data tree. */ INSERTED_IN_OWN_CHILDREN: "PR-185: Trying to insert a property in itself or in one of its children.", /** * methods: MapProperty.insert, MapProperty.set, SetProperty.insert, NodeProperty.insert * The property you are trying to insert (or at least one of its children) in your map property, set * property or node property is not covered by the paths of the partial checkout. * You cannot insert this property because you would not receive updates for this path after the * insertion and you could corrupt your data by doing subsequent modifications. */ INSERTED_OUTSIDE_PATHS: "PR-186: Trying to insert a property outside the paths covered by the partial checkout.", SHARED_BEFORE_INSERTED: "PR-187: Property must be inserted in the workspace before sharing.", CUSTOM_ID_NOT_ALLOWED: "PR-188: The following property does not support custom id: ", }; const PropertyFactoryError = { /** * methods: PropertyFactory.create * Each property created with PropertyFactory.create should have a unique id. You should make sure your * code generates a unique id for each property created, or make your property an instance of NamedProperty * (which are identified by a unique Urn) */ OVERWRITING_ID: "PF-001: Id already exists: ", /** * methods: PropertyFactory.register * Warning: The template passed into the register method does not match the expected structure for this type. */ TEMPLATE_MISMATCH: "PF-004: Template structures do not match for typeid: ", /** * methods: PropertyFactory.register * The typeid assigned to your property template should include a version. * E.g. 1.0.0 - an example of a valid typeid: “my.example:point2d-1.0.0” */ UNVERSIONED_TEMPLATE: "PF-005: Templates must be versioned.", /** * INTERNAL ERROR - If you encounter this error, please contact the development team. * Error occurs when a template has been inserted into the branch without a SEMVER version. 
* This can occur when registering templates through the commit REST interface. At this point * the data is corrupted and should be reported to the development team */ UNVERSIONED_REMOTE_TEMPLATE: "PF-006: Internal error: Remote template is not versioned.", /** * methods: PropertyFactory.create * RepositoryReferences are not yet fully implemented. They will be soon. */ REPOSITORY_REF_NOT_FULLY_IMPLEMENTED: "PF-007: Repository references are not yet fully implemented and may not " + "yet be used", /** * methods: PropertyFactory.create * When using ‘inherits’ in your property template, it must be a string or an array. * @example * ```json * { * typeid:'my.example:point2d-1.0.0', * inherits: ‘ another property’ * } * ``` * or : * ```json * { * typeid:'my.example:point2d-1.0.0', * inherits: [‘another property’, ‘property2’] * } * ``` */ INHERITS_ARRAY_OR_STRING: "PF-008: Internal error: Inherits must be an Array or a String. This is not valid: ", /** * methods: PropertyFactory.create * Context can be ‘array, ‘set’, ‘map’, ‘enum’ or ‘single’. If not specified, will default to ‘single’. */ UNKNOWN_CONTEXT_SPECIFIED: "PF-009: Unknown context specified: ", /** * methods: PropertyFactory.create * The property you entered into PropertyFactory.create has a typeid that is not registered. * Make sure you register the template before creating an instance of that property. This could * also be caused by a failure in the registration process. */ UNKNOWN_TYPEID_SPECIFIED: "PF-010: Unknown typeid specified: ", /** * methods: PropertyFactory.getAllParentsForTemplate, PropertyFactory.inheritsFrom * Cannot find a template for this typeid. Make sure you registered the template and that the typeid * is entered correctly. This can be an error with the template you are trying to insert or one of the * templates it inherits from. 
*/ NON_EXISTING_TYPEID: "PF-011: Missing template for the property you entered or one of the templates it inherits from: ", /** * methods: PropertyFactory.register * The property you passed in to .register is a primitive property. These do not need to be registered with a * typeid. It can be created without being registered. E.g. PropertyFactory.create(‘String’) */ CANNOT_REGISTER_PRIMITIVE: "PF-012: Cannot register a primitive property with the public `register` " + "function typeid = ", /** * methods: PropertyFactory.convertToTemplates, PropertyFactory.registerFrom * Your template’s id field must be a string. */ DEFINITION_ID_MUST_BE_STRING: 'PF-024: Value "id" of a definition should be a string. "', /** * methods: PropertyFactory.convertToTemplates, PropertyFactory.registerFrom * The "$ref" keyword is used to reference a schema, and provides the ability to validate recursive structures * through self-reference. * An object schema with a "$ref" property MUST be interpreted as a "$ref" reference. The value of the "$ref" * property MUST be a URI Reference (a string) */ REF_SHOULD_BE_STRING: 'PF-025: Value of "$ref" should be a string. "', /** * methods: PropertyFactory.convertToTemplates, PropertyFactory.registerFrom * The identifier passed to $ref does not point to any schema. */ COULD_NOT_FIND_REFERENCE: 'PF-026: Couldn\'t find reference "', /** * methods: PropertyFactory.convertToTemplates, PropertyFactory.registerFrom * The identifier passed to $ref does not point to an object. */ REFERENCED_DEFINITION_SHOULD_BE_OBJECT: 'PF-027: A referenced definition should be an object. "', /** * methods: PropertyFactory.convertToTemplates, PropertyFactory.registerFrom * In a JSON schema, the properties field must be an object. */ PROPERTIES_SHOULD_BE_OBJECT: 'PF-028: The "properties" value should be an object. "', /** * methods: PropertyFactory.convertToTemplates, PropertyFactory.registerFrom * oneOf’s value MUST be a non-empty array. 
Each item of the array MUST be a valid JSON Schema. * An instance validates successfully against this keyword if it validates successfully against exactly one * schema defined by this keyword's value. */ ONE_OF_ONLY_FOR_ARRAYS_OF_ONE_OBJECT: 'PF-029: The "oneOf" object is supported only for arrays of one object.', /** * methods: PropertyFactory.convertToTemplates, PropertyFactory.registerFrom * oneOf’s value MUST be a non-empty array. Each item of the array MUST be a valid JSON Schema. */ ONE_OF_SHOULD_CONTAIN_OBJECTS: 'PF-030: The "oneOf" array should contain objects. "', /** * methods: PropertyFactory.convertToTemplates, PropertyFactory.registerFrom * This keyword's value MUST be a non-empty array. Each item of the array MUST be a valid JSON Schema. */ ALL_OF_SHOULD_BE_ARRAY: 'PF-031: The "allOf" object should be an array.', /** * methods: PropertyFactory.convertToTemplates, PropertyFactory.registerFrom * This keyword's value MUST be a non-empty array. Each item of the array MUST be a valid JSON Schema. */ ALL_OF_SHOULD_CONTAIN_OBJECTS: 'PF-032: The "allOf" array should contain objects. Element ', /** * methods: PropertyFactory.convertToTemplates, PropertyFactory.registerFrom * Your schema definition contains infinite recursion. For example, if your definition ‘a’ refers to definition * ‘b’ as being one of its children and ‘b’ refers to ‘a’ as one of its children. */ INFINITE_RECURSION: "PF-033: Infinite recursion detected in path: ", /** * methods: PropertyFactory.convertToTemplates, PropertyFactory.registerFrom * One part of your template object might contain something that is not of type ‘object’, ‘string’, * ‘number’ or ‘integer’. */ UNSUPPORTED_VALUE_TYPE: 'PF-034: Unsupported value of field "type": ', REQUIRED_PROPERTY_NAME_NOT_STRING: 'PF-035: Required property name should be a string, "', /** * This property is required but it is not listed in the properties field. 
*/ PROPERTY_NAME_DOES_NOT_MATCH: "PF-036: Required property name does not match any property in object: ", /** * The ‘inherits’ field in your template object should be a string or an array of strings. */ INHERITS_SHOULD_BE_STRING: 'PF-037: The "inherits" object should be a string or an array of strings. This is not valid: ', /** * The ‘context’ field in your template should be a string. */ CONTEXT_SHOULD_BE_STRING: 'PF-038: The "context" value should be a string. This is not valid: ', /** * methods: PropertyFactory.convertToTemplates, PropertyFactory.registerFrom * Warning: If you have a ‘length’ field in your template and the context is not set to ‘array’, * ‘length’ will be ignored. */ IGNORING_LENGTH_NOT_ARRAY: 'PF-039: ignoring "length" value since "context" is not "array".', /** * methods: PropertyFactory.convertToTemplates, PropertyFactory.registerFrom * In your template, the field ‘length’ should be a number. */ LENGTH_SHOULD_BE_NUMBER: 'PF-040: The "length" value should be a number. This is not valid: ', /** * methods: PropertyFactory.convertToTemplates, PropertyFactory.registerFrom * Your template contains more than one definition field for this field. */ DUPLICATE_DEFINITION: "PF-041: Duplicate definition for ", /** * methods: PropertyFactory.convertToTemplates, PropertyFactory.registerFrom * The field ‘id’ is missing from your JSON schema. */ FIELD_ID_IS_REQUIRED: 'PF-042: Field "id" is required.', /** * methods: PropertyFactory.convertToTemplates, PropertyFactory.registerFrom * You need a ‘typeid’ field in your template schema. * @example * ```json * { * ‘typeid’: 'autodesk.test:set.set-1.0.0', * ‘properties’: [ * {‘typeid’: 'String', * ‘context’: 'set', * ‘id’: 'DummySet', * ‘inherits’:['NamedProperty']} * ] * } *``` */ FIELD_TYPEID_IS_REQUIRED: 'PF-043: Field "typeid" is required. It is the "typeid" of the resulting ' + "PropertySets Template.", /** * methods: PropertyFactory.register * The ‘length’ field in your template must be a number. 
*/ LENGTH_MUST_BE_NUMBER: "PF-045: length must be a number. This is not valid: ", /** * methods: PropertyFactory.register * Each entry in your enum property array must have an id. * @example * ```json * { * "typeid": "Adsk.Core:Units.Metric-1.0.0", * "inherits": "Enum", * "annotation": { "description": "The metric units" }, * "properties": [ * { "id": "m" , "value": 1, "annotation": { "description": "meter" }}, * { "id": "cm", "value": 2, "annotation": { "description": "centimeter" }}, * { "id": "mm", "value": 3, "annotation": { "description": "millimeter" }} * ] * } * ``` */ ENUM_TYPEID_MISSING: "PF-046: Enum: typeid missing", /** * methods: PropertyTemplate constructor * Each entry in your enum property must have a value that is a number. * @example * ```json * { * "typeid": "Adsk.Core:Units.Metric-1.0.0", * "inherits": "Enum", * "annotation": { "description": "The metric units" }, * "properties": [ * { "id": "m" , "value": 1, "annotation": { "description": "meter" }}, * { "id": "cm", "value": 2, "annotation": { "description": "centimeter" }}, * { "id": "mm", "value": 3, "annotation": { "description": "millimeter" }} * ] * } * ``` */ ENUM_VALUE_NOT_NUMBER: "PF-047: Enum: value must be a number. This is not valid: ", /** * methods: PropertyTemplate.getVersion * INTERNAL ERROR - If you encounter this error, please contact the development team. * Warning: you that the template on which you are calling the .getVersion method is not versioned. * The method will return undefined. This should not happen as we now validate that all templates are * versioned when registering them. */ TEMPLATE_NOT_VERSIONED: "PF-048: Internal error: Template is not versioned.", /** * methods: PropertyFactory.register * Warning: Template already exists. The incoming template MUST match what is currently registered. * If they do not match, an error will be thrown letting you know that the templates are incompatible. 
* See error PF-004 */ REGISTERING_EXISTING_TYPEID: "PF-049: Registering a typeid that already exists typeid = ", /** * methods: PropertyFactory.register * There were errors validating the template you are trying to register. See detailed errors attached. */ FAILED_TO_REGISTER: "PF-050: Failed to register typeid = ", /** * methods: PropertyFactory.convertToTemplates, PropertyFactory.registerFrom * So far, these methods can only convert from a JSON schema. The first parameter (in_fromType) * must be ‘JSONSchema’. */ UNKNOWN_TYPE: "PF-051: Unknown type: ", /** * methods: PropertyTemplate.serializeCanonical * INTERNAL ERROR - If you encounter this error, please contact the development team. * This error shouldn’t occur. The underlying private function that is called is a close cousin of the * deepCopy function which could have an arbitrary target specified. This doesn’t happen in the case of * the serializeCanonical. */ INVALID_TARGET_PROPERTY_TEMPLATE: "PF-053 Copying into incompatible target property template: ", /** * methods: PropertyFactory.loadTemplate * In order for the PropertyFactory to retrieve templates from remote store it has to have at least one store * interface to interact with. This is accomplished by making the PropertyFactory.addStore call. */ MISSING_CASE_IN_TEMPLATE_SERIALIZATION: "PF-054 Missing case in template canonical serialization: ", OVERRIDDING_INHERITED_TYPES: "PF-057: Overriding of inherited typed properties is not allowed: ", /** * methods: PropertyFactory.register * PropertyFactory.register takes one parameter, which can be a string (typeid), a json object (a template) or an * array of these. */ ATTEMPT_TO_REGISTER_WITH_BAD_ARGUMENT: "PF-058: register only accepts strings, json structures or array of those", /** * methods: PropertyFactory.resolve * No store has been added yet to the PropertyFactory. 
A template store has to be instantiated then added with * propertyFactory.addStore() */ NO_STORE_HAS_BEEN_INITIALIZED_YET: "PF-059: No store has been initialized yet to the PropertyFactory.", /** * methods: PropertyFactory.resolve * resolve cannot be called until previous call to resolve has completed. */ DEPENDENCIES_RESOLUTION_IN_PROGRESS: "PF-060: Dependencies resolution already in progress", /** * methods: PropertyFactory.create * Typed values must contain properties that inherit from the base type. */ TYPED_VALUES_MUST_DERIVE_FROM_BASE_TYPE: "PF-061: Typed values must be derived from base type: ", /** * methods: PropertyFactory.create * The field ‘value’ is missing from your JSON schema. */ FIELD_VALUE_IS_REQUIRED: 'PF-062: Field "value" is required: ', /** * methods: PropertyFactory.register * Warning: The structure of the template passed into the register method does not match the structure of a remote template registered under the same typeid. */ REMOTE_TEMPLATE_MISMATCH: "PF-064: Template structures do not match an already registered remote template with the same typeid for typeid: ", /** * methods: PropertyFactory.initializeSchemaStore * Warning: The initializeSchemaStore method must be provided with an options object containing a getBearerToken function and the url to the ForgeSchemaService. */ MISSING_FSS_INIT_OPTIONS: "PF-065: The initializeSchemaStore method must be provided with an options object " + "containing a getBearerToken function and the url to the ForgeSchemaService.", /** * methods: PropertyFactory.initializeSchemaStore * Warning: The initializeSchemaStore method url option must be passed a valid base url. */ FSS_BASEURL_WRONG: "PF-066: The initializeSchemaStore method url option must be passed a valid base url.", /** * methods: PropertyFactory.create * Overridden properties must have same context as the base type. 
*/ OVERRIDEN_PROP_MUST_HAVE_SAME_CONTEXT_AS_BASE_TYPE: "PF-067: Overridden properties must have same context as the base type: ", /** * methods: PropertyFactory.create * Primitive types does not support typedValues. */ TYPED_VALUES_FOR_PRIMITIVES_NOT_SUPPORTED: "PF-068: Primitive types does not support typedValues: ", /** * methods: PropertyFactory.inheritsFrom */ TYPEID_IS_NOT_ENUM: "PF-069: The provided type does not inherit from Enum: ", }; const RepositoryError = { /** * methods: NodeProperty.remove * The property you tried to remove does not exist (its id was not found) - check that the id is correct * and that the property has not already been removed. */ REMOVING_NON_EXISTING_ID: "RE-001: Removing non existing id: ", /** * methods: BranchNode.isEqual, CommitNode.isEqual * In_node parameter is required. In_node is the branch or commit you wish to compare to ‘this’ branch/commit * node to check for equality. */ MISSING_IN_NODE_PARAM: "RE-004: BranchNode.isEqual / CommitNode.isEqual missing " + "parameter: in_node", /** * methods: BranchNode, CommitNode, Workspace.commit * The BranchNode or CommitNode constructor was called without in_params.guid or we tried to send a commit * without a guid. */ MISSING_GUID_FIELD: "RE-006: Missing guid field", /** * INTERNAL ERROR - If you encounter this error, please contact the development team. */ LOCAL_AND_REMOTE_BRANCH_NOT_EQUAL: "RE-007: Remote branch urn must equal the local branch urn", /** * INTERNAL ERROR - If you encounter this error, please contact the development team. */ BRANCH_NOT_REMOTE_BRANCH: "RE-008: Branch is not a remote branch ", /** * now rebasing to \<commit guid\>' * INTERNAL ERROR - If you encounter this error, please contact the development team. * A commit node already rebased has been rebased again. */ COMMIT_ALREADY_REBASED: "RE-009: Should not rebase commit more than once: ", /** * INTERNAL ERROR - If you encounter this error, please contact the development team. 
* A new commit has been received from the server but was not expected. See specific message for more details. */ UNEXPECTED_COMMIT_FROM_SERVER: "RE-010: Adding commit to remote branch ", /** * This error will occur if the repository associated to a branch hasn't been found. */ REPOSITORY_NOT_FOUND: "RE-011: Internal Error: The repository hasn't been found.", /** * This error will occur if the branch within a repository hasn't been found. */ BRANCH_NOT_FOUND: "RE-012: The branch hasn't been found.", }; const ServerError = { /** * INTERNAL ERROR - If you encounter this error, please contact the development team. */ URL_MUST_BE_STRING: "SE-001: Url must be a string.", /** * INTERNAL ERROR - If you encounter this error, please contact the development team. */ PORT_MUST_BE_NUMBER: "SE-002: Port must be a number", }; const ChangeSetError = { /** * Context can only be ‘single’, ‘array’, ‘map’, ‘set’ or ‘enum’. All other values are invalid. */ UNKNOWN_CONTEXT: "CS-001: Unknown context: ", /** * INTERNAL ERROR - If you encounter this error, please contact the development team. */ ALREADY_EXISTING_ENTRY: "CS-003: Internal error: Added an already existing entry: ", /** * INTERNAL ERROR - If you encounter this error, please contact the development team. * The ChangeSet did not include an oldValue member which is computed when making the ChangeSet reversible. */ OLD_VALUE_NOT_FOUND: "CS-004: Internal error: Old value not found while inverting a change set. The change set is probably not reversible.", CONTEXT_VALIDATION_IN_PROGRESS: "CONTEXT_VALIDATION_IN_PROGRESS", NOT_A_VALID_CONTEXT: "NOT_A_VALID_CONTEXT", MISSING_PRE_POST_CALLBACK: "Missing callback. Either pre- or postcallback must be provided.", }; const UtilsError = { INVALID_PATH_IN_REFERENCE: "UT-001: References may only contain absolute repository references " + "or empty strings", /** * methods: Utils.traverseChangesetRecursively * Your changeset contains an operator other than MODIFY, INSERT or REMOVE. 
If you created the changeset youserlf, * check that you only use valid operators. Otherwise, this is an internal error. Please contact the development team. */ UNKNOWN_OPERATOR: "UT-004: ArrayChangeSetIterator: unknown operator ", /** * INTERNAL ERROR - If you encounter this error, please contact the development team. */ NON_PRIMITIVE_ARRAY_NO_TYPEID: "UT-005: Found a non primitive type array without typeids. " + "This should never happen.", /** * Filtering paths within arrays are not supported. */ FILTER_PATH_WITHIN_ARRAY: "UT-006: Filtering paths within arrays are not supported", /** * INTERNAL ERROR - If you encounter this error, please contact the development team. * See specific message for more details. */ ASSERTION_FAILED: "UT-007: INTERNAL ERROR. Failed assertion. ", /** * You used a deprecated function. It will likely be removed in the next major version. * See the custom information if provided. */ DEPRECATED_FUNCTION: "UT-008: Deprecated function %s.", /** * You used a deprecated function parameter. It will likely be removed in the next major version. * See the custom information if provided. */ DEPRECATED_PARAMETER: "UT-009: Deprecated function parameter %s of %s.", /** * You used an experimental feature. It will likely changed in future releases. * See the custom information if provided. */ EXPERIMENTAL_FEATURE: "UT-010: Feature %s is experimental and subject to future changes.", }; const PssClientError = { /** * This error message will be displayed when a repository creation fails */ FAILED_REPOSITORY_CREATION: "PC-001: Server error: Failed to create a repository. ", /** * This error message will be displayed when the deletion of a repository fails */ FAILED_REPOSITORY_DELETION: "PC-002: Server error: Failed to delete a repository. Repository guid: ", /** * This error message will be displayed when the undelete operation of a repository fails */ FAILED_REPOSITORY_UNDELETION: "PC-003: Server error: Failed to undelete a repository. 
Repository guid: ", /** * This error message will be displayed when getting the expiry of a repository fails */ FAILED_GET_EXPIRY_REQUEST: "PC-004: Server error: Failed to get the expiry of a repository. Repository guid: ", /** * This error message will be displayed when setting the expiry of a repository fails */ FAILED_SET_EXPIRY_REQUEST: "PC-005: Server error: Failed to set the expiry of a repository. Repository guid: ", /** * This error message will be displayed when squashing commit history fails */ FAILED_SQUASH_COMMIT_HISTORY: "PC-006: Server error: Failed to squash the commit history. Branch guid: ", /** * This error message will be displayed when fetching a commit fails */ FAILED_FETCH_COMMIT: "PC-007: Server error: Failed to fetch a commit. Commit guid: ", /** * This error message will be displayed when containers creation fails */ FAILED_CONTAINER_CREATION: "PC-008: Server error: Failed to create containers. ", /** * This error message will be displayed when a branch creation fails */ FAILED_BRANCH_CREATION: "PC-009: Server error: Failed to create a branch. ", /** * This error message will be displayed when a commit fails */ FAILED_TO_COMMIT: "PC-011: Server error: Failed to commit. ", /** * This error message will be displayed when a share operation fails */ FAILED_SHARE: "PC-013: Server error: Failed to share or unshare resources. ", /** * This error message will be displayed when getting the branches of a repository fails */ FAILED_GET_ENUMERATE_BRANCHES: "PC-014: Server error: Failed to get the branches of a repository. 
Repository guid: ", /** * This error message will be displayed when a request to get an lca fails */ FAILED_GET_LCA: "PC-015: Server error: Failed to get the lca.", /** * This error message will be displayed when a commit fails because of an internal error while committing */ FAILED_TO_COMMIT_INTERNAL: "PC-016: internal error: Failed to commit.", /** * This error message will be displayed when getting a feature flag fails */ FAILED_TO_GET_FEATURE: "PC-017: Server error: Failed to get feature flag from PSS. ", /** * This error message will be displayed when getting squashed commit range fails */ FAILED_TO_GET_SQUASHED_COMMIT_RANGE: "PC-018: Server error: Failed to get squashed commit range. Branch guid: ", }; const PROPERTY_PATH_DELIMITER = "."; const MESSAGE_CONSTANTS = { ...ChangeSetError, ...PropertyError, ...PropertyFactoryError, ...RepositoryError, ...SchemaValidatorError, ...PssClientError, ...UtilsError, ...TypeidHelperError, ...ServerError, }; export const constants = { MSG: MESSAGE_CONSTANTS, PROPERTY_PATH_DELIMITER, };
the_stack
import EventEmitter from "eventemitter3";
import { ChannelQueue } from "@buttercup/channel-queue";
import { Layerr } from "layerr";
import Vault from "./Vault";
import Credentials from "../credentials/Credentials";
import { getCredentials } from "../credentials/channel";
import { getUniqueID } from "../tools/encoding";
import { getSourceOfflineArchive, sourceHasOfflineCopy, storeSourceOfflineCopy } from "../tools/vaultManagement";
import { credentialsToDatasource, prepareDatasourceCredentials } from "../datasources/register";
import { generateVaultInsights } from "../insight/vault";
import AttachmentManager from "../attachments/AttachmentManager";
import TextDatasource from "../datasources/TextDatasource";
import VaultManager from "./VaultManager";
import { VaultSourceID, VaultSourceStatus } from "../types";

// Shape of the work items accepted by _enqueueStateChange: executed
// serially on the source's "state" channel queue.
interface StateChangeEnqueuedFunction {
    (): void | Promise<any>;
}

export interface VaultSourceConfig {
    colour?: string;
    id?: VaultSourceID;
    order?: number;
    meta?: VaultSourceMetadata;
}

export interface VaultSourceUnlockOptions {
    initialiseRemote?: boolean;
    loadOfflineCopy?: boolean;
    storeOfflineCopy?: boolean;
}

export interface VaultSourceMetadata {
    [property: string]: any;
}

// Hex colour validation: "#abc" or "#aabbcc" (case-insensitive)
const COLOUR_TEST = /^#([a-f0-9]{3}|[a-f0-9]{6})$/i;
const DEFAULT_COLOUR = "#000000";
const DEFAULT_ORDER = 1000;

/**
 * Decrypt a dehydrated credentials string using the master password.
 * Supports both the "v1\n<source>\n<archive>" legacy composite format
 * (only the source-credentials segment is used) and the plain secure
 * string format.
 * @param credentialsString The dehydrated (encrypted) credentials string
 * @param masterPassword The password to decrypt with
 * @returns A promise that resolves with the decrypted Credentials instance
 */
function processDehydratedCredentials(credentialsString: string, masterPassword: string): Promise<Credentials> {
    if (/^v1\n/.test(credentialsString)) {
        const [, sourceCredStr] = credentialsString.split("\n");
        return Credentials.fromSecureString(sourceCredStr, masterPassword);
    }
    return Credentials.fromSecureString(credentialsString, masterPassword);
}

/**
 * Vault source class for managing a single vault
 * within a vault manager
 * @augments EventEmitter
 * @memberof module:Buttercup
 */
export default class VaultSource extends EventEmitter {
    static STATUS_LOCKED = VaultSourceStatus.Locked;
    static STATUS_PENDING = VaultSourceStatus.Pending;
    static STATUS_UNLOCKED = VaultSourceStatus.Unlocked;

    /**
     * Rehydrate the vault source from a dehydrated state
     * Accepts both the v2 payload produced by dehydrate() and the legacy
     * (pre-v2) payload, whose source/archive credentials are re-packed
     * into the "v1\n…" composite string.
     * @param dehydratedString The dehydrated form of the vault source
     * @returns A rehydrated instance
     * @throws {Error} If a legacy payload is missing either credentials component
     * @memberof VaultSource
     * @static
     */
    static rehydrate(dehydratedString: string): VaultSource {
        const target = JSON.parse(dehydratedString);
        let credentials = target.credentials;
        if (target.v !== 2) {
            const { sourceCredentials, archiveCredentials } = target;
            if (!sourceCredentials || !archiveCredentials) {
                throw new Error("Invalid legacy vault state: missing credentials");
            }
            credentials = `v1\n${sourceCredentials}\n${archiveCredentials}`;
        }
        const { id, name, type, colour = DEFAULT_COLOUR, order = DEFAULT_ORDER, meta = {} } = target;
        return new VaultSource(name, type, credentials, { id, colour, order, meta });
    }

    _attachmentManager: AttachmentManager = null;
    _colour: string;
    _credentials: string | Credentials;
    _datasource: TextDatasource = null;
    _id: VaultSourceID;
    _meta: VaultSourceMetadata;
    _name: string;
    _order: number;
    _queue: ChannelQueue;
    _shares: Array<any> = [];
    _status: VaultSourceStatus;
    _type: string;
    _vault: Vault = null;
    // Assigned externally (e.g. by the owning VaultManager) — TODO confirm;
    // used here for access to its _cacheStorage for offline copies.
    _vaultManager: VaultManager = null;

    /**
     * Create a new, locked vault source
     * @param name Display name for the source
     * @param type Datasource type identifier
     * @param credentialsString Dehydrated (encrypted) credentials string
     * @param config Optional configuration (colour, id, order, meta)
     */
    constructor(name: string, type: string, credentialsString: string, config: VaultSourceConfig = {}) {
        super();
        const { colour = DEFAULT_COLOUR, id = getUniqueID(), order = DEFAULT_ORDER, meta = {} } = config;
        // Queue for managing state transitions
        this._queue = new ChannelQueue();
        // Credentials state and status go hand-in-hand:
        //  - Locked = credentials string
        //  - Unlocked = credentials instance
        this._credentials = credentialsString;
        this._status = VaultSource.STATUS_LOCKED;
        // Set other configuration items to properties
        this._id = id;
        this._name = name;
        this._type = type;
        this._colour = colour;
        this._order = order;
        this._meta = meta;
    }

    /**
     * The attachment manager
     * @memberof VaultSource
     * @readonly
     */
    get attachmentManager(): AttachmentManager {
        return this._attachmentManager;
    }

    /**
     * Source colour
     * @memberof VaultSource
     */
    get colour(): string {
        return this._colour;
    }

    /**
     * Source ID
     * @memberof VaultSource
     * @readonly
     */
    get id(): VaultSourceID {
        return this._id;
    }

    /**
     * Meta data (shallow copy — mutations do not affect the source)
     * @memberof VaultSource
     * @readonly
     */
    get meta(): VaultSourceMetadata {
        return { ...this._meta };
    }

    /**
     * Source name
     * @memberof VaultSource
     * @readonly
     */
    get name() {
        return this._name;
    }

    /**
     * The vault order on a vault management instance
     * @memberof VaultSource
     * @readonly
     */
    get order(): number {
        return this._order;
    }

    /**
     * Source status
     * @memberof VaultSource
     * @readonly
     */
    get status(): VaultSourceStatus {
        return this._status;
    }

    /**
     * The datasource type
     * @memberof VaultSource
     * @readonly
     */
    get type(): string {
        return this._type;
    }

    /**
     * Vault reference
     * @memberof VaultSource
     * @readonly
     */
    get vault(): Vault {
        return this._vault;
    }

    /**
     * Set the source colour — must be a hex colour string ("#abc" or "#aabbcc").
     * Emits "updated".
     * @throws {Layerr} If the value is not a valid hex colour
     */
    set colour(newColour: string) {
        if (COLOUR_TEST.test(newColour) !== true) {
            throw new Layerr(`Failed setting colour: Invalid format (expected hex): ${newColour}`);
        }
        this._colour = newColour;
        this.emit("updated");
    }

    /**
     * Set the source order — must be a number >= 0. Emits "updated".
     * @throws {Layerr} If the value is not a non-negative number
     */
    set order(newOrder: number) {
        if (isNaN(newOrder) || typeof newOrder !== "number" || newOrder < 0) {
            throw new Layerr(`Failed setting order: Order must be greater than or equal to 0: ${newOrder}`);
        }
        this._order = newOrder;
        this.emit("updated");
    }

    /**
     * Change the master vault password
     * Unlocks first if currently locked (and re-locks afterwards); if already
     * unlocked, verifies the old password and syncs via update(). When the
     * datasource supports password changes, a preflight changePassword call is
     * made before anything is modified, and the real change is pushed to the
     * remote at the end. The offline cache is cleared as part of the change.
     * Emits "passwordChanged" and "updated".
     * @param oldPassword The original/current password
     * @param newPassword The new password to change to
     * @param meta Optional metadata
     * @throws {Error} If the new password is missing, equals the old one, the
     *  old password doesn't match, or the datasource preflight check fails
     * @memberof VaultSource
     */
    async changeMasterPassword(oldPassword: string, newPassword: string, meta: { [key: string]: any } = {}) {
        if (oldPassword === newPassword) {
            throw new Error("New password cannot be the same as the previous one");
        } else if (!newPassword) {
            throw new Error("New password must be specified");
        }
        const datasourceSupportsChange = this._datasource.supportsPasswordChange();
        const newMasterCreds = new Credentials(meta, newPassword);
        let wasLocked = false;
        if (this.status !== VaultSource.STATUS_UNLOCKED) {
            wasLocked = true;
            // Locked, so unlock
            await this.unlock(Credentials.fromPassword(oldPassword));
        } else {
            // Unlocked, so check password..
            const credentials = getCredentials((<Credentials>this._credentials).id);
            if (credentials.masterPassword !== oldPassword) {
                throw new Error("Old password does not match current unlocked instance value");
            }
            // ..and then update
            await this.update();
        }
        // Check datasource is ready
        if (datasourceSupportsChange) {
            const isReady = await this._datasource.changePassword(
                prepareDatasourceCredentials(newMasterCreds, this._datasource.type),
                /* preflight: */ true
            );
            if (!isReady) {
                throw new Error("Datasource not capable of changing password at this time");
            }
        }
        // Clear offline cache
        await storeSourceOfflineCopy(this._vaultManager._cacheStorage, this.id, null);
        // Change password
        const newCredentials = Credentials.fromCredentials(this._credentials as Credentials, oldPassword);
        const newCreds = getCredentials(newCredentials.id);
        newCreds.masterPassword = newPassword;
        await this._updateVaultCredentials(newCredentials);
        // Re-lock if it was locked earlier
        if (wasLocked) {
            await this.lock();
        }
        // Change remote if supported
        if (datasourceSupportsChange) {
            await this._datasource.changePassword(
                prepareDatasourceCredentials(newMasterCreds, this._datasource.type),
                /* preflight: */ false
            );
        }
        this.emit("passwordChanged");
        this.emit("updated");
    }

    /**
     * Check if the vault source can be updated
     * True only when unlocked and the vault has no uncommitted (dirty) changes.
     * @memberof VaultSource
     */
    canBeUpdated(): boolean {
        return this.status === VaultSource.STATUS_UNLOCKED && this._vault.format.dirty === false;
    }

    /**
     * Check if the source has an offline copy
     * @returns {Promise.<Boolean>} A promise which resolves with whether an offline
     *  copy is available or not
     * @memberof VaultSource
     */
    checkOfflineCopy() {
        return sourceHasOfflineCopy(this._vaultManager._cacheStorage, this.id);
    }

    /**
     * Dehydrate the source to a JSON string, ready for storage
     * The payload is always written with status LOCKED; credentials are stored
     * as the raw string when locked, or serialized to a secure string when
     * unlocked. Runs on the state queue.
     * @throws {Layerr} (rejects) If the source is in the pending state
     * @memberof VaultSource
     */
    dehydrate(): Promise<string> {
        return this._enqueueStateChange(async () => {
            const payload = {
                v: 2,
                id: this.id,
                name: this.name,
                type: this.type,
                status: VaultSource.STATUS_LOCKED,
                colour: this.colour,
                order: this.order,
                meta: this.meta,
                credentials: null
            };
            if (this.status === VaultSource.STATUS_PENDING) {
                throw new Layerr(`Failed dehydrating source: Source in pending state: ${this.id}`);
            } else if (this.status === VaultSource.STATUS_LOCKED) {
                payload.credentials = this._credentials;
            } else {
                payload.credentials = await (<Credentials>this._credentials).toSecureString();
            }
            return JSON.stringify(payload);
        });
    }

    /**
     * Get offline content, if it exists
     * @returns A promise a resolves with the content, or null
     *  if it doesn't exist
     * @memberof VaultSource
     */
    getOfflineContent(): Promise<string | null> {
        return this.checkOfflineCopy().then(hasContent =>
            hasContent ? getSourceOfflineArchive(this._vaultManager._cacheStorage, this.id) : null
        );
    }

    /**
     * Detect whether the local archives (in memory) differ from their remote copies
     * Fetches the remote copies from their datasources and detects differences between
     * them and their local counterparts. Does not change/update the local items.
     * Delegates to the datasource's own localDiffersFromRemote implementation
     * when one exists; otherwise loads the remote history and compares.
     * @returns A promise that resolves with a boolean - true if
     *  there are differences, false if there is not
     * @memberof VaultSource
     */
    localDiffersFromRemote(): Promise<boolean> {
        if (this.status !== VaultSource.STATUS_UNLOCKED) {
            return Promise.reject(
                new Layerr(`Failed diffing source: Source not unlocked (${this.status}): ${this.id}`)
            );
        }
        if (typeof (<any>this._datasource).localDiffersFromRemote === "function") {
            return (<any>this._datasource).localDiffersFromRemote(
                prepareDatasourceCredentials(this._credentials as Credentials, this._datasource.type),
                this.vault.format.history
            );
        }
        if (this._datasource.type !== "text") {
            // Only clear if not a TextDatasource
            this._datasource.setContent("");
        }
        return this._datasource
            .load(prepareDatasourceCredentials(this._credentials as Credentials, this._datasource.type))
            .then(({ Format, history }) => {
                if (Format !== this.vault.format.getFormat()) {
                    throw new Error("Loaded format does not match that of current vault");
                }
                return Format.historiesDiffer(this.vault.format.history, history);
            });
    }

    /**
     * Lock the source
     * Serializes the credentials back to a secure string and clears the
     * in-memory vault, datasource and attachment manager. On failure, all
     * previous state is restored and the source returns to UNLOCKED.
     * Emits "locked" on success.
     * @throws {Layerr} If the source is not currently unlocked, or if locking fails
     * @memberof VaultSource
     */
    async lock() {
        if (this.status !== VaultSource.STATUS_UNLOCKED) {
            throw new Layerr(`Failed locking source: Source in invalid state (${this.status}): ${this.id}`);
        }
        await this._enqueueStateChange(async () => {
            this._status = VaultSource.STATUS_PENDING;
            // Snapshot current state for rollback if serialization fails
            const currentCredentials = this._credentials;
            const currentVault = this._vault;
            const currentDatasource = this._datasource;
            const currentAttachmentMgr = this._attachmentManager;
            try {
                const credentialsStr = await (<Credentials>this._credentials).toSecureString();
                this._credentials = credentialsStr;
                this._datasource = null;
                this._vault = null;
                this._attachmentManager = null;
                this._status = VaultSource.STATUS_LOCKED;
                this.emit("locked");
            } catch (err) {
                // Roll back to the unlocked state captured above
                this._credentials = currentCredentials;
                this._datasource = currentDatasource;
                this._vault = currentVault;
                this._status = VaultSource.STATUS_UNLOCKED;
                this._attachmentManager = currentAttachmentMgr;
                throw new Layerr(err, "Failed locking source");
            }
        });
    }

    /**
     * Merge remote contents
     * Detects differences between a local and a remote item, and merges the
     * two copies together.
     * @returns A promise that resolves with the newly merged archive -
     *  This archive is automatically saved over the original local copy.
     * @throws {Error} If the loaded format does not match the current vault's format
     * @memberof VaultSource
     */
    async mergeFromRemote(): Promise<Vault> {
        if (this._datasource.type !== "text") {
            // Only clear if not a TextDatasource
            this._datasource.setContent("");
        }
        const { Format, history } = await this._datasource.load(
            prepareDatasourceCredentials(this._credentials as Credentials, this._datasource.type)
        );
        if (Format !== this._vault.format.getFormat()) {
            throw new Error("Format loaded during merge did not match current");
        }
        const newVault = Format.vaultFromMergedHistories(this._vault.format.history, history);
        this._vault._updateFormat(newVault.format);
        return this._vault;
    }

    /**
     * Rename the vault source
     * Emits "updated".
     * @param name The new name
     * @memberof VaultSource
     */
    rename(name: string) {
        this._name = name;
        this.emit("updated");
    }

    /**
     * Save the vault to the remote, ensuring that it's first merged and
     * updated to prevent conflicts or overwrites.
     * Runs on the state queue (stack "saving"); clears the dirty flag and
     * refreshes insights after saving. Emits "updated".
     * @memberof VaultSource
     */
    async save() {
        await this._enqueueStateChange(async () => {
            if (await this.localDiffersFromRemote()) {
                await this.mergeFromRemote();
            }
            await this._datasource.save(
                this._vault.format.history,
                prepareDatasourceCredentials(this._credentials as Credentials, this._datasource.type)
            );
            this._vault.format.dirty = false;
            await this._updateInsights();
        }, /* stack */ "saving");
        this.emit("updated");
    }

    /**
     * Check whether attachments are supported
     * False when the source is not unlocked; otherwise defers to the datasource.
     * @memberof VaultSource
     */
    supportsAttachments(): boolean {
        if (this.status !== VaultSource.STATUS_UNLOCKED) return false;
        return this._datasource.supportsAttachments();
    }

    /**
     * Test whether a password matches this source's master password
     * Works in both locked and unlocked states by attempting to decrypt the
     * (serialized) credentials with the supplied password. Never mutates state.
     * @param password The password to test
     * @returns True if the password decrypts the credentials, false otherwise
     * @throws {Error} If the source is in the pending state
     * @memberof VaultSource
     */
    async testMasterPassword(password: string): Promise<boolean> {
        if (this.status !== VaultSourceStatus.Locked && this.status !== VaultSourceStatus.Unlocked) {
            throw new Error(`Source in invalid state for password test: ${this.status}`);
        }
        const credStr =
            this.status === VaultSourceStatus.Locked
                ? (this._credentials as string)
                : await (<Credentials>this._credentials).toSecureString();
        try {
            await processDehydratedCredentials(credStr, password);
            return true;
        } catch (err) {
            return false;
        }
    }

    /**
     * Unlock the source
     * Decrypts the stored credentials, builds the datasource and either
     * initialises a new remote vault (initialiseRemote) or loads the existing
     * one — optionally from an offline copy (loadOfflineCopy), and optionally
     * storing a fresh offline copy afterwards (storeOfflineCopy, default true).
     * On any failure all state is rolled back and the source returns to LOCKED.
     * Emits "unlocked" on success.
     * @param vaultCredentials Credentials instance carrying the master password
     * @param config Unlock options
     * @throws {Layerr} If credentials are invalid, the source is not locked,
     *  or unlocking fails
     * @memberof VaultSource
     */
    async unlock(vaultCredentials: Credentials, config: VaultSourceUnlockOptions = {}) {
        if (!Credentials.isCredentials(vaultCredentials)) {
            throw new Layerr(`Failed unlocking source: Invalid credentials passed to source: ${this.id}`);
        }
        const { initialiseRemote = false, loadOfflineCopy = false, storeOfflineCopy = true } = config;
        if (this.status !== VaultSource.STATUS_LOCKED) {
            throw new Layerr(`Failed unlocking source: Source in invalid state (${this.status}): ${this.id}`);
        }
        const { masterPassword } = getCredentials(vaultCredentials.id);
        // Keep the original credentials string for rollback on failure
        const originalCredentials = this._credentials;
        this._status = VaultSource.STATUS_PENDING;
        await this._enqueueStateChange(() => {
            let offlineContent = null;
            return this.getOfflineContent()
                .then(availableOfflineContent => {
                    if (availableOfflineContent && loadOfflineCopy) {
                        offlineContent = availableOfflineContent;
                    }
                    return processDehydratedCredentials(this._credentials as string, masterPassword);
                })
                .then((newCredentials: Credentials) => {
                    const credentials = (this._credentials = newCredentials);
                    const datasource = (this._datasource = credentialsToDatasource(
                        Credentials.fromCredentials(credentials, masterPassword)
                    ));
                    if (typeof offlineContent === "string") {
                        // Seed the datasource with the cached offline content
                        datasource.setContent(offlineContent);
                    }
                    // Keep local credentials in sync whenever the datasource updates,
                    // but only once the source has left the pending state
                    datasource.on("updated", () => {
                        this._waitNonPending()
                            .then(async () => {
                                if (this.status === VaultSource.STATUS_UNLOCKED) {
                                    await this._updateCredentialsFromDatasource();
                                }
                                this.emit("updated");
                            })
                            .catch(err => {
                                console.error(`Error updating datasource credentials for vault: ${this.id}`, err);
                            });
                    });
                    const defaultVault = Vault.createWithDefaults();
                    const loadWork = initialiseRemote
                        ? datasource.save(defaultVault.format.history, credentials).then(() => {
                              this._vault = defaultVault;
                          })
                        : datasource.load(credentials).then(({ Format, history }) => {
                              this._vault = Vault.createFromHistory(history, Format);
                          });
                    return loadWork
                        .then(() => {
                            if (storeOfflineCopy) {
                                // Store an offline copy for later use
                                return storeSourceOfflineCopy(
                                    this._vaultManager._cacheStorage,
                                    this.id,
                                    datasource._content
                                );
                            }
                            if (loadOfflineCopy) {
                                // Flag the format as read-only
                                this.vault.format._readOnly = true;
                            }
                        })
                        .then(() => {
                            this._status = VaultSource.STATUS_UNLOCKED;
                            this.emit("unlocked");
                            this._attachmentManager = new AttachmentManager(this);
                        });
                })
                .catch(err => {
                    // Roll everything back and return to the locked state
                    this._status = VaultSource.STATUS_LOCKED;
                    this._vault = null;
                    this._datasource = null;
                    this._credentials = originalCredentials;
                    this._attachmentManager = null;
                    throw new Layerr(err, "Failed unlocking source");
                });
        });
    }

    /**
     * Update the vault
     * When the local copy differs from the remote (skipped if skipDiff),
     * merges the remote contents in. Runs on the state queue (stack
     * "updating"); emits "updated" only if a merge occurred.
     * @returns A promise that resolves once the update has
     *  completed
     * @memberof VaultSource
     */
    async update({ skipDiff = false } = {}) {
        const didUpdate = await this._enqueueStateChange(
            () =>
                (skipDiff ? Promise.resolve(false) : this.localDiffersFromRemote()).then(differs => {
                    if (differs) {
                        return this.mergeFromRemote().then(() => true);
                    }
                    return false;
                }),
            // @todo shares
            // .then(() => initialiseShares(this)),
            /* stack */ "updating"
        );
        if (didUpdate) {
            this.emit("updated");
        }
    }

    /**
     * Write the vault to the remote
     * - This does not perform any merging or sync checks, but simply
     *   writes the vault contents to the remote, overwriting whatever
     *   was there before.
     * Runs on the state queue (stack "saving"); clears the dirty flag and
     * refreshes insights. Emits "updated".
     * @returns A promise that resolves when saving has completed
     * @memberof VaultSource
     */
    async write() {
        await this._enqueueStateChange(async () => {
            await this._datasource.save(
                this._vault.format.history,
                prepareDatasourceCredentials(this._credentials as Credentials, this._datasource.type)
            );
            this._vault.format.dirty = false;
            await this._updateInsights();
        }, /* stack */ "saving");
        this.emit("updated");
    }

    /**
     * Apply shares to the vault (currently a no-op placeholder)
     */
    _applyShares() {
        // @todo
        // this._shares.forEach(share => {
        //     if (!share.archiveHasAppliedShare(this.archive)) {
        //         share.applyToArchive(this.archive);
        //     }
        // });
    }

    /**
     * Enqueue a state-changing callback on the serial "state" channel
     * @param cb The work to run
     * @param stack Optional stack name used to collapse/queue related work
     * @returns A promise that resolves with the callback's result
     */
    _enqueueStateChange(cb: StateChangeEnqueuedFunction, stack?: string): Promise<any> {
        const channel = this._queue.channel("state");
        return stack ? channel.enqueue(cb, undefined, stack) : channel.enqueue(cb);
    }

    /**
     * Extract share histories from the vault history, reset the vault to the
     * share-free base history, and push each extracted history into the
     * matching share in this._shares.
     * @throws {Error} If an extracted share ID has no matching local share
     */
    _unloadShares() {
        const Format = this.vault.format.getFormat();
        const extractedShares = Format.extractSharesFromHistory(this.vault.format.history);
        // Reset archive history (without shares)
        const { base } = extractedShares;
        delete extractedShares.base;
        this.vault.format.erase();
        this.vault.format.execute(base);
        // Update share payloads
        Object.keys(extractedShares).forEach(shareID => {
            const share = this._shares.find(share => share.id === shareID);
            if (!share) {
                throw new Error(`Failed updating extracted share: No share found in workspace for ID: ${shareID}`);
            }
            share.updateHistory(extractedShares[shareID]);
        });
    }

    /**
     * Replace local credentials with the datasource's current credentials,
     * re-encrypted with the current master password.
     * @throws {Layerr} If the source is not unlocked
     */
    async _updateCredentialsFromDatasource() {
        if (this.status !== VaultSource.STATUS_UNLOCKED) {
            throw new Layerr(`Failed updating source credentials: Source is not unlocked: ${this.id}`);
        }
        const { masterPassword } = getCredentials((<Credentials>this._credentials).id);
        this._credentials = Credentials.fromCredentials(this._datasource.credentials, masterPassword);
    }

    /**
     * Generate insights for the current vault and push them to the datasource.
     * @throws {Layerr} If the source is not unlocked
     */
    async _updateInsights() {
        if (this.status !== VaultSource.STATUS_UNLOCKED) {
            throw new Layerr(`Failed updating vault insights: Source is not unlocked: ${this.id}`);
        }
        const insights = generateVaultInsights(this.vault);
        await this._datasource.updateInsights(insights);
    }

    /**
     * Swap in new credentials and write the vault to the remote.
     * @param newCredentials The replacement Credentials instance
     * @throws {Layerr} If the source is not unlocked
     */
    async _updateVaultCredentials(newCredentials) {
        if (this.status !== VaultSource.STATUS_UNLOCKED) {
            throw new Layerr(`Failed updating vault credentials: Source is not unlocked: ${this.id}`);
        }
        this._credentials = newCredentials;
        await this.write();
    }

    /**
     * Resolve once the source is no longer in the PENDING state.
     * Resolves immediately if not pending; otherwise waits for the next
     * "unlocked" or "locked" event (listeners are removed once fired).
     */
    _waitNonPending() {
        return new Promise<void>(resolve => {
            if (this.status !== VaultSource.STATUS_PENDING) return resolve();
            const handleChange = () => {
                this.removeListener("unlocked", handleChange);
                this.removeListener("locked", handleChange);
                resolve();
            };
            this.on("unlocked", handleChange);
            this.on("locked", handleChange);
        });
    }
}
the_stack
import * as AsyncLock from 'async-lock' import { DB, SparseMerkleTree, SparseMerkleTreeImpl, MerkleTreeInclusionProof, } from '@pigi/core-db' import { DefaultSignatureVerifier, serializeObject, SignatureVerifier, BigNumber, ONE, ZERO, getLogger, runInDomain, NULL_ADDRESS, } from '@pigi/core-utils' /* Internal Imports */ import { Address, Balances, Swap, isSwapTransaction, Transfer, isTransferTransaction, RollupTransaction, SignedTransaction, TokenType, State, StateUpdate, StateSnapshot, InclusionProof, StateMachineCapacityError, SignatureError, abiEncodeTransaction, abiEncodeState, parseStateFromABI, } from '../index' import { UNISWAP_ADDRESS, UNI_TOKEN_TYPE, PIGI_TOKEN_TYPE, NON_EXISTENT_SLOT_INDEX, } from './utils' import { InsufficientBalanceError, InvalidTransactionTypeError, NegativeAmountError, RollupStateMachine, SlippageError, } from '../types' const log = getLogger('rollup-state-machine') /** * A Tree-backed Rollup State Machine, facilitating state transitions for * swaps and transactions for Uniswap. */ export class DefaultRollupStateMachine implements RollupStateMachine { public static readonly ROOT_KEY: Buffer = Buffer.from('state_machine_root') public static readonly LAST_OPEN_KEY: Buffer = Buffer.from('last_open_key') public static readonly ADDRESS_TO_KEYS_COUNT_KEY: Buffer = Buffer.from( 'address_to_keys_count' ) private static readonly lockKey: string = 'lock' private lastOpenKey: BigNumber private usedKeys: Set<string> private addressesToKeys: Map<Address, BigNumber> private maxAddresses: BigNumber private tree: SparseMerkleTree private readonly lock: AsyncLock /** * Creates and initializes a DefaultRollupStateMachine. 
* * @param genesisState The genesis state to set * @param db The DB to use * @param aggregatorAddress The address of the aggregator * @param signatureVerifier The signature verifier to use * @param swapFeeBasisPoints The fee for swapping, in basis points * @param treeHeight The height of the tree to use for underlying storage * @returns The constructed and initialized RollupStateMachine */ public static async create( genesisState: State[], db: DB, aggregatorAddress: Address, signatureVerifier: SignatureVerifier = DefaultSignatureVerifier.instance(), swapFeeBasisPoints: number = 30, treeHeight: number = 32 ): Promise<RollupStateMachine> { const stateMachine = new DefaultRollupStateMachine( db, aggregatorAddress, signatureVerifier, swapFeeBasisPoints, treeHeight ) const previousStateExists = await stateMachine.init() if (!previousStateExists && !!genesisState.length) { for (const state of genesisState) { await stateMachine.setAddressState(state.pubkey, state.balances) } } return stateMachine } private constructor( private readonly db: DB, private readonly aggregatorAddress: Address, private readonly signatureVerifier: SignatureVerifier, private readonly swapFeeBasisPoints: number, private readonly treeHeight: number = 32 ) { this.maxAddresses = new BigNumber(Math.pow(2, this.treeHeight) - 1) this.lock = new AsyncLock({ domainReentrant: true, }) } /** * Initializes this RollupStateMachine, reading stored state from the DB * and populating local variables from saved state if there is any. * * @returns True if there was existing state, false otherwise. 
*/ private async init(): Promise<boolean> { const storedRoot: Buffer = await this.db.get( DefaultRollupStateMachine.ROOT_KEY ) this.tree = await SparseMerkleTreeImpl.create( this.db, storedRoot, this.treeHeight ) this.addressesToKeys = new Map<Address, BigNumber>() this.usedKeys = new Set<string>() this.lastOpenKey = ZERO if (!storedRoot) { log.info( `No existing state root found, starting RollupStateMachine fresh` ) return false } const [lastKeyBuffer, addressToKeyCountBuffer] = await Promise.all([ this.db.get(DefaultRollupStateMachine.LAST_OPEN_KEY), this.db.get(DefaultRollupStateMachine.ADDRESS_TO_KEYS_COUNT_KEY), ]) this.lastOpenKey = new BigNumber(lastKeyBuffer) const addressCount = parseInt(addressToKeyCountBuffer.toString(), 10) log.info( `RollupStateMachine found root ${storedRoot.toString( 'hex' )}, last open key: ${this.lastOpenKey.toString( 10 )}, and address key count: ${addressCount}. Initializing with this state.` ) const addressPromises: Array<Promise<Buffer>> = [] for (let i = 0; i < addressCount; i++) { addressPromises.push( this.db.get(DefaultRollupStateMachine.getAddressMapDBKey(i)) ) } const addressToKeysBuffers: Buffer[] = await Promise.all(addressPromises) for (const addressKeyBuf of addressToKeysBuffers) { const addressAndKey: any[] = DefaultRollupStateMachine.deserializeAddressToKeyFromDB( addressKeyBuf ) this.addressesToKeys.set(addressAndKey[0], addressAndKey[1]) } for (const key of this.addressesToKeys.values()) { this.usedKeys.add(key.toString()) if (key.gt(this.lastOpenKey)) { this.lastOpenKey = key } } return true } /** * Gets the state associated with the provided address. 
* * @param address The address for which state will be fetched * @returns The snapshot of the address's state */ public async getState(address: Address): Promise<StateSnapshot> { const [accountState, proof, stateRoot]: [ Buffer, MerkleTreeInclusionProof, string ] = await this.lock.acquire(DefaultRollupStateMachine.lockKey, async () => { const key: BigNumber = this.getAddressKey(address) if (!!key) { const leaf: Buffer = await this.tree.getLeaf(key) if (!!leaf) { const merkleProof: MerkleTreeInclusionProof = await this.tree.getMerkleProof( key, leaf ) return [leaf, merkleProof, merkleProof.rootHash.toString('hex')] } } return [ undefined, undefined, (await this.tree.getRootHash()).toString('hex'), ] }) let state: State let inclusionProof: InclusionProof let slotIndex: number if (!accountState) { state = undefined inclusionProof = undefined slotIndex = NON_EXISTENT_SLOT_INDEX } else { state = DefaultRollupStateMachine.deserializeState(accountState) inclusionProof = proof.siblings.map((x: Buffer) => x.toString('hex')) slotIndex = this.getAddressKey(address).toNumber() } return { slotIndex, state, stateRoot, inclusionProof, } } /** * Applies the provided transactions and returns the resulting state updates. * * @param transactions The transactions to apply * @returns The updated state */ public async applyTransactions( transactions: SignedTransaction[] ): Promise<StateUpdate[]> { return runInDomain(undefined, async () => { return this.lock.acquire(DefaultRollupStateMachine.lockKey, async () => { const stateUpdates: StateUpdate[] = [] for (const tx of transactions) { // TODO: How do we represent when some fail and some succeed, since the state will be partially updated? stateUpdates.push(await this.applyTransaction(tx)) } return stateUpdates }) }) } /** * Applies the provided SignedTransaction, returning the resulting statue update. 
* * @param signedTransaction The transaction to apply * @returns The updated state */ public async applyTransaction( signedTransaction: SignedTransaction ): Promise<StateUpdate> { let signer: Address log.debug(`Validating Signature: ${JSON.stringify(signedTransaction)}`) signer = this.signatureVerifier.verifyMessage( abiEncodeTransaction(signedTransaction.transaction), signedTransaction.signature ) if ( signer !== signedTransaction.transaction.sender && signer !== this.aggregatorAddress ) { log.info( `Received transaction with invalid signature: ${serializeObject( signedTransaction )}, which recovered a signer of ${signer}` ) throw new SignatureError() } return this.lock.acquire(DefaultRollupStateMachine.lockKey, async () => { const stateUpdate = { transaction: signedTransaction } const transaction: RollupTransaction = signedTransaction.transaction let updatedStates: State[] if (isTransferTransaction(transaction)) { stateUpdate['receiverCreated'] = !this.getAddressKey( transaction.recipient ) updatedStates = await this.applyTransfer(transaction) stateUpdate['receiverSlotIndex'] = this.getAddressKey( transaction.recipient ).toNumber() } else if (isSwapTransaction(transaction)) { updatedStates = await this.applySwap(signer, transaction) stateUpdate['receiverCreated'] = false stateUpdate['receiverSlotIndex'] = this.getAddressKey( UNISWAP_ADDRESS ).toNumber() } else { throw new InvalidTransactionTypeError() } const root: Buffer = await this.tree.getRootHash() await this.db.put(DefaultRollupStateMachine.ROOT_KEY, root) const senderState: State = updatedStates[0] const receiverState: State = updatedStates[1] stateUpdate['senderSlotIndex'] = this.getAddressKey( transaction.sender ).toNumber() stateUpdate['senderState'] = senderState stateUpdate['receiverState'] = receiverState const inclusionProof = async (state: State): Promise<InclusionProof> => { const proof: MerkleTreeInclusionProof = await this.tree.getMerkleProof( this.getAddressKey(state.pubkey), 
DefaultRollupStateMachine.serializeBalances( state.pubkey, state.balances ) ) return proof.siblings.map((p) => p.toString('hex')) } ;[ stateUpdate['senderStateInclusionProof'], stateUpdate['receiverStateInclusionProof'], ] = await Promise.all([ inclusionProof(senderState), inclusionProof(receiverState), ]) stateUpdate['stateRoot'] = (await this.tree.getRootHash()).toString('hex') return stateUpdate }) } public async getStateRoot(): Promise<Buffer> { const lockedRoot = await this.lock.acquire( DefaultRollupStateMachine.lockKey, async () => { return this.tree.getRootHash() } ) return lockedRoot } public getNextNewAccountSlot(): number { return this.lastOpenKey.toNumber() + 1 } public async getSnapshotFromSlot(key: number): Promise<StateSnapshot> { const [accountState, proof, stateRoot]: [ Buffer, MerkleTreeInclusionProof, string ] = await this.lock.acquire(DefaultRollupStateMachine.lockKey, async () => { const bigKey: BigNumber = new BigNumber(key, 10) let leaf: Buffer = await this.tree.getLeaf(bigKey) if (!leaf || leaf.equals(SparseMerkleTreeImpl.siblingBuffer)) { // if we didn't get the leaf it must be empty leaf = SparseMerkleTreeImpl.emptyBuffer } const merkleProof: MerkleTreeInclusionProof = await this.tree.getMerkleProof( bigKey, leaf ) if (!merkleProof) { const msg: string = `Unable to get merkle proof for key ${bigKey.toNumber()}. Leaf data: [${ !leaf ? 'undefined' : leaf.toString() }]` log.error(msg) throw Error(msg) } return [leaf, merkleProof, merkleProof.rootHash.toString('hex')] }) let state: State let inclusionProof: InclusionProof state = !!accountState && !accountState.equals(SparseMerkleTreeImpl.emptyBuffer) ? DefaultRollupStateMachine.deserializeState(accountState) : { pubkey: NULL_ADDRESS, balances: { [UNI_TOKEN_TYPE]: 0, [PIGI_TOKEN_TYPE]: 0 }, } inclusionProof = proof.siblings.map((x: Buffer) => x.toString('hex')) return { slotIndex: key, state, stateRoot, inclusionProof, } } /** * Gets the balances for the provided address. 
* * @param address The address in question * @returns The provided address's balances */ private async getBalances(address: string): Promise<Balances> { const key: BigNumber = this.getAddressKey(address) if (!!key) { const leaf: Buffer = await this.tree.getLeaf(key) if (!!leaf) { return DefaultRollupStateMachine.deserializeState(leaf).balances } } return { [UNI_TOKEN_TYPE]: 0, [PIGI_TOKEN_TYPE]: 0 } } /** * Sets the state for the provided address to be the provided balances. * * @param address The address to set * @param balances The balances state to set * @returns True if the updates succeeded, false otherwise */ private async setAddressState( address: string, balances: Balances ): Promise<boolean> { const addressKey: BigNumber = await this.getOrCreateAddressKey(address) const serializedBalances: Buffer = DefaultRollupStateMachine.serializeBalances( address, balances ) const result: boolean = await this.tree.update( addressKey, serializedBalances ) if (!result) { log.error( `ERROR UPDATING TREE, address: [${address}], key: [${addressKey}], balances: [${serializeObject( balances )}]` ) } else { log.debug( `${address} with key ${addressKey} balance updated to ${serializeObject( balances )}` ) } return result } /** * Determines whether the provided address has the provided balance of the * provided token type. * * @param address The address in question * @param tokenType The token type * @param balance The balance * @returns True if so, false otherwise */ private async hasBalance( address: Address, tokenType: TokenType, balance: number ): Promise<boolean> { // Check that the account has more than some amount of pigi/uni const balances = await this.getBalances(address) return tokenType in balances && balances[tokenType] >= balance } /** * Applies the provided Transfer transaction, returning the updated state. 
* * @param transfer The Transfer in question * @returns The updated balances */ private async applyTransfer(transfer: Transfer): Promise<State[]> { // Make sure the amount is above zero if (transfer.amount < 1) { throw new NegativeAmountError() } // Check that the sender has enough money if ( !(await this.hasBalance( transfer.sender, transfer.tokenType, transfer.amount )) ) { throw new InsufficientBalanceError() } const senderBalances = await this.getBalances(transfer.sender) const recipientBalances = await this.getBalances(transfer.recipient) // Update the balances senderBalances[transfer.tokenType] -= transfer.amount recipientBalances[transfer.tokenType] += transfer.amount // TODO: use batch update await Promise.all([ this.setAddressState(transfer.sender, senderBalances), this.setAddressState(transfer.recipient, recipientBalances), ]) return [ DefaultRollupStateMachine.getStateFromBalances( transfer.sender, senderBalances ), DefaultRollupStateMachine.getStateFromBalances( transfer.recipient, recipientBalances ), ] } /** * Applies the provided Swap transaction, returning the updated state. * * @param sender The sender of the Swap * @param swap The swap * @returns The updated balances */ private async applySwap(sender: Address, swap: Swap): Promise<State[]> { // Make sure the amount is above zero if (swap.inputAmount < 1) { throw new NegativeAmountError() } // Check that the sender has enough money if (!this.hasBalance(sender, swap.tokenType, swap.inputAmount)) { throw new InsufficientBalanceError() } // Check that we'll have ample time to include the swap // Set the post swap balances return this.updateBalancesFromSwap(swap, sender) } /** * Sets and returnsthe new balances for Uniswap and the provided sender * from the provided Swap. 
* * @param swap The swap in question * @param sender The sender of the swap transaction * @returns The resulting state */ private async updateBalancesFromSwap( swap: Swap, sender: Address ): Promise<State[]> { const uniswapBalances: Balances = await this.getBalances(UNISWAP_ADDRESS) // First let's figure out which token types are input & output const inputTokenType = swap.tokenType const outputTokenType = swap.tokenType === UNI_TOKEN_TYPE ? PIGI_TOKEN_TYPE : UNI_TOKEN_TYPE // Next let's calculate the invariant const invariant = uniswapBalances[UNI_TOKEN_TYPE] * uniswapBalances[PIGI_TOKEN_TYPE] // Now calculate the total input tokens const totalInput = this.assessSwapFee(swap.inputAmount) + uniswapBalances[inputTokenType] const newOutputBalance = Math.ceil(invariant / totalInput) const outputAmount = uniswapBalances[outputTokenType] - newOutputBalance // Let's make sure the output amount is above the minimum if (outputAmount < swap.minOutputAmount) { throw new SlippageError() } const senderBalances: Balances = await this.getBalances(sender) // Calculate the new user & swap balances senderBalances[inputTokenType] -= swap.inputAmount senderBalances[outputTokenType] += outputAmount uniswapBalances[inputTokenType] += swap.inputAmount uniswapBalances[outputTokenType] = newOutputBalance // TODO: use batch update await Promise.all([ this.setAddressState(sender, senderBalances), this.setAddressState(UNISWAP_ADDRESS, uniswapBalances), ]) return [ DefaultRollupStateMachine.getStateFromBalances(sender, senderBalances), DefaultRollupStateMachine.getStateFromBalances( UNISWAP_ADDRESS, uniswapBalances ), ] } /** * Assesses the fee charged for a swap. 
* * @param amountBeforeFee The amount of the swap * @return the amount, accounting for the fee */ private assessSwapFee(amountBeforeFee: number): number { if (this.swapFeeBasisPoints === 0) { return amountBeforeFee } return amountBeforeFee * ((10_000.0 - this.swapFeeBasisPoints) / 10_000.0) } private getAddressKey(address: string): BigNumber { return this.addressesToKeys.get(address) } private async getOrCreateAddressKey(address: string): Promise<BigNumber> { const existingKey: BigNumber = this.getAddressKey(address) if (!!existingKey) { return existingKey } let newKey: string = this.lastOpenKey.toString() while (this.usedKeys.has(newKey)) { this.lastOpenKey = this.lastOpenKey.add(ONE) if (this.lastOpenKey.gt(this.maxAddresses)) { throw new StateMachineCapacityError() } newKey = this.lastOpenKey.toString() } this.addressesToKeys.set(address, this.lastOpenKey) this.usedKeys.add(newKey) // Order of updates matters here, so can't parallelize await this.db.put( DefaultRollupStateMachine.getAddressMapDBKey( this.addressesToKeys.size - 1 ), DefaultRollupStateMachine.serializeAddressToKeyForDB( address, this.lastOpenKey ) ) await Promise.all([ this.db.put( DefaultRollupStateMachine.ADDRESS_TO_KEYS_COUNT_KEY, Buffer.from(this.addressesToKeys.size.toString(10)) ), this.db.put( DefaultRollupStateMachine.LAST_OPEN_KEY, this.lastOpenKey.toBuffer() ), ]) return this.addressesToKeys.get(address) } /******************** * STATIC UTILITIES * ********************/ public static serializeBalances(address: string, balances: Balances): Buffer { return Buffer.from( abiEncodeState( DefaultRollupStateMachine.getStateFromBalances(address, balances) ) ) } public static deserializeState(state: Buffer): State { return parseStateFromABI(state.toString()) } public static getStateFromBalances( pubKey: string, balances: Balances ): State { return { pubkey: pubKey, balances, } } public static getAddressMapDBKey(index: number): Buffer { return Buffer.from(`ADDR_IDX_${index}`) } public static 
serializeAddressToKeyForDB( address: Address, key: BigNumber ): Buffer { return Buffer.from(JSON.stringify([address, key.toString()])) } public static deserializeAddressToKeyFromDB(buf: Buffer): any[] { const parsed: any[] = JSON.parse(buf.toString()) return [parsed[0], new BigNumber(parsed[1], 'hex')] } /*********** * GETTERS * ***********/ public getLastOpenKey(): BigNumber { return this.lastOpenKey.clone() } public getUsedKeys(): Set<string> { return new Set<string>(this.usedKeys) } public getAddressesToKeys(): Map<Address, BigNumber> { return new Map<Address, BigNumber>(this.addressesToKeys) } }
the_stack
'use strict';

import * as path from 'path';
import * as fs from 'fs';
import * as _ from 'lodash';
import * as utils from './utils';
import * as cache from './cacheUtils';
import * as semver from 'semver';
import {Promise} from 'es6-promise';

export var grunt: IGrunt = require('grunt');

///////////////////////////
// Helper
///////////////////////////

// Module-level dispatch slot: compileAllFiles points this at either the real
// node spawner below or a test double supplied via options.testExecute.
var executeNode: ICompilePromise;

// Default implementation: spawn node with the given args and resolve with the
// exit code and combined output. Note: `error` is intentionally ignored — tsc
// failures are reported via the exit code.
var executeNodeDefault : ICompilePromise = function(args, optionalInfo) {
    return new Promise((resolve, reject) => {
        grunt.util.spawn({
            cmd: process.execPath,
            args: args
        }, (error, result, code) => {
            var ret: ICompileResult = {
                code: code,
                // New TypeScript compiler uses stdout for user code errors. Old one used stderr.
                output: result.stdout || result.stderr
            };
            resolve(ret);
        });
    });
};

/////////////////////////////////////////////////////////////////
// Fast Compilation
/////////////////////////////////////////////////////////////////

// Map to store if the cache was cleared after the gruntfile was parsed
var cacheClearedOnce: { [targetName: string]: boolean } = {};

// Filters `files` down to those that changed since the last successful
// compile of `targetName`, logging each survivor when verbose.
function getChangedFiles(files, targetName: string, cacheDir: string, verbose: boolean) {
    files = cache.getNewFilesForTarget(files, targetName, cacheDir);
    if (verbose) {
        _.forEach(files, (file) => {
            grunt.log.writeln(('### Fast Compile >>' + file).cyan);
        });
    }
    return files;
}

// Marks `files` as successfully compiled so they are skipped next run.
function resetChangedFiles(files, targetName: string, cacheDir: string) {
    cache.compileSuccessfull(files, targetName, cacheDir);
}

// Wipes the fast-compile cache for a target and remembers that it was done
// once for this process (see cacheClearedOnce).
function clearCache(targetName: string, cacheDir: string) {
    cache.clearCache(targetName, cacheDir);
    cacheClearedOnce[targetName] = true;
}

/////////////////////////////////////////////////////////////////////
// tsc handling
////////////////////////////////////////////////////////////////////

// Resolves the TypeScript bin directory, preferring the consuming project's
// own node_modules/typescript install over the one bundled with grunt-ts.
function resolveTypeScriptBinPath(): string {
    var ownRoot = path.resolve(path.dirname(module.filename), '../..');
    var userRoot = path.resolve(ownRoot, '..', '..');
    var binSub = path.join('node_modules', 'typescript', 'bin');

    if (fs.existsSync(path.join(userRoot, binSub))) {
        // Using project override
        return path.join(userRoot, binSub);
    }
    return path.join(ownRoot, binSub);
}

// Path to the tsc entry point inside a TypeScript bin directory.
function getTsc(binPath: string): string {
    return path.join(binPath, 'tsc');
}

// A compile is considered "good enough" to refresh the fast cache when fast
// mode is on and tsc exited cleanly, or exited with type errors (code 2)
// that the user chose not to fail on.
export function compileResultMeansFastCacheShouldBeRefreshed(options: Partial<IGruntTSOptions>, result: ICompileResult) {
    return (options.fast !== 'never' &&
        (result.code === 0 || (result.code === 2 && !options.failOnTypeErrors)));
}

/**
 * Compiles the files for a grunt-ts target: applies fast-compile filtering,
 * resolves the compiler, builds the full tsc argument list from the task
 * options, writes the args to a temp @file, and spawns tsc.
 * Returns a promise of the compile result (exit code, output, file count).
 */
export function compileAllFiles(options: Partial<IGruntTSOptions>, compilationInfo: IGruntTSCompilationInfo): Promise<ICompileResult> {

    let targetFiles: string[] = compilationInfo.src;

    // Make a local copy so we can modify files without having external side effects
    let files = _.map(targetFiles, file => file);
    var newFiles: string[] = files;

    if (options.fast === 'watch') { // if we only do fast compile if target is watched
        // if this is the first time its running after this file was loaded
        if (cacheClearedOnce[grunt.task.current.target] === undefined) {
            // Then clear the cache for this target
            clearCache(options.targetName, options.tsCacheDir);
        }
    }
    if (options.fast !== 'never') {
        if (compilationInfo.out) {
            grunt.log.writeln('Fast compile will not work when --out is specified. Ignoring fast compilation'.cyan);
        }
        else {
            newFiles = getChangedFiles(files, options.targetName, options.tsCacheDir, options.verbose);
            if (newFiles.length !== 0 || options.testExecute || utils.shouldPassThrough(options)) {
                if (options.forceCompileRegex) {
                    const regex = new RegExp(options.forceCompileRegex);
                    // Finds all force compile files
                    const additionalFiles = files.filter((file) => { return regex.test(file); });
                    // Adds them to newFiles and unique the array
                    newFiles = newFiles.concat(additionalFiles).filter((value, index, self) => {
                        return self.indexOf(value) === index;
                    });
                }
                files = newFiles;

                // If outDir is specified but no baseDir is specified we need to determine one
                if (compilationInfo.outDir && !options.baseDir) {
                    options.baseDir = utils.findCommonPath(files, '/');
                }
            }
            else {
                // Nothing changed: short-circuit with a synthetic success result.
                grunt.log.writeln('No file changes were detected. Skipping Compile'.green);
                return new Promise((resolve) => {
                    var ret: ICompileResult = {
                        code: 0,
                        fileCount: 0,
                        output: 'No files compiled as no change detected'
                    };
                    resolve(ret);
                });
            }
        }
    }

    const tsconfig = <ITSConfigSupport>options.tsconfig;
    let tsc: string, tscVersion: string = '';
    if (options.compiler) { // Custom compiler (task.compiler)
        grunt.log.writeln('Using the custom compiler : ' + options.compiler);
        tsc = options.compiler;
        tscVersion = '';
    } else { // the bundled OR npm module based compiler
        const tscPath = resolveTypeScriptBinPath();
        tsc = getTsc(tscPath);
        // getTscVersion is declared near the end of this function body; this
        // call works via function-declaration hoisting.
        tscVersion = getTscVersion(tscPath);
        grunt.log.writeln('Using tsc v' + tscVersion);
    }

    // If baseDir is specified create a temp tsc file to make sure that `--outDir` works fine
    // see https://github.com/grunt-ts/grunt-ts/issues/77
    if (compilationInfo.outDir && options.baseDir && files.length > 0 && !options.rootDir) {
        const baseDirFile: string = '.baseDir.ts',
            baseDirFilePath = path.join(options.baseDir, baseDirFile),
            settingsSource = !!tsconfig ? 'tsconfig.json' : 'Gruntfile ts `options`',
            settingsSection = !!tsconfig ? 'in the `compilerOptions` section' : 'under the task or ' +
                'target `options` object';
        if (!fs.existsSync(baseDirFilePath)) {
            const baseDir_Message = `// grunt-ts creates this file to help TypeScript find ` +
                `the compilation root of your project. If you wish to get to stop creating ` +
                `it, specify a \`rootDir\` setting in the ${settingsSource}. See ` +
                `https://github.com/TypeStrong/grunt-ts#rootdir for details. Note that ` +
                `\`rootDir\` goes under \`options\`, and is case-sensitive. This message ` +
                `was revised in grunt-ts v6. Note that \`rootDir\` requires TypeScript 1.5 ` +
                ` or higher.`;
            grunt.file.write(baseDirFilePath, baseDir_Message);
        }
        if (tscVersion && semver.satisfies(tscVersion, '>=1.5.0')) {
            grunt.log.warn((`Warning: created ${baseDirFilePath} file because \`outDir\` was ` +
                `specified in the ${settingsSource}, but not \`rootDir\`. Add \`rootDir\` ` +
                ` ${settingsSection} to fix this warning.`).magenta);
        }
        files.push(baseDirFilePath);
    }

    // If reference and out are both specified.
    // Then only compile the updated reference file as that contains the correct order
    if (options.reference && compilationInfo.out) {
        var referenceFile = path.resolve(options.reference);
        files = [referenceFile];
    }

    // Quote the files to compile. Needed for command line parsing by tsc
    files = _.map(files, item => utils.possiblyQuotedRelativePath(item));

    let args: string[] = files.slice(0);

    grunt.log.verbose.writeln(`TypeScript path: ${tsc}`);

    if (tsconfig && tsconfig.passThrough) {
        // passThrough: defer everything to the project's tsconfig.json.
        args.push('--project', tsconfig.tsconfig);
    } else {
        // Translate each grunt-ts option into its tsc command-line flag.
        if (options.sourceMap) { args.push('--sourcemap'); }
        if (options.emitDecoratorMetadata) { args.push('--emitDecoratorMetadata'); }
        if (options.declaration) { args.push('--declaration'); }
        if (options.removeComments) { args.push('--removeComments'); }
        if (options.noImplicitAny) { args.push('--noImplicitAny'); }
        if (options.noResolve) { args.push('--noResolve'); }
        if (options.noStrictGenericChecks) { args.push('--noStrictGenericChecks'); }
        if (options.noEmitOnError) { args.push('--noEmitOnError'); }
        if (options.preserveConstEnums) { args.push('--preserveConstEnums'); }
        if (options.preserveSymlinks) { args.push('--preserveSymlinks'); }
        if (options.suppressImplicitAnyIndexErrors) { args.push('--suppressImplicitAnyIndexErrors'); }
        if (options.noEmit) { args.push('--noEmit'); }
        if (options.inlineSources) { args.push('--inlineSources'); }
        if (options.inlineSourceMap) { args.push('--inlineSourceMap'); }
        if (options.newLine && !utils.newLineIsRedundantForTsc(options.newLine)) { args.push('--newLine', options.newLine); }
        if (options.isolatedModules) { args.push('--isolatedModules'); }
        if (options.noEmitHelpers) { args.push('--noEmitHelpers'); }
        if (options.experimentalDecorators) { args.push('--experimentalDecorators'); }
        if (options.experimentalAsyncFunctions) { args.push('--experimentalAsyncFunctions'); }
        if (options.jsx) { args.push('--jsx', options.jsx.toLocaleLowerCase()); }
        if (options.moduleResolution) { args.push('--moduleResolution', options.moduleResolution.toLocaleLowerCase()); }
        if (options.rootDir) { args.push('--rootDir', options.rootDir); }
        if (options.noLib) { args.push('--noLib'); }
        if (options.emitBOM) { args.push('--emitBOM'); }
        if (options.locale) { args.push('--locale', options.locale); }
        if (options.suppressExcessPropertyErrors) { args.push('--suppressExcessPropertyErrors'); }
        if (options.stripInternal) { args.push('--stripInternal'); }
        if (options.allowSyntheticDefaultImports) { args.push('--allowSyntheticDefaultImports'); }
        if (options.reactNamespace) { args.push('--reactNamespace', options.reactNamespace); }
        if (options.skipLibCheck) { args.push('--skipLibCheck'); }
        if (options.skipDefaultLibCheck) { args.push('--skipDefaultLibCheck'); }
        if (options.pretty) { args.push('--pretty'); }
        if (options.allowUnusedLabels) { args.push('--allowUnusedLabels'); }
        if (options.noImplicitReturns) { args.push('--noImplicitReturns'); }
        if (options.noFallthroughCasesInSwitch) { args.push('--noFallthroughCasesInSwitch'); }
        if (options.allowUnreachableCode) { args.push('--allowUnreachableCode'); }
        if (options.forceConsistentCasingInFileNames) { args.push('--forceConsistentCasingInFileNames'); }
        if (options.allowJs) { args.push('--allowJs'); }
        if (options.checkJs) { args.push('--checkJs'); }
        if (options.noImplicitUseStrict) { args.push('--noImplicitUseStrict'); }
        if (options.alwaysStrict) { args.push('--alwaysStrict'); }
        if (options.diagnostics) { args.push('--diagnostics'); }
        if (options.importHelpers) { args.push('--importHelpers'); }
        if (options.listFiles) { args.push('--listFiles'); }
        if (options.listEmittedFiles) { args.push('--listEmittedFiles'); }
        if (options.noImplicitThis) { args.push('--noImplicitThis'); }
        if (options.noUnusedLocals) { args.push('--noUnusedLocals'); }
        if (options.noUnusedParameters) { args.push('--noUnusedParameters'); }
        if (options.strictFunctionTypes) { args.push('--strictFunctionTypes'); }
        if (options.esModuleInterop) { args.push('--esModuleInterop'); }
        if (options.strictPropertyInitialization) { args.push('--strictPropertyInitialization'); }
        if (options.strictNullChecks) { args.push('--strictNullChecks'); }
        if (options.traceResolution) { args.push('--traceResolution'); }
        if (options.baseUrl) { args.push('--baseUrl', utils.enclosePathInQuotesIfRequired(options.baseUrl)); }
        if (options.charset) { args.push('--charset', options.charset); }
        if (options.declarationDir) { args.push('--declarationDir', utils.possiblyQuotedRelativePath(options.declarationDir)); }
        if (options.jsxFactory) { args.push('--jsxFactory', options.jsxFactory); }
        if (options.lib) {
            // Known --lib values; unknown entries only warn, they are still passed.
            let possibleOptions = ['es5', 'es6', 'es2015', 'es7', 'es2016', 'es2017', 'esnext', 'dom',
                'dom.iterable', 'webworker', 'scripthost', 'es2015.core', 'es2015.collection',
                'es2015.generator', 'es2015.iterable', 'es2015.promise', 'es2015.proxy',
                'es2015.reflect', 'es2015.symbol', 'es2015.symbol.wellknown', 'es2016.array.include',
                'es2017.object', 'es2017.sharedmemory', 'esnext.asynciterable'];
            options.lib.forEach(option => {
                if (possibleOptions.indexOf((option + '').toLocaleLowerCase()) === -1) {
                    grunt.log.warn(`WARNING: Option "lib" does not support ${option} `.magenta);
                }
            });
            args.push('--lib', options.lib.join(','));
        }
        // Note: explicit 0 is a valid depth, hence the second clause.
        if (options.maxNodeModuleJsDepth > 0 || options.maxNodeModuleJsDepth === 0) {
            args.push('--maxNodeModuleJsDepth', options.maxNodeModuleJsDepth + '');
        }
        if (options.types) {
            args.push('--types', `"${_.map(options.types, t => utils.stripQuotesIfQuoted(t.trim())).join(',')}"`);
        }
        if (options.typeRoots) {
            args.push('--typeRoots', `"${_.map(options.typeRoots, t => utils.stripQuotesIfQuoted(t.trim())).join(',')}"`);
        }
        if (options.downlevelIteration) { args.push('--downlevelIteration'); }
        if (options.disableSizeLimit) { args.push('--disableSizeLimit'); }
        if (options.strict) { args.push('--strict'); }

        args.push('--target', options.target.toUpperCase());

        if (options.module) {
            const moduleOptionString: string = ('' + options.module).toLowerCase();
            if ('none|amd|commonjs|system|umd|es6|es2015|esnext'.indexOf(moduleOptionString) > -1) {
                args.push('--module', moduleOptionString);
            } else {
                console.warn(('WARNING: Option "module" only supports "none" | "amd" | "commonjs" |' +
                    ' "system" | "umd" | "es6" | "es2015" | "esnext" ').magenta);
            }
        }

        if (compilationInfo.outDir) {
            if (compilationInfo.out) {
                console.warn('WARNING: Option "out" and "outDir" should not be used together'.magenta);
            }
            args.push('--outDir', compilationInfo.outDir);
        }

        if (compilationInfo.out) {
            // We only pass --out instead of --outFile for backward-compatability reasons.
            // It is the same for purposes of the command-line (the subtle difference is handled in the tsconfig code
            // and the value of --outFile is copied to --out).
            args.push('--out', compilationInfo.out);
        }

        if (compilationInfo.dest && (!compilationInfo.out) && (!compilationInfo.outDir)) {
            if (utils.isJavaScriptFile(compilationInfo.dest)) {
                args.push('--out', compilationInfo.dest);
            } else {
                if (compilationInfo.dest === 'src') {
                    console.warn(('WARNING: Destination for target "' + options.targetName + '" is "src", which is the default. If you have' +
                        ' forgotten to specify a "dest" parameter, please add it. If this is correct, you may wish' +
                        ' to change the "dest" parameter to "src/" or just ignore this warning.').magenta);
                }
                if (Array.isArray(compilationInfo.dest)) {
                    if ((<string[]><any>compilationInfo.dest).length === 0) {
                        // ignore it and do nothing.
                    } else if ((<string[]><any>compilationInfo.dest).length > 0) {
                        console.warn((('WARNING: "dest" for target "' + options.targetName + '" is an array. This is not supported by the' +
                            ' TypeScript compiler or grunt-ts.' +
                            (((<string[]><any>compilationInfo.dest).length > 1) ? ' Only the first "dest" will be used. The' +
                                ' remaining items will be truncated.' : ''))).magenta);
                        args.push('--outDir', (<string[]><any>compilationInfo.dest)[0]);
                    }
                } else {
                    args.push('--outDir', compilationInfo.dest);
                }
            }
        }

        if (args.indexOf('--out') > -1 && args.indexOf('--module') > -1) {
            if (tscVersion === '' && options.compiler) {
                // don't warn if they are using a custom compiler.
            } else if (semver.satisfies(tscVersion, '>=1.8.0')) {
                if ((options.module === 'system' || options.module === 'amd')) {
                    // this is fine.
                } else {
                    // NOTE(review): the concatenation below produces "...set to" +
                    // "system..." with no separating space — looks like a message typo.
                    console.warn(('WARNING: TypeScript 1.8+ requires "module" to be set to' +
                        'system or amd for concatenation of external modules to work.').magenta);
                }
            } else {
                console.warn(('WARNING: TypeScript < 1.8 does not allow external modules to be concatenated with' +
                    ' --out. Any exported code may be truncated. See TypeScript issue #1544 for' +
                    ' more details.').magenta);
            }
        }

        if (options.sourceRoot) { args.push('--sourceRoot', options.sourceRoot); }
        if (options.mapRoot) { args.push('--mapRoot', options.mapRoot); }
    }

    if (options.additionalFlags) { args.push(options.additionalFlags); }

    /** Reads the tsc version from the package.json of the relevant TypeScript install */
    function getTscVersion(tscPath: string) {
        const pkg = JSON.parse(fs.readFileSync(path.resolve(tscPath, '..', 'package.json')).toString());
        return '' + pkg.version;
    }

    // To debug the tsc command
    if (options.verbose) {
        console.log(args.join(' ').yellow);
    }
    else {
        grunt.log.verbose.writeln(args.join(' ').yellow);
    }

    // Create a temp last command file and use that to guide tsc.
    // Reason: passing all the files on the command line causes TSC to go in an infinite loop.
    let tempfilename = utils.getTempFile('tscommand');
    if (!tempfilename) {
        throw (new Error('cannot create temp file'));
    }

    fs.writeFileSync(tempfilename, args.join(' '));

    let command: string[];

    // Switch implementation if a test version of executeNode exists.
    if ('testExecute' in options) {
        if (_.isFunction(options.testExecute)) {
            command = [tsc, args.join(' ')];
            executeNode = options.testExecute;
        } else {
            const invalidTestExecuteError = 'Invalid testExecute node present on target "' +
                options.targetName + '". Value of testExecute must be a function.';
            throw (new Error(invalidTestExecuteError));
        }
    } else {
        // this is the normal path.
        command = [tsc, '@' + tempfilename];
        executeNode = executeNodeDefault;
    }

    // Execute command
    return executeNode(command, options).then((result: ICompileResult) => {
        if (compileResultMeansFastCacheShouldBeRefreshed(options, result)) {
            resetChangedFiles(newFiles, options.targetName, options.tsCacheDir);
        }
        result.fileCount = files.length;

        fs.unlinkSync(tempfilename);
        grunt.log.writeln(result.output);
        return (<any>Promise).cast(result);
    }, (err) => {
        fs.unlinkSync(tempfilename);
        throw err;
    });
}
the_stack
import * as assert from "assert";

import {
  Aborter,
  AccountSASPermissions,
  AccountSASResourceTypes,
  AccountSASServices,
  AnonymousCredential,
  BlobSASPermissions,
  ContainerSASPermissions,
  ContainerURL,
  generateAccountSASQueryParameters,
  generateBlobSASQueryParameters,
  PageBlobURL,
  ServiceURL,
  SharedKeyCredential,
  StorageURL
} from "../../lib";
import { SASProtocol } from "../../lib/SASQueryParameters";
import { getBSU, getUniqueName } from "../utils";

// Integration tests for account-level and blob/container-level SAS token
// generation. NOTE(review): these tests run against a live storage account
// obtained via getBSU() — they require valid credentials in the environment.
describe("Shared Access Signature (SAS) generation Node.js only", () => {
  const serviceURL = getBSU();

  // Happy path: a fully-specified account SAS should authorize a service call.
  it("generateAccountSASQueryParameters should work", async () => {
    const now = new Date();
    now.setMinutes(now.getMinutes() - 5); // Skip clock skew with server
    const tmr = new Date();
    tmr.setDate(tmr.getDate() + 1);

    // By default, credential is always the last element of pipeline factories
    const factories = serviceURL.pipeline.factories;
    const sharedKeyCredential = factories[factories.length - 1];

    const sas = generateAccountSASQueryParameters(
      {
        expiryTime: tmr,
        ipRange: { start: "0.0.0.0", end: "255.255.255.255" },
        permissions: AccountSASPermissions.parse("rwdlacup").toString(),
        protocol: SASProtocol.HTTPSandHTTP,
        resourceTypes: AccountSASResourceTypes.parse("sco").toString(),
        services: AccountSASServices.parse("btqf").toString(),
        startTime: now,
        version: "2016-05-31"
      },
      sharedKeyCredential as SharedKeyCredential
    ).toString();

    const sasURL = `${serviceURL.url}?${sas}`;
    // Anonymous credential: all authorization comes from the SAS query string.
    const serviceURLWithSAS = new ServiceURL(
      sasURL,
      StorageURL.newPipeline(new AnonymousCredential())
    );

    await serviceURLWithSAS.getAccountInfo(Aborter.none);
  });

  // Missing the "r" (read) and "a" (add) permissions — getProperties must fail.
  it("generateAccountSASQueryParameters should not work with invalid permission", async () => {
    const tmr = new Date();
    tmr.setDate(tmr.getDate() + 1);

    // By default, credential is always the last element of pipeline factories
    const factories = serviceURL.pipeline.factories;
    const sharedKeyCredential = factories[factories.length - 1];

    const sas = generateAccountSASQueryParameters(
      {
        expiryTime: tmr,
        permissions: AccountSASPermissions.parse("wdlcup").toString(),
        resourceTypes: AccountSASResourceTypes.parse("sco").toString(),
        services: AccountSASServices.parse("btqf").toString()
      },
      sharedKeyCredential as SharedKeyCredential
    ).toString();

    const sasURL = `${serviceURL.url}?${sas}`;
    const serviceURLWithSAS = new ServiceURL(
      sasURL,
      StorageURL.newPipeline(new AnonymousCredential())
    );

    let error;
    try {
      await serviceURLWithSAS.getProperties(Aborter.none);
    } catch (err) {
      error = err;
    }
    assert.ok(error);
  });

  // Missing the "b" (blob) service — a blob service call must fail.
  it("generateAccountSASQueryParameters should not work with invalid service", async () => {
    const tmr = new Date();
    tmr.setDate(tmr.getDate() + 1);

    // By default, credential is always the last element of pipeline factories
    const factories = serviceURL.pipeline.factories;
    const sharedKeyCredential = factories[factories.length - 1];

    const sas = generateAccountSASQueryParameters(
      {
        expiryTime: tmr,
        permissions: AccountSASPermissions.parse("rwdlacup").toString(),
        resourceTypes: AccountSASResourceTypes.parse("sco").toString(),
        services: AccountSASServices.parse("tqf").toString()
      },
      sharedKeyCredential as SharedKeyCredential
    ).toString();

    const sasURL = `${serviceURL.url}?${sas}`;
    const serviceURLWithSAS = new ServiceURL(
      sasURL,
      StorageURL.newPipeline(new AnonymousCredential())
    );

    let error;
    try {
      await serviceURLWithSAS.getProperties(Aborter.none);
    } catch (err) {
      error = err;
    }
    assert.ok(error);
  });

  // Missing the "s" (service) resource type — a service-level call must fail.
  it("generateAccountSASQueryParameters should not work with invalid resource type", async () => {
    const tmr = new Date();
    tmr.setDate(tmr.getDate() + 1);

    // By default, credential is always the last element of pipeline factories
    const factories = serviceURL.pipeline.factories;
    const sharedKeyCredential = factories[factories.length - 1];

    const sas = generateAccountSASQueryParameters(
      {
        expiryTime: tmr,
        ipRange: { start: "0.0.0.0", end: "255.255.255.255" },
        permissions: AccountSASPermissions.parse("rwdlacup").toString(),
        protocol: SASProtocol.HTTPSandHTTP,
        resourceTypes: AccountSASResourceTypes.parse("co").toString(),
        services: AccountSASServices.parse("btqf").toString(),
        version: "2016-05-31"
      },
      sharedKeyCredential as SharedKeyCredential
    ).toString();

    const sasURL = `${serviceURL.url}?${sas}`;
    const serviceURLWithSAS = new ServiceURL(
      sasURL,
      StorageURL.newPipeline(new AnonymousCredential())
    );

    let error;
    try {
      await serviceURLWithSAS.getProperties(Aborter.none);
    } catch (err) {
      error = err;
    }
    assert.ok(error);
  });

  // Container-scoped service SAS: should authorize listing blobs in the container.
  it("generateBlobSASQueryParameters should work for container", async () => {
    const now = new Date();
    now.setMinutes(now.getMinutes() - 5); // Skip clock skew with server
    const tmr = new Date();
    tmr.setDate(tmr.getDate() + 1);

    // By default, credential is always the last element of pipeline factories
    const factories = serviceURL.pipeline.factories;
    const sharedKeyCredential = factories[factories.length - 1];

    const containerName = getUniqueName("container");
    const containerURL = ContainerURL.fromServiceURL(serviceURL, containerName);
    await containerURL.create(Aborter.none);

    const containerSAS = generateBlobSASQueryParameters(
      {
        containerName,
        expiryTime: tmr,
        ipRange: { start: "0.0.0.0", end: "255.255.255.255" },
        permissions: ContainerSASPermissions.parse("racwdl").toString(),
        protocol: SASProtocol.HTTPSandHTTP,
        startTime: now,
        version: "2016-05-31"
      },
      sharedKeyCredential as SharedKeyCredential
    );

    const sasURL = `${containerURL.url}?${containerSAS}`;
    const containerURLwithSAS = new ContainerURL(
      sasURL,
      StorageURL.newPipeline(new AnonymousCredential())
    );

    await containerURLwithSAS.listBlobFlatSegment(Aborter.none);
    await containerURL.delete(Aborter.none);
  });

  // Blob-scoped service SAS with response-header overrides; verifies that the
  // overridden content headers are returned by getProperties.
  it("generateBlobSASQueryParameters should work for blob", async () => {
    const now = new Date();
    now.setMinutes(now.getMinutes() - 5); // Skip clock skew with server
    const tmr = new Date();
    tmr.setDate(tmr.getDate() + 1);

    // By default, credential is always the last element of pipeline factories
    const factories = serviceURL.pipeline.factories;
    const sharedKeyCredential = factories[factories.length - 1];

    const containerName = getUniqueName("container");
    const containerURL = ContainerURL.fromServiceURL(serviceURL, containerName);
    await containerURL.create(Aborter.none);

    const blobName = getUniqueName("blob");
    const blobURL = PageBlobURL.fromContainerURL(containerURL, blobName);
    await blobURL.create(Aborter.none, 1024, {
      blobHTTPHeaders: {
        blobContentType: "content-type-original"
      }
    });

    const blobSAS = generateBlobSASQueryParameters(
      {
        blobName,
        cacheControl: "cache-control-override",
        containerName,
        contentDisposition: "content-disposition-override",
        contentEncoding: "content-encoding-override",
        contentLanguage: "content-language-override",
        contentType: "content-type-override",
        expiryTime: tmr,
        ipRange: { start: "0.0.0.0", end: "255.255.255.255" },
        permissions: BlobSASPermissions.parse("racwd").toString(),
        protocol: SASProtocol.HTTPSandHTTP,
        startTime: now,
        version: "2016-05-31"
      },
      sharedKeyCredential as SharedKeyCredential
    );

    const sasURL = `${blobURL.url}?${blobSAS}`;
    const blobURLwithSAS = new PageBlobURL(
      sasURL,
      StorageURL.newPipeline(new AnonymousCredential())
    );

    const properties = await blobURLwithSAS.getProperties(Aborter.none);
    assert.equal(properties.cacheControl, "cache-control-override");
    assert.equal(properties.contentDisposition, "content-disposition-override");
    assert.equal(properties.contentEncoding, "content-encoding-override");
    assert.equal(properties.contentLanguage, "content-language-override");
    assert.equal(properties.contentType, "content-type-override");

    await containerURL.delete(Aborter.none);
  });

  // Same as above but with a blob name full of slashes, unicode and
  // URL-special characters, to exercise canonicalization/escaping.
  it("generateBlobSASQueryParameters should work for blob with special namings", async () => {
    const now = new Date();
    now.setMinutes(now.getMinutes() - 5); // Skip clock skew with server
    const tmr = new Date();
    tmr.setDate(tmr.getDate() + 1);

    // By default, credential is always the last element of pipeline factories
    const factories = serviceURL.pipeline.factories;
    const sharedKeyCredential = factories[factories.length - 1];

    const containerName = getUniqueName("container-with-dash");
    const containerURL = ContainerURL.fromServiceURL(serviceURL, containerName);
    await containerURL.create(Aborter.none);

    const blobName = getUniqueName(
      "////Upper/blob/empty /another 汉字 ру́сский язы́к ру́сский язы́к عربي/عربى にっぽんご/にほんご . special ~!@#$%^&*()_+`1234567890-={}|[]\\:\";'<>?,/'"
    );
    const blobURL = PageBlobURL.fromContainerURL(containerURL, blobName);
    await blobURL.create(Aborter.none, 1024, {
      blobHTTPHeaders: {
        blobContentType: "content-type-original"
      }
    });

    const blobSAS = generateBlobSASQueryParameters(
      {
        // NOTICE: Azure Storage Server will replace "\" with "/" in the blob names
        blobName: blobName.replace(/\\/g, "/"),
        cacheControl: "cache-control-override",
        containerName,
        contentDisposition: "content-disposition-override",
        contentEncoding: "content-encoding-override",
        contentLanguage: "content-language-override",
        contentType: "content-type-override",
        expiryTime: tmr,
        ipRange: { start: "0.0.0.0", end: "255.255.255.255" },
        permissions: BlobSASPermissions.parse("racwd").toString(),
        protocol: SASProtocol.HTTPSandHTTP,
        startTime: now,
        version: "2016-05-31"
      },
      sharedKeyCredential as SharedKeyCredential
    );

    const sasURL = `${blobURL.url}?${blobSAS}`;
    const blobURLwithSAS = new PageBlobURL(
      sasURL,
      StorageURL.newPipeline(new AnonymousCredential())
    );

    const properties = await blobURLwithSAS.getProperties(Aborter.none);
    assert.equal(properties.cacheControl, "cache-control-override");
    assert.equal(properties.contentDisposition, "content-disposition-override");
    assert.equal(properties.contentEncoding, "content-encoding-override");
    assert.equal(properties.contentLanguage, "content-language-override");
    assert.equal(properties.contentType, "content-type-override");

    await containerURL.delete(Aborter.none);
  });

  // SAS referencing a stored container access policy by identifier instead of
  // inlining permissions/expiry.
  it("generateBlobSASQueryParameters should work for blob with access policy", async () => {
    const now = new Date();
    now.setMinutes(now.getMinutes() - 5); // Skip clock skew with server
    const tmr = new Date();
    tmr.setDate(tmr.getDate() + 1);

    // By default, credential is always the last element of pipeline factories
    const factories = serviceURL.pipeline.factories;
    const sharedKeyCredential = factories[factories.length - 1];

    const containerName = getUniqueName("container");
    const containerURL = ContainerURL.fromServiceURL(serviceURL, containerName);
    await containerURL.create(Aborter.none);

    const blobName = getUniqueName("blob");
    const blobURL = PageBlobURL.fromContainerURL(containerURL, blobName);
    await blobURL.create(Aborter.none, 1024);

    const id = "unique-id";
    await containerURL.setAccessPolicy(Aborter.none, undefined, [
      {
        accessPolicy: {
          expiry: tmr,
          permission: ContainerSASPermissions.parse("racwdl").toString(),
          start: now
        },
        id
      }
    ]);

    const blobSAS = generateBlobSASQueryParameters(
      {
        containerName,
        identifier: id
      },
      sharedKeyCredential as SharedKeyCredential
    );

    const sasURL = `${blobURL.url}?${blobSAS}`;
    const blobURLwithSAS = new PageBlobURL(
      sasURL,
      StorageURL.newPipeline(new AnonymousCredential())
    );

    await blobURLwithSAS.getProperties(Aborter.none);
    await containerURL.delete(Aborter.none);
  });
});
the_stack
"use strict";

import { tokenize } from "protobufjs";
import vscode = require("vscode");
import { SyntacticGuessScope } from "./proto3SyntacticScopeGuesser";

// ---------------------------------------------------------------------------
// Static completion data for .proto files: keyword items, option items per
// scope (file / message / field / enum / enum value / service), field rules
// and scalar types. The documentation strings mirror the comments in
// google/protobuf/descriptor.proto.
// ---------------------------------------------------------------------------

// Bare keyword completion items.
let kwSyntax = createCompletionKeyword("syntax");
let kwPackage = createCompletionKeyword("package");
let kwOption = createCompletionKeyword("option");
let kwImport = createCompletionKeyword("import");
let kwMessage = createCompletionKeyword("message");
let kwEnum = createCompletionKeyword("enum");
let kwReserved = createCompletionKeyword("reserved");
let kwRpc = createCompletionKeyword("rpc");

// Options valid at file scope (after a top-level `option` keyword).
let fileOptions = [
  createCompletionOption(
    "java_package",
    ` Sets the Java package where classes generated from this .proto will be placed. By default, the proto package is used, but this is often inappropriate because proto packages do not normally start with backwards domain names. `
  ),
  createCompletionOption(
    "java_outer_classname",
    ` If set, all the classes from the .proto file are wrapped in a single outer class with the given name. This applies to both Proto1 (equivalent to the old "--one_java_file" option) and Proto2 (where a .proto always translates to a single class, but you may want to explicitly choose the class name). `
  ),
  createCompletionOption(
    "java_multiple_files",
    ` If set true, then the Java code generator will generate a separate .java file for each top-level message, enum, and service defined in the .proto file. Thus, these types will *not* be nested inside the outer class named by java_outer_classname. However, the outer class will still be generated to contain the file's getDescriptor() method as well as any top-level extensions defined in the file. `
  ),
  createCompletionOption(
    "java_generate_equals_and_hash",
    ` If set true, then the Java code generator will generate equals() and hashCode() methods for all messages defined in the .proto file. This increases generated code size, potentially substantially for large protos, which may harm a memory-constrained application. - In the full runtime this is a speed optimization, as the AbstractMessage base class includes reflection-based implementations of these methods. - In the lite runtime, setting this option changes the semantics of equals() and hashCode() to more closely match those of the full runtime; the generated methods compute their results based on field values rather than object identity. (Implementations should not assume that hashcodes will be consistent across runtimes or versions of the protocol compiler.) `
  ),
  createCompletionOption(
    "java_string_check_utf8",
    ` If set true, then the Java2 code generator will generate code that throws an exception whenever an attempt is made to assign a non-UTF-8 byte sequence to a string field. Message reflection will do the same. However, an extension field still accepts non-UTF-8 byte sequences. This option has no effect on when used with the lite runtime. `
  ),
  createCompletionOption(
    "optimize_for",
    ` Generated classes can be optimized for speed or code size. `
  ),
  createCompletionOption(
    "go_package",
    ` Sets the Go package where structs generated from this .proto will be placed. If omitted, the Go package will be derived from the following: - The basename of the package import path, if provided. - Otherwise, the package statement in the .proto file, if present. - Otherwise, the basename of the .proto file, without extension. `
  ),
  //createCompletionOption('cc_generic_services'),
  //createCompletionOption('java_generic_services'),
  //createCompletionOption('py_generic_services'),
  createCompletionOption(
    "deprecated",
    ` Is this file deprecated? Depending on the target platform, this can emit Deprecated annotations for everything in the file, or it will be completely ignored; in the very least, this is a formalization for deprecating files. `
  ),
  createCompletionOption(
    "cc_enable_arenas",
    ` Enables the use of arenas for the proto messages in this file. This applies only to generated classes for C++. `
  ),
  createCompletionOption(
    "objc_class_prefix",
    ` Sets the objective c class prefix which is prepended to all objective c generated classes from this .proto. There is no default. `
  ),
  createCompletionOption(
    "csharp_namespace",
    ` Namespace for generated classes; defaults to the package. `
  ),
];

// Options valid inside a message body.
let msgOptions = [
  createCompletionOption(
    "message_set_wire_format",
    ` Set true to use the old proto1 MessageSet wire format for extensions. This is provided for backwards-compatibility with the MessageSet wire format. You should not use this for any other reason: It's less efficient, has fewer features, and is more complicated. `
  ),
  createCompletionOption(
    "no_standard_descriptor_accessor",
    ` Disables the generation of the standard "descriptor()" accessor, which can conflict with a field of the same name. This is meant to make migration from proto1 easier; new code should avoid fields named "descriptor". `
  ),
  createCompletionOption(
    "deprecated",
    ` Is this message deprecated? Depending on the target platform, this can emit Deprecated annotations for the message, or it will be completely ignored; in the very least, this is a formalization for deprecating messages. `
  ),
  //createCompletionOption('map_entry', ``),
];

// Options valid inside a field's bracketed option list.
let fieldOptions = [
  //createCompletionOption('ctype', ``),
  createCompletionOption(
    "packed",
    ` The packed option can be enabled for repeated primitive fields to enable a more efficient representation on the wire. Rather than repeatedly writing the tag and type for each element, the entire array is encoded as a single length-delimited blob. In proto3, only explicit setting it to false will avoid using packed encoding. `
  ),
  createCompletionOption(
    "jstype",
    ` The jstype option determines the JavaScript type used for values of the field. The option is permitted only for 64 bit integral and fixed types (int64, uint64, sint64, fixed64, sfixed64). By default these types are represented as JavaScript strings. This avoids loss of precision that can happen when a large value is converted to a floating point JavaScript numbers. Specifying JS_NUMBER for the jstype causes the generated JavaScript code to use the JavaScript "number" type instead of strings. This option is an enum to permit additional types to be added, e.g. goog.math.Integer. `
  ),
  createCompletionOption(
    "lazy",
    ` Should this field be parsed lazily? Lazy applies only to message-type fields. It means that when the outer message is initially parsed, the inner message's contents will not be parsed but instead stored in encoded form. The inner message will actually be parsed when it is first accessed. This is only a hint. Implementations are free to choose whether to use eager or lazy parsing regardless of the value of this option. However, setting this option true suggests that the protocol author believes that using lazy parsing on this field is worth the additional bookkeeping overhead typically needed to implement it. This option does not affect the public interface of any generated code; all method signatures remain the same. Furthermore, thread-safety of the interface is not affected by this option; const methods remain safe to call from multiple threads concurrently, while non-const methods continue to require exclusive access. Note that implementations may choose not to check required fields within a lazy sub-message. That is, calling IsInitialized() on the outher message may return true even if the inner message has missing required fields. This is necessary because otherwise the inner message would have to be parsed in order to perform the check, defeating the purpose of lazy parsing. An implementation which chooses not to check required fields must be consistent about it. That is, for any particular sub-message, the implementation must either *always* check its required fields, or *never* check its required fields, regardless of whether or not the message has been parsed. `
  ),
  createCompletionOption(
    "deprecated",
    ` Is this field deprecated? Depending on the target platform, this can emit Deprecated annotations for accessors, or it will be completely ignored; in the very least, this is a formalization for deprecating fields. `
  ),
];

// `default` is only offered in proto2 field option lists.
let fieldDefault = createCompletionOption("default", ``);

// Options valid inside an enum body.
let enumOptions = [
  createCompletionOption(
    "allow_alias",
    ` Set this option to true to allow mapping different tag names to the same value. `
  ),
  createCompletionOption(
    "deprecated",
    ` Is this enum deprecated? Depending on the target platform, this can emit Deprecated annotations for the enum, or it will be completely ignored; in the very least, this is a formalization for deprecating enums. `
  ),
];

// Options valid on an individual enum value.
let enumValueOptions = [
  createCompletionOption(
    "deprecated",
    ` Is this enum value deprecated? Depending on the target platform, this can emit Deprecated annotations for the enum value, or it will be completely ignored; in the very least, this is a formalization for deprecating enum values. `
  ),
];

// Options valid inside a service body.
let serviceOptions = [
  createCompletionOption(
    "deprecated",
    ` Is this service deprecated? Depending on the target platform, this can emit Deprecated annotations for the service, or it will be completely ignored; in the very least, this is a formalization for deprecating services. `
  ),
];

// Field label keywords. Order matters: index 0 ("repeated") is the only rule
// offered for proto3 files.
let fieldRules = [
  createCompletionKeyword("repeated"),
  createCompletionKeyword("required"),
  createCompletionKeyword("optional"),
];

// Built-in scalar field types with wire-format notes.
let scalarTypes = [
  createCompletionKeyword("bool", ``),
  createCompletionKeyword(
    "int32",
    ` Uses variable-length encoding. Inefficient for encoding negative numbers – if your field is likely to have negative values, use sint32 instead.`
  ),
  createCompletionKeyword(
    "int64",
    ` Uses variable-length encoding. Inefficient for encoding negative numbers – if your field is likely to have negative values, use sint64 instead. `
  ),
  createCompletionKeyword("uint32", `Uses variable-length encoding.`),
  createCompletionKeyword("uint64", `Uses variable-length encoding.`),
  createCompletionKeyword(
    "sint32",
    ` Uses variable-length encoding. Signed int value. These more efficiently encode negative numbers than regular int32s. `
  ),
  createCompletionKeyword(
    "sint64",
    ` Uses variable-length encoding. Signed int value. These more efficiently encode negative numbers than regular int64s. `
  ),
  createCompletionKeyword(
    "fixed32",
    ` Always four bytes. More efficient than uint32 if values are often greater than 2^28. `
  ),
  createCompletionKeyword(
    "fixed64",
    ` Always eight bytes. More efficient than uint64 if values are often greater than 2^56. `
  ),
  createCompletionKeyword("sfixed32", `Always four bytes.`),
  createCompletionKeyword("sfixed64", `Always eight bytes.`),
  createCompletionKeyword("float", ``),
  createCompletionKeyword("double", ``),
  createCompletionKeyword(
    "string",
    ` A string must always contain UTF-8 encoded or 7-bit ASCII text. `
  ),
  createCompletionKeyword(
    "bytes",
    ` May contain any arbitrary sequence of bytes. `
  ),
];

/**
 * Builds a Keyword-kind completion item with optional documentation.
 */
function createCompletionKeyword(label: string, doc?: string): vscode.CompletionItem {
  let item = new vscode.CompletionItem(label);
  item.kind = vscode.CompletionItemKind.Keyword;
  if (doc) {
    item.documentation = doc;
  }
  return item;
}

/**
 * Builds a Value-kind completion item for an option name, with documentation.
 */
function createCompletionOption(option: string, doc: string): vscode.CompletionItem {
  let item = new vscode.CompletionItem(option);
  item.kind = vscode.CompletionItemKind.Value;
  item.documentation = doc;
  return item;
}

// NOTE: the helper that follows re-tokenizes the whole document on every
// completion request — not very efficiently.
function findMessageEnum(document: vscode.TextDocument): Record<"message" | "enum", vscode.CompletionItem[]> { const msgCompletionItems: vscode.CompletionItem[] = []; const enumCompletionItems: vscode.CompletionItem[] = []; const tokenizer = tokenize(document.getText(), false); for (let tok = tokenizer.next(); tok !== null; tok = tokenizer.next()) { if (tok === "message" || tok === "enum") { // find identifiers after `message` keyword const identifier = tokenizer.peek(); if (identifier !== null && /^[a-zA-Z_]+\w*$/.test(identifier)) { if (tok === "message") { const item = new vscode.CompletionItem(identifier, vscode.CompletionItemKind.Struct); item.detail = "message " + identifier; // should extract message declaration and comments here msgCompletionItems.push(item); } else { const item = new vscode.CompletionItem(identifier, vscode.CompletionItemKind.Enum); item.detail = "enum " + identifier; enumCompletionItems.push(item); } } } } return { message: msgCompletionItems, enum: enumCompletionItems, }; } export class Proto3CompletionItemProvider implements vscode.CompletionItemProvider { public provideCompletionItems( document: vscode.TextDocument, position: vscode.Position, token: vscode.CancellationToken ): Thenable<vscode.CompletionItem[]> { return new Promise<vscode.CompletionItem[]>((resolve, reject) => { let syntax = 2; const matches = document.getText().match(/syntax\s*=\s*"(proto2|proto3)"/); if (matches.length >= 2 && matches[1] === "proto3") { syntax = 3; } const textBeforeCursor = document.lineAt(position.line).text.substring(0, position.character - 1); const suggestions: vscode.CompletionItem[] = []; const scope = SyntacticGuessScope(document, position); if (scope === null) { suggestions.push(kwSyntax); suggestions.push(kwPackage); suggestions.push(kwOption); suggestions.push(kwImport); suggestions.push(kwMessage); suggestions.push(kwEnum); if (textBeforeCursor.match(/^\s*option\s+\w*$/)) { suggestions.push(...fileOptions); } return resolve(suggestions); } 
switch (scope.name) { case "service": if (textBeforeCursor.match(/^\s*option(\s*\(?|\s)\s*\w*$/)) { suggestions.push(...serviceOptions); } else { suggestions.push(kwRpc); suggestions.push(kwOption); } break; case "message": if (textBeforeCursor.match(/(repeated|required|optional)\s*\w*$/)) { const result = findMessageEnum(document); suggestions.push(...scalarTypes, ...result.enum, ...result.message); return resolve(suggestions); } else if (textBeforeCursor.match(/^\s*option(\s*\(?|\s)\s*\w*$/)) { suggestions.push(...msgOptions); return resolve(suggestions); } else if (textBeforeCursor.match(/.*\[.*/)) { suggestions.push(...fieldOptions); if (syntax == 2) { suggestions.push(fieldDefault); } return resolve(suggestions); } suggestions.push(kwOption); suggestions.push(kwMessage); suggestions.push(kwEnum); suggestions.push(kwReserved); if (syntax == 2) { suggestions.push(...fieldRules); } else { suggestions.push(fieldRules[0]); } const result = findMessageEnum(document); suggestions.push(...scalarTypes, ...result.enum, ...result.message); break; case "enum": if (textBeforeCursor.match(/^\s*option(\s*\(?|\s)\s*\w*$/)) { suggestions.push(...enumOptions); } else { suggestions.push(kwOption); } break; case "rpcbody": if (textBeforeCursor.match(/^\s*option(\s*\(?|\s)\s*\w*$/)) { suggestions.push(...serviceOptions); } else { suggestions.push(kwOption); } break; case "rpc": case "returns": suggestions.push(...scalarTypes, ...findMessageEnum(document).message); break; default: break; } return resolve(suggestions); }); } }
the_stack
import { ArrayHelper, StringHelper } from "../ExtensionMethods"; import { ComplexProperty } from "./ComplexProperty"; import { Dictionary, StringKeyPicker } from "../AltDictionary"; import { DictionaryEntryProperty } from "./DictionaryEntryProperty"; import { DictionaryKeyType } from "../Enumerations/DictionaryKeyType"; import { EwsServiceJsonReader } from "../Core/EwsServiceJsonReader"; import { EwsServiceXmlWriter } from "../Core/EwsServiceXmlWriter"; import { EwsUtilities } from "../Core/EwsUtilities"; import { ExchangeService } from "../Core/ExchangeService"; import { ICustomUpdateSerializer } from "../Interfaces/ICustomXmlUpdateSerializer"; import { IOutParam } from "../Interfaces/IOutParam"; import { PropertyDefinition } from "../PropertyDefinitions/PropertyDefinition"; import { ServiceObject } from "../Core/ServiceObjects/ServiceObject"; import { XmlAttributeNames } from "../Core/XmlAttributeNames"; import { XmlElementNames } from "../Core/XmlElementNames"; import { XmlNamespace } from "../Enumerations/XmlNamespace"; /** * Represents a generic dictionary that can be sent to or retrieved from EWS. * * @typeparam <TKey> The type of key. * @typeparam <TEntry> The type of entry. */ export abstract class DictionaryProperty<TKey, TEntry extends DictionaryEntryProperty<TKey>> extends ComplexProperty implements ICustomUpdateSerializer { private dictionaryKeyType: DictionaryKeyType = DictionaryKeyType.EmailAddressKey; private dictionaryKeyTypeEnum: any; private dictionaryKeyDelegate: StringKeyPicker<TKey> = (key) => { return this.dictionaryKeyTypeEnum[<any>key] }; private entries: Dictionary<TKey, TEntry> = new Dictionary<TKey, TEntry>(this.dictionaryKeyDelegate); private removedEntries: Dictionary<TKey, TEntry> = new Dictionary<TKey, TEntry>(this.dictionaryKeyDelegate); private addedEntries: TKey[] = []; private modifiedEntries: TKey[] = []; /** * Gets the entries. * * @value The entries. 
*/ get Entries(): Dictionary<TKey, TEntry> { return this.entries; } /** * @internal Initializes a new instance of the **DictionaryProperty** class. * * @param {DictionaryKeyType} dictionaryKeyType Dictionary Key type, needed to workaround c# type checking of generics. */ constructor(dictionaryKeyType: DictionaryKeyType) { super(); this.dictionaryKeyType = dictionaryKeyType; this.dictionaryKeyTypeEnum = EwsUtilities.GetDictionaryKeyTypeEnum(this.dictionaryKeyType); } /** * @internal Clears the change log. */ ClearChangeLog(): void { this.addedEntries.splice(0); this.removedEntries.clear(); this.modifiedEntries.splice(0); for (var entry of this.entries.Values) { entry.ClearChangeLog(); } } /** * Determines whether this instance contains the specified key. * * @param {TKey} key The key. * @return {boolean} true if this instance contains the specified key; otherwise, false. */ Contains(key: TKey): boolean { return this.Entries.containsKey(key); } /** * @internal Creates the entry. * * @return {TEntry} Dictionary entry. */ CreateEntry(): TEntry { return this.CreateEntryInstance(); } /** * @internal Creates instance of dictionary entry. * * @return {TEntry} New instance. */ abstract CreateEntryInstance(): TEntry; /** * Entry was changed. * * @param {} complexProperty The complex property. */ private EntryChanged(complexProperty: ComplexProperty): void { var key: TKey = (<DictionaryEntryProperty<any>>complexProperty).Key; if (this.addedEntries.indexOf(key) === -1 && this.modifiedEntries.indexOf(key) === -1) { this.modifiedEntries.push(key); this.Changed(); } } /** * @internal Gets the name of the entry XML element. * * @param {TEntry} entry The entry. * @return {string} XML element name. */ GetEntryXmlElementName(entry: TEntry): string { return XmlElementNames.Entry; } /** * @internal Gets the index of the field. * * @param {TKey} key The key. * @return {string} Key index. 
*/ GetFieldIndex(key: TKey): string { return this.dictionaryKeyTypeEnum[<any>key]; } /** * @internal Gets the field URI. * * @return {string} Field URI. */ GetFieldURI(): string { return null; } /** * @internal Add entry. * * @param {TEntry} entry The entry. */ InternalAdd(entry: TEntry): void { entry.OnChange.push(this.EntryChanged.bind(this)); this.entries.Add(entry.Key, entry); this.addedEntries.push(entry.Key); this.removedEntries.remove(entry.Key); this.Changed(); } /** * @internal Add or replace entry. * * @param {TEntry} entry The entry. */ InternalAddOrReplace(entry: TEntry): void { var oldEntry: IOutParam<TEntry> = { outValue: null }; if (this.entries.tryGetValue(entry.Key, oldEntry)) { ArrayHelper.RemoveEntry(oldEntry.outValue.OnChange, this.EntryChanged); entry.OnChange.push(this.EntryChanged.bind(this)); if (this.addedEntries.indexOf(entry.Key) === -1) { if (this.modifiedEntries.indexOf(entry.Key) === -1) { this.modifiedEntries.push(entry.Key); } } this.Changed(); } else { this.InternalAdd(entry); } } /** * Remove entry based on key. * * @param {TKey} key The key. */ InternalRemove(key: TKey): void { var entry: IOutParam<TEntry> = { outValue: null }; if (this.entries.tryGetValue(key, entry)) { ArrayHelper.RemoveEntry(entry.outValue.OnChange, this.EntryChanged); this.entries.remove(key); this.removedEntries.Add(key, entry.outValue); this.Changed(); } ArrayHelper.RemoveEntry(this.addedEntries, key); ArrayHelper.RemoveEntry(this.modifiedEntries, key); } /** * @internal Loads service object from XML. * * @param {any} jsObject Json Object converted from XML. * @param {ExchangeService} service The service. 
*/ LoadFromXmlJsObject(jsonProperty: any, service: ExchangeService): void { if (jsonProperty[XmlElementNames.Entry]) { var entries: any[] = EwsServiceJsonReader.ReadAsArray(jsonProperty, XmlElementNames.Entry); for (var jsonEntry of entries) { var entry: TEntry = this.CreateEntryInstance(); entry.LoadFromXmlJsObject(jsonEntry, service); this.InternalAdd(entry); } } } /** * @internal Writes elements to XML. * * @param {EwsServiceXmlWriter} writer The writer. */ WriteElementsToXml(writer: EwsServiceXmlWriter): void { for (var keyValuePair of this.entries.Items) { keyValuePair.value.WriteToXml(writer, this.GetEntryXmlElementName(keyValuePair.value)); } } /** * @internal Writes to XML. * * @param {EwsServiceXmlWriter} writer The writer. * @param {XmlNamespace} xmlNamespace The XML namespace. * @param {string} xmlElementName Name of the XML element. */ WriteToXml(writer: EwsServiceXmlWriter, xmlElementName: string, xmlNamespace?: XmlNamespace): void { // Only write collection if it has at least one element. if (this.entries.Count > 0) { super.WriteToXml( writer, xmlElementName, xmlNamespace) } } /** * Writes the URI to XML. * * @param {EwsServiceXmlWriter} writer The writer. * @param {TKey} key The key. */ private WriteUriToXml(writer: EwsServiceXmlWriter, key: TKey): void { writer.WriteStartElement(XmlNamespace.Types, XmlElementNames.IndexedFieldURI); writer.WriteAttributeValue(XmlAttributeNames.FieldURI, this.GetFieldURI()); writer.WriteAttributeValue(XmlAttributeNames.FieldIndex, this.GetFieldIndex(key)); writer.WriteEndElement(); } /** * @internal Writes the update to XML. * ICustomUpdateSerializer.WriteSetUpdateToXml * * @param {EwsServiceXmlWriter} writer The writer. * @param {ServiceObject} ewsObject The ews object. * @param {PropertyDefinition} propertyDefinition Property definition. * @return {boolean} True if property generated serialization. 
*/ WriteSetUpdateToXml( writer: EwsServiceXmlWriter, ewsObject: ServiceObject, propertyDefinition: PropertyDefinition): boolean { let tempEntries: TEntry[] = []; for (let key of this.addedEntries) { tempEntries.push(this.entries.get(key)); } for (let key of this.modifiedEntries) { tempEntries.push(this.entries.get(key)); } for (let entry of tempEntries) { if (!entry.WriteSetUpdateToXml( writer, ewsObject, propertyDefinition.XmlElementName)) { writer.WriteStartElement(XmlNamespace.Types, ewsObject.GetSetFieldXmlElementName()); this.WriteUriToXml(writer, entry.Key); writer.WriteStartElement(XmlNamespace.Types, ewsObject.GetXmlElementName()); writer.WriteStartElement(XmlNamespace.Types, propertyDefinition.XmlElementName); entry.WriteToXml(writer, this.GetEntryXmlElementName(entry)); writer.WriteEndElement(); writer.WriteEndElement(); writer.WriteEndElement(); } } for (let entry of this.removedEntries.Values) { if (!entry.WriteDeleteUpdateToXml(writer, ewsObject)) { writer.WriteStartElement(XmlNamespace.Types, ewsObject.GetDeleteFieldXmlElementName()); this.WriteUriToXml(writer, entry.Key); writer.WriteEndElement(); } } return true; } /** * @internal Writes the deletion update to XML. * ICustomUpdateSerializer.WriteDeleteUpdateToXml * * @param {EwsServiceXmlWriter} writer The writer. * @param {ServiceObject} ewsObject The ews object. * @return {boolean} True if property generated serialization. */ WriteDeleteUpdateToXml(writer: EwsServiceXmlWriter, ewsObject: ServiceObject): boolean { // Use the default XML serializer. return false; } }
the_stack
import { expect } from 'chai';
import { ethers } from 'hardhat';
import { describeFilter } from '@solidstate/library';
import { deployMockContract } from 'ethereum-waffle';
import { describeBehaviorOfERC165 } from '../../introspection';
import { ERC1155Base } from '../../../typechain';
import { SignerWithAddress } from '@nomiclabs/hardhat-ethers/signers';
import { BigNumber, ContractTransaction } from 'ethers';

// Hooks supplied by the concrete test file so this shared suite can exercise
// any ERC1155Base implementation.
interface ERC1155BaseBehaviorArgs {
  // deploys a fresh instance under test (called in beforeEach)
  deploy: () => Promise<ERC1155Base>;
  // implementation-specific hook that credits `amount` of token `id` to `address`
  mint: (
    address: string,
    id: BigNumber,
    amount: BigNumber,
  ) => Promise<ContractTransaction>;
  // implementation-specific hook that debits `amount` of token `id` from `address`
  burn: (
    address: string,
    id: BigNumber,
    amount: BigNumber,
  ) => Promise<ContractTransaction>;
  // token id used by the balance/transfer tests; falls back to zero when omitted
  tokenId?: BigNumber;
}

/**
 * Registers the reusable behavior test suite for ERC1155Base.
 *
 * @param args - deployment and mint/burn hooks plus optional token id (see
 *   ERC1155BaseBehaviorArgs)
 * @param skips - test titles to exclude, forwarded to describeFilter
 */
export function describeBehaviorOfERC1155Base(
  { deploy, mint, burn, tokenId }: ERC1155BaseBehaviorArgs,
  skips?: string[],
) {
  // shadow mocha's describe with a filtering variant that honors `skips`
  const describe = describeFilter(skips);

  describe('::ERC1155Base', function () {
    let holder: SignerWithAddress;
    let spender: SignerWithAddress;
    let instance: ERC1155Base;

    before(async function () {
      [holder, spender] = await ethers.getSigners();
    });

    beforeEach(async function () {
      // fresh instance per test so balances/approvals don't leak between cases
      instance = await deploy();
    });

    // 0xd9b67a26 is the ERC165 interface ID assigned to ERC1155 by EIP-1155
    describeBehaviorOfERC165(
      {
        deploy,
        interfaceIds: ['0xd9b67a26'],
      },
      skips,
    );

    describe('#balanceOf', function () {
      it('returns the balance of given token held by given address', async function () {
        const id = tokenId ?? ethers.constants.Zero;

        // balance starts at zero, rises by minted amount, returns to zero after burn
        expect(
          await instance.callStatic['balanceOf(address,uint256)'](
            holder.address,
            id,
          ),
        ).to.equal(0);

        const amount = ethers.constants.Two;
        await mint(holder.address, id, amount);

        expect(
          await instance.callStatic['balanceOf(address,uint256)'](
            holder.address,
            id,
          ),
        ).to.equal(amount);

        await burn(holder.address, id, amount);

        expect(
          await instance.callStatic['balanceOf(address,uint256)'](
            holder.address,
            id,
          ),
        ).to.equal(0);
      });

      describe('reverts if', function () {
        it('balance of zero address is queried', async function () {
          await expect(
            instance.callStatic['balanceOf(address,uint256)'](
              ethers.constants.AddressZero,
              ethers.constants.Zero,
            ),
          ).to.be.revertedWith('ERC1155: balance query for the zero address');
        });
      });
    });

    describe('#balanceOfBatch', function () {
      it('returns the balances of given tokens held by given addresses', async function () {
        expect(
          await instance.callStatic['balanceOfBatch(address[],uint256[])'](
            [holder.address],
            [ethers.constants.Zero],
          ),
        ).to.have.deep.members([ethers.constants.Zero]);

        // TODO: test delta
      });

      describe('reverts if', function () {
        it('input array lengths do not match', async function () {
          await expect(
            instance.callStatic['balanceOfBatch(address[],uint256[])'](
              [holder.address],
              [],
            ),
          ).to.be.revertedWith('ERC1155: accounts and ids length mismatch');
        });

        it('balance of zero address is queried', async function () {
          await expect(
            instance.callStatic['balanceOfBatch(address[],uint256[])'](
              [ethers.constants.AddressZero],
              [ethers.constants.Zero],
            ),
          ).to.be.revertedWith(
            'ERC1155: batch balance query for the zero address',
          );
        });
      });
    });

    describe('#isApprovedForAll', function () {
      it('returns whether given operator is approved to spend tokens of given account', async function () {
        expect(
          await instance.callStatic['isApprovedForAll(address,address)'](
            holder.address,
            spender.address,
          ),
        ).to.be.false;

        await instance
          .connect(holder)
          ['setApprovalForAll(address,bool)'](spender.address, true);

        expect(
          await instance.callStatic['isApprovedForAll(address,address)'](
            holder.address,
            spender.address,
          ),
        ).to.be.true;
      });
    });

    describe('#setApprovalForAll', function () {
      it('approves given operator to spend tokens on behalf of sender', async function () {
        await instance
          .connect(holder)
          ['setApprovalForAll(address,bool)'](spender.address, true);

        expect(
          await instance.callStatic['isApprovedForAll(address,address)'](
            holder.address,
            spender.address,
          ),
        ).to.be.true;

        // TODO: test case is no different from #isApprovedForAll test; tested further by #safeTransferFrom and #safeBatchTransferFrom tests
      });

      describe('reverts if', function () {
        it('given operator is sender', async function () {
          await expect(
            instance
              .connect(holder)
              ['setApprovalForAll(address,bool)'](holder.address, true),
          ).to.be.revertedWith('ERC1155: setting approval status for self');
        });
      });
    });

    describe('#safeTransferFrom', function () {
      it('sends amount from A to B', async function () {
        const id = tokenId ?? ethers.constants.Zero;
        const amount = ethers.constants.Two;

        await mint(spender.address, id, amount);

        expect(
          await instance.callStatic['balanceOf(address,uint256)'](
            spender.address,
            id,
          ),
        ).to.equal(amount);

        // spender transfers its own tokens, so no prior approval is needed
        await instance
          .connect(spender)
          ['safeTransferFrom(address,address,uint256,uint256,bytes)'](
            spender.address,
            holder.address,
            id,
            amount,
            ethers.utils.randomBytes(0),
          );

        expect(
          await instance.callStatic['balanceOf(address,uint256)'](
            spender.address,
            id,
          ),
        ).to.equal(ethers.constants.Zero);

        expect(
          await instance.callStatic['balanceOf(address,uint256)'](
            holder.address,
            id,
          ),
        ).to.equal(amount);
      });

      describe('reverts if', function () {
        it('sender has insufficient balance', async function () {
          const id = tokenId ?? ethers.constants.Zero;
          const amount = ethers.constants.Two;

          await expect(
            instance
              .connect(spender)
              ['safeTransferFrom(address,address,uint256,uint256,bytes)'](
                spender.address,
                holder.address,
                id,
                amount,
                ethers.utils.randomBytes(0),
              ),
          ).to.be.revertedWith('ERC1155: insufficient balances for transfer');
        });

        it('operator is not approved to act on behalf of sender', async function () {
          // holder attempts to move spender's tokens without approval
          await expect(
            instance
              .connect(holder)
              ['safeTransferFrom(address,address,uint256,uint256,bytes)'](
                spender.address,
                holder.address,
                ethers.constants.Zero,
                ethers.constants.Zero,
                ethers.utils.randomBytes(0),
              ),
          ).to.be.revertedWith('ERC1155: caller is not owner nor approved');
        });

        it('receiver is invalid ERC1155Receiver', async function () {
          // contract receiver with an empty ABI: the waffle mock itself reverts
          // when the onERC1155Received callback is invoked
          const mock = await deployMockContract(holder, [
            /* no functions */
          ]);

          await expect(
            instance
              .connect(spender)
              ['safeTransferFrom(address,address,uint256,uint256,bytes)'](
                spender.address,
                mock.address,
                ethers.constants.Zero,
                ethers.constants.Zero,
                ethers.utils.randomBytes(0),
              ),
          ).to.be.revertedWith('Mock on the method is not initialized');
        });

        it('receiver rejects transfer', async function () {
          const mock = await deployMockContract(holder, [
            'function onERC1155Received (address, address, uint, uint, bytes) external view returns (bytes4)',
          ]);

          // any value other than the onERC1155Received selector counts as rejection
          await mock.mock.onERC1155Received.returns('0x00000000');

          await expect(
            instance
              .connect(spender)
              ['safeTransferFrom(address,address,uint256,uint256,bytes)'](
                spender.address,
                mock.address,
                ethers.constants.Zero,
                ethers.constants.Zero,
                ethers.utils.randomBytes(0),
              ),
          ).to.be.revertedWith('ERC1155: ERC1155Receiver rejected tokens');
        });
      });
    });

    describe('#safeBatchTransferFrom', function () {
      it('sends amount from A to B, batch version', async function () {
        const id = tokenId ?? ethers.constants.Zero;
        const amount = ethers.constants.Two;

        await mint(spender.address, id, amount);

        expect(
          await instance.callStatic['balanceOfBatch(address[],uint256[])'](
            [spender.address],
            [id],
          ),
        ).to.have.deep.members([amount]);

        await instance
          .connect(spender)
          ['safeBatchTransferFrom(address,address,uint256[],uint256[],bytes)'](
            spender.address,
            holder.address,
            [id],
            [amount],
            ethers.utils.randomBytes(0),
          );

        expect(
          await instance.callStatic['balanceOfBatch(address[],uint256[])'](
            [spender.address],
            [id],
          ),
        ).to.have.deep.members([ethers.constants.Zero]);

        expect(
          await instance.callStatic['balanceOfBatch(address[],uint256[])'](
            [holder.address],
            [id],
          ),
        ).to.have.deep.members([amount]);
      });

      describe('reverts if', function () {
        it('sender has insufficient balance', async function () {
          const id = tokenId ?? ethers.constants.Zero;
          const amount = ethers.constants.Two;

          await expect(
            instance
              .connect(spender)
              [
                'safeBatchTransferFrom(address,address,uint256[],uint256[],bytes)'
              ](
                spender.address,
                holder.address,
                [id],
                [amount],
                ethers.utils.randomBytes(0),
              ),
          ).to.be.revertedWith('ERC1155: insufficient balances for transfer');
        });

        it('operator is not approved to act on behalf of sender', async function () {
          await expect(
            instance
              .connect(holder)
              [
                'safeBatchTransferFrom(address,address,uint256[],uint256[],bytes)'
              ](
                spender.address,
                holder.address,
                [],
                [],
                ethers.utils.randomBytes(0),
              ),
          ).to.be.revertedWith('ERC1155: caller is not owner nor approved');
        });

        it('receiver is invalid ERC1155Receiver', async function () {
          // empty-ABI mock: the mock reverts with its own message when called
          const mock = await deployMockContract(holder, [
            /* no functions */
          ]);

          await expect(
            instance
              .connect(spender)
              [
                'safeBatchTransferFrom(address,address,uint256[],uint256[],bytes)'
              ](
                spender.address,
                mock.address,
                [],
                [],
                ethers.utils.randomBytes(0),
              ),
          ).to.be.revertedWith('Mock on the method is not initialized');
        });

        it('receiver rejects transfer', async function () {
          const mock = await deployMockContract(holder, [
            'function onERC1155BatchReceived (address, address, uint[], uint[], bytes) external view returns (bytes4)',
          ]);

          // any value other than the onERC1155BatchReceived selector counts as rejection
          await mock.mock.onERC1155BatchReceived.returns('0x00000000');

          await expect(
            instance
              .connect(spender)
              [
                'safeBatchTransferFrom(address,address,uint256[],uint256[],bytes)'
              ](
                spender.address,
                mock.address,
                [],
                [],
                ethers.utils.randomBytes(0),
              ),
          ).to.be.revertedWith('ERC1155: ERC1155Receiver rejected tokens');
        });
      });
    });
  });
}
the_stack